Repository: caderek/gramma
Branch: master
Commit: 1c5de79042a1
Files: 93
Total size: 510.2 KB
Directory structure:
gitextract_6at5w77x/
├── .circleci/
│ └── config.yml
├── .eslintignore
├── .eslintrc.json
├── .github/
│ └── ISSUE_TEMPLATE/
│ ├── bug_report.md
│ ├── documentation.md
│ ├── feature_request.md
│ └── question.md
├── .gitignore
├── .gramma.json
├── .husky/
│ ├── commit-msg
│ ├── post-commit
│ └── pre-commit
├── .npmignore
├── .prettierrc
├── CHANGELOG.md
├── LICENSE.md
├── README.md
├── _config.yml
├── _layouts/
│ └── default.html
├── assets/
│ └── css/
│ └── style.scss
├── bundle/
│ └── gramma.esm.js
├── data/
│ ├── languages.json
│ └── rules.json
├── examples/
│ ├── api-markdown.js
│ ├── api-plain.js
│ └── api-simple.js
├── hello.md
├── lib/
│ ├── findUpSync.mjs
│ ├── package.json
│ └── prepareMarkdown.mjs
├── package.json
├── scripts/
│ ├── checkLanguagesSupport.js
│ └── zipBinaries.js
├── src/
│ ├── actions/
│ │ ├── checkInteractively.js
│ │ ├── checkNonInteractively.js
│ │ ├── configure.js
│ │ ├── save.js
│ │ └── saveNow.js
│ ├── boot/
│ │ ├── load.js
│ │ └── prepareConfig.js
│ ├── cli.js
│ ├── cli.test.js
│ ├── commands/
│ │ ├── check.js
│ │ ├── commit.js
│ │ ├── config.js
│ │ ├── debug.js
│ │ ├── hook.js
│ │ ├── init.js
│ │ ├── listen.js
│ │ ├── paths.js
│ │ └── server.js
│ ├── components/
│ │ ├── FixMenu.js
│ │ ├── FixMenu.test.js
│ │ ├── Mistake.js
│ │ └── Mistake.test.js
│ ├── context.js
│ ├── index.d.ts
│ ├── index.js
│ ├── initialConfig.js
│ ├── prompts/
│ │ ├── confirmConfig.js
│ │ ├── confirmInit.js
│ │ ├── confirmPort.js
│ │ ├── confirmServerReinstall.js
│ │ ├── handleMistake.js
│ │ ├── handleSave.js
│ │ └── mainMenu.js
│ ├── requests/
│ │ ├── checkViaAPI.d.ts
│ │ ├── checkViaAPI.js
│ │ ├── checkViaCmd.js
│ │ ├── checkWithFallback.js
│ │ └── updates.js
│ ├── server/
│ │ ├── getServerInfo.js
│ │ ├── getServerPID.js
│ │ ├── installServer.js
│ │ ├── showServerGUI.js
│ │ ├── startServer.js
│ │ └── stopServer.js
│ ├── text-manipulation/
│ │ ├── replace.js
│ │ ├── replace.test.js
│ │ ├── replaceAll.d.ts
│ │ ├── replaceAll.js
│ │ └── replaceAll.test.js
│ ├── utils/
│ │ ├── appLocation.js
│ │ ├── downloadFile.js
│ │ ├── equal.js
│ │ ├── findUpSync.js
│ │ ├── prepareMarkdown.js
│ │ ├── stripStyles.js
│ │ ├── stripStyles.test.js
│ │ └── unzipFile.js
│ └── validators/
│ ├── languages.js
│ └── rules.js
└── tsconfig.json
================================================
FILE CONTENTS
================================================
================================================
FILE: .circleci/config.yml
================================================
version: 2
jobs:
build:
docker:
- image: cimg/node:14.18.0
working_directory: ~/repo
steps:
- checkout
- restore_cache:
keys:
- v1-dependencies-{{ checksum "package.json" }}
- v1-dependencies-
- run: yarn install
- save_cache:
paths:
- node_modules
key: v1-dependencies-{{ checksum "package.json" }}
- run: yarn run test:ci
- run: yarn run lint
================================================
FILE: .eslintignore
================================================
lib/prepareMarkdown.mjs
src/utils/prepareMarkdown.js
src/utils/findUpSync.js
*/**/*.d.ts
================================================
FILE: .eslintrc.json
================================================
{
"extends": ["airbnb", "prettier"],
"rules": {
"no-console": 0,
"arrow-body-style": 0,
"no-restricted-syntax": 0,
"no-await-in-loop": 0,
"camelcase": 0
},
"env": {
"jest": true,
"node": true
},
"globals": {
"fetch": true
}
}
================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: caderek
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. Linux, macOS, Windows]
- OS version [e.g. Ubuntu 18.04, Mojave, 10]
**Additional context**
Add any other context about the problem here.
================================================
FILE: .github/ISSUE_TEMPLATE/documentation.md
================================================
---
name: Documentation
about: All docs related issues
title: ''
labels: documentation
assignees: caderek
---
**Describe what is missing, unclear or incorrect**
A clear and concise description of what you want us to change/add.
================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: caderek
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
================================================
FILE: .github/ISSUE_TEMPLATE/question.md
================================================
---
name: Question
about: All questions that do not require changes to the codebase
title: ''
labels: help wanted
assignees: caderek
---
**How can I help you?**
================================================
FILE: .gitignore
================================================
node_modules
.history
.vscode
example-responses
example.txt
coverage
bin
assets/css/*.css
assets/css/*.css.map
================================================
FILE: .gramma.json
================================================
{
"api_url": "https://api.languagetool.org/v2/check",
"api_key": "",
"dictionary": [
"Asturian",
"Bugfix",
"CHANGELOG",
"CircleCI",
"Codacy",
"CommonJS",
"Config",
"Github",
"Gramma",
"Grammarbot",
"IIFE",
"JS",
"Moçambique",
"NPM",
"README",
"XXXXXXXX",
"YYYYYYYY",
"_blank",
"api",
"api_key",
"ast-ES",
"async",
"backend",
"boolean",
"br-FR",
"chmod",
"config",
"confused_words",
"const",
"correctText",
"da-DK",
"de",
"de-AT",
"de-CH",
"de-DE",
"dev",
"el-GR",
"eo",
"eslintignore",
"esm",
"esm-min",
"exampleReplacements",
"false_friends",
"foo",
"fr",
"gender_neutrality",
"gl-ES",
"gramma",
"grammarbot",
"gui",
"href",
"iife",
"img",
"init",
"io",
"ja-JP",
"js",
"json",
"km-KH",
"linter",
"nl",
"npm",
"npmignore",
"pid",
"preAO",
"prepareReplacements",
"replaceAll",
"rimraf",
"ro-RO",
"ru-RU",
"signup",
"sk-SK",
"sl-SI",
"src",
"stdin",
"stdout",
"stylesheet",
"sv",
"symlink",
"tl-PH",
"uk-UA",
"url",
"usedCfg",
"zh-CN"
],
"language": "en-US",
"rules": {
"casing": true,
"colloquialisms": true,
"compounding": true,
"confused_words": true,
"false_friends": true,
"gender_neutrality": true,
"grammar": true,
"misc": true,
"punctuation": true,
"redundancy": true,
"regionalisms": true,
"repetitions": true,
"semantics": true,
"style": true,
"typography": false,
"typos": true
}
}
================================================
FILE: .husky/commit-msg
================================================
#!/bin/sh
exec < /dev/tty
npx gramma hook $1
================================================
FILE: .husky/post-commit
================================================
#!/bin/sh
npx gramma hook cleanup
================================================
FILE: .husky/pre-commit
================================================
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
npm test
================================================
FILE: .npmignore
================================================
_layouts
node_modules
.history
.circleci
example-responses
example.txt
coverage
bin
.husky
.github
.vscode
lib
assets/css
assets/gramma-logo.svg
assets/gramma-text.svg
assets/banner.png
assets/banner-small.png
scripts
examples
================================================
FILE: .prettierrc
================================================
{
"trailingComma": "all",
"tabWidth": 2,
"semi": false,
"singleQuote": false,
"arrowParens": "always",
"printWidth": 80
}
================================================
FILE: CHANGELOG.md
================================================
# CHANGELOG
## 1.0.0
First stable release.
## 1.1.0
- Added Git hook integration
- Updated dependencies and documentation
- Improved error handling
## 1.2.0
- Added Markdown support
- Used api.languagetool.org as the default API
## 1.3.0
- Support for environment variables in config files
- Local config works in subdirectories
- Automatic markdown support for .md files
- Better error handling
- Improved documentation
## 1.4.0
- Automatically include changes to .gramma.json when executing Git hook
- Standalone binaries migrated to Node 16
## 1.4.1
- Fixed JS API, added type definitions
- Fixed hooks behavior with commit --verbose flag
## 1.4.2 - 1.4.4
- Isomorphic JS API (works on browser)
## 1.4.5
- Fixed CORS in JS API (browser)
## 1.4.6 - 1.4.7
- Bundles (esm, esm-min, iife)
## 1.4.8
- Fixed links in README
## 1.5.0
- When local server is installed but not running, Gramma will now try to use command-line interface for LanguageTool communication instead of spawning HTTP server (if possible).
- Gramma will now automatically check for updates once a day.
- Added validation for languages and rules parameters.
## 1.6.0
- Added `gramma server info` command.
- Added option to set custom port when managing local server manually.
================================================
FILE: LICENSE.md
================================================
Copyright 2021 Maciej Cąderek
Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted,
provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
================================================
FILE: README.md
================================================
## Features
- Provides advanced grammar checks via LanguageTool (remote API or local server).
- Supports global and local (per-project) configuration.
- Supports plain text and markdown.
- Git integration!
- Fully interactive!
## Contents
1. [Installation](#installation)
- [Via NPM (global)](#installation-npm)
- [Standalone binary](#installation-binary)
- [Dev tool for JS/TS projects](#installation-dev)
- [Local LanguageTool server (optional)](#installation-server)
1. [Usage](#usage)
- [Check file](#usage-check)
- [Check string](#usage-listen)
- [Git commit with grammar check](#usage-commit)
- [Command-line options](#usage-options)
- [Usage inside VIM](#usage-vim)
1. [Configuration](#config)
- [Introduction](#config-intro)
- [Local config](#config-local)
- [Git integration](#config-git)
- [Checker settings](#config-checker)
- [Customizing API server](#config-server)
- [Security](#config-security)
1. [Managing a local server](#server)
1. [JS API](#js)
1. [License](#license)
## Installation
### Via NPM
It is the recommended way if you have Node.js already installed (or you are willing to do so).
```
npm i gramma -g
```
### Standalone binary
If you prefer a single binary file, you can download it for the most popular platforms:
- [gramma-linux64-v1.6.0.zip](https://github.com/caderek/gramma/releases/download/v1.6.0/gramma-linux64-v1.6.0.zip)
- [gramma-macos-v1.6.0.zip](https://github.com/caderek/gramma/releases/download/v1.6.0/gramma-macos-v1.6.0.zip)
- [gramma-windows64-v1.6.0.zip](https://github.com/caderek/gramma/releases/download/v1.6.0/gramma-windows64-v1.6.0.zip)
After downloading and unpacking the binary, add it to your PATH or create a symlink to your executable directory (depending on the platform).
### Dev tool for JS/TS projects
You can install Gramma locally for your JS/TS project - this method gives you a separate, project-specific config.
```
npm i gramma -D
```
or
```
yarn add gramma -D
```
Then create the local config file:
```
npx gramma init
```
You will be asked if you want to integrate Gramma with Git (via hook). You can later manually toggle git hook via `npx gramma hook` command.
Git hook also works with a non-default hooks path (Husky, etc.).
### Local LanguageTool server (optional)
For this to work, you have to install Java 1.8 or higher (you can find it [here](https://adoptium.net)). You can check if you have it installed already by running:
```
java -version
```
To install the local server, use:
```
gramma server install
```
That's it - Gramma will now use and manage the local server automatically.
## Usage
### Check file
Interactive fix:
```
gramma check [file]
```
Just print potential mistakes and return status code:
```
gramma check -p [file]
```
Examples:
```
gramma check path/to/my_file.txt
```
```
gramma check -p path/to/other/file.txt
```
### Check string
Interactive fix:
```
gramma listen [text]
```
Just print potential mistakes and return status code:
```
gramma listen -p [text]
```
Examples:
```
gramma listen "This sentence will be checked interactively."
```
```
gramma listen -p "Suggestions for this sentence will be printed."
```
### Git commit with grammar check
_**TIP:** Instead of the commands below, you can use [Git integration](#config-git)._
Equivalent to `git commit -m [message]`:
```
gramma commit [text]
```
Equivalent to `git commit -am [message]`:
```
gramma commit -a [text]
```
Examples:
```
gramma commit "My commit message"
```
```
gramma commit -a "Another commit message (files added)"
```
### Command-line options
_Note: This section describes options for grammar-checking commands only. Other command-specific options are described in their specific sections of this document._
- `-p / --print` - check text in the non-interactive mode
- `-n / --no-colors` - when paired with the `-p` flag, removes colors from the output
- `-d / --disable [rule]` - disable a specific [rule](#available-rules)
- `-e / --enable [rule]` - enable a specific [rule](#available-rules)
- `-l / --language [language]` - mark a text as written in the provided [language](#available-languages)
- `-m / --markdown` - treat the input as markdown (removes some false-positives)
You can enable or disable multiple rules in one command by using a corresponding option multiple times. You can also compound boolean options if you use their short version.
Example:
```
gramma listen "I like making mistkaes!" -pn -d typos -d typography -e casing -l en-GB
```
### Usage inside VIM
If you are a VIM/Neovim user, you can use Gramma directly inside the editor:
Print the potential mistakes:
```
:w !gramma check /dev/stdin -pn
```
Interactive fix of the current file:
```
:terminal gramma check %
```
It will open the interactive terminal inside VIM - to handle Gramma suggestions, enter the interactive mode (`a` or `i`) and use Gramma as usual. After you fix the mistakes and replace a file, press `Enter` to return to the editor.
Example GIF (click to expand)
## Configuration
### Introduction
With Gramma, you can use a global and local configuration file. Gramma will use a proper config file following their priority:
1. Command-line options
2. Local config
3. Global config
Gramma will automatically generate a global configuration file on the first run.
You can check the path to the global configuration file (as well as other paths used by Gramma) via the following command:
```
gramma paths
```
You can change your settings by manually editing configuration files or running:
```
gramma config [-g]
```
_Note: `-g` (`--global`) flag should be used when you want to alter the global config._
### Local config
You can initialize local config by running the following command in your project's root directory:
```
gramma init
```
Gramma creates the local configuration file in your working directory under `.gramma.json` name.
### Git integration
You can toggle Git hook via:
```
gramma hook
```
It will add/remove an entry in `commit-msg` hook.
Gramma follows the Git configuration file, so it should work with a non-standard hooks location.
### Checker settings
#### Adding a word to the dictionary
Usually, you will add custom words to the local or global dictionary via interactive menu during the fix process, but you can also make it via separate command:
```
gramma config dictionary [-g]
```
Examples:
```
gramma config dictionary aws
gramma config dictionary figma -g
```
#### Changing default language
```
gramma config language [-g]
```
Examples:
```
gramma config language en-GB
gramma config language pl-PL -g
```
Available languages (click to expand)
Code
Name
languagetool.org
grammarbot.io
local
auto
automatic language detection
✔
✔
✔
ar
Arabic
✔
-
✔
ast-ES
Asturian
✔
✔
✔
be-BY
Belarusian
✔
✔
✔
br-FR
Breton
✔
✔
✔
ca-ES
Catalan
✔
✔
✔
ca-ES-valencia
Catalan (Valencian)
✔
✔
✔
zh-CN
Chinese
✔
-
✔
da-DK
Danish
✔
✔
✔
nl
Dutch
✔
✔
✔
nl-BE
Dutch (Belgium)
✔
-
✔
en
English
✔
✔
✔
en-AU
English (Australian)
✔
✔
✔
en-CA
English (Canadian)
✔
✔
✔
en-GB
English (GB)
✔
✔
✔
en-NZ
English (New Zealand)
✔
✔
✔
en-ZA
English (South African)
✔
✔
✔
en-US
English (US)
✔
✔
✔
eo
Esperanto
✔
✔
✔
fr
French
✔
-
✔
gl-ES
Galician
✔
✔
✔
de
German
✔
-
✔
de-AT
German (Austria)
✔
-
✔
de-DE
German (Germany)
✔
-
✔
de-CH
German (Swiss)
✔
-
✔
el-GR
Greek
✔
✔
✔
ga-IE
Irish
✔
-
✔
it
Italian
✔
-
✔
ja-JP
Japanese
✔
✔
✔
km-KH
Khmer
✔
✔
✔
fa
Persian
✔
✔
✔
pl-PL
Polish
✔
✔
✔
pt
Portuguese
✔
-
✔
pt-AO
Portuguese (Angola preAO)
✔
-
✔
pt-BR
Portuguese (Brazil)
✔
-
✔
pt-MZ
Portuguese (Moçambique preAO)
✔
-
✔
pt-PT
Portuguese (Portugal)
✔
-
✔
ro-RO
Romanian
✔
✔
✔
ru-RU
Russian
✔
-
✔
de-DE-x-simple-language
Simple German
✔
✔
✔
sk-SK
Slovak
✔
✔
✔
sl-SI
Slovenian
✔
✔
✔
es
Spanish
✔
-
✔
es-AR
Spanish (voseo)
✔
-
✔
sv
Swedish
✔
✔
✔
tl-PH
Tagalog
✔
✔
✔
ta-IN
Tamil
✔
✔
✔
uk-UA
Ukrainian
✔
✔
✔
_Note: By default, Gramma uses US English (`en-US`)._
#### Enabling and disabling rules
Enabling a specific rule:
```
gramma config enable [-g]
```
Disabling a specific rule:
```
gramma config disable [-g]
```
Examples:
```
gramma config enable punctuation
gramma config enable casing -g
gramma config disable typography
gramma config disable style -g
```
Available rules (click to expand)
Rule Description
casing Rules about detecting uppercase words where lowercase is required and vice versa.
colloquialisms Colloquial style.
compounding Rules about spelling terms as one word or as separate words.
confused_words Words that are easily confused, like 'there' and 'their' in English.
false_friends False friends: words easily confused by language learners because a similar word exists in their native language.
gender_neutrality Helps to ensure gender-neutral terms.
grammar Basic grammar check.
misc Miscellaneous rules that don't fit elsewhere.
punctuation Punctuation mistakes.
redundancy Redundant words.
regionalisms Regionalisms: words used only in another language variant or used with different meanings.
repetitions Repeated words.
semantics Logic, content, and consistency problems.
style General style issues not covered by other categories, like overly verbose wording.
typography Problems like incorrectly used dash or quote characters.
typos Spelling issues.
_Note: By default, all rules are enabled._
### Customizing API server
#### Defining custom API endpoint
If you want to use remote LanguageTool server, or use the one already installed in your system (not installed via `gramma server install`), you can define a custom API endpoint:
```
gramma config api_url [-g]
```
Examples:
```
gramma config api_url https://my-custom-api-url.xyz/v2/check
gramma config api_url http://localhost:8081/v2/check -g
```
#### Running local server only when needed
If you do not want the local server to run all the time, you can configure Gramma to run it only when needed (`run → check → close`). It is useful when you run Gramma only from time to time and want to lower the memory consumption:
```
gramma config server_once true -g
```
Revert:
```
gramma config server_once false -g
```
#### Adding API key
If you use a paid option on [grammarbot.io](https://www.grammarbot.io/) or [languagetool.org](https://languagetool.org), you will receive an API key that you can use in Gramma:
```
gramma config api_key [-g]
```
### Security
If you need to store some sensitive data in your local config file (an API key, etc.), you can use environment variables directly in the config file (supports `.env` files).
Example:
```json
{
"api_url": "https://my-language-tool-api.com/v2/check",
"api_key": "${MY_ENV_VARIABLE}",
...other_settings
}
```
_Note: The default API (`api.languagetool.org`) is generally [safe and does not store your texts](https://languagetool.org/pl/legal/privacy), but if you want to be extra careful, you should use a [local server](#installation-server) or custom API endpoint._
## Managing a local server
If you have [configured a local server](#installation-server), Gramma will manage the server automatically - nevertheless, there might be situations when you want to manage the server manually. Gramma simplifies this by exposing basic server commands:
#### Starting the server
```
gramma server start
```
You can also specify a custom port:
```
gramma server start --port [port]
```
_Note: When you use this command, Gramma will ignore the `server_once` config option. This is expected behavior - I assume that if you use this command, you want the server to actually run, not stop after the first check._
#### Stopping the server
```
gramma server stop
```
#### Getting the server info
```
gramma server info
```
#### Getting the server PID
```
gramma server pid
```
_Note: You can use `gramma server info` instead - this command is kept to not break backward compatibility._
#### Opening the built-in GUI
```
gramma server gui
```
## JS API
In addition to command-line usage, you can use two exposed methods if you want to handle mistakes by yourself.
#### Imports
If you use Node.js or a bundler for your browser build, you can use CommonJS or esm:
```js
const gramma = require("gramma")
```
```js
import gramma from "gramma"
```
If you don't use a bundler and want to use gramma in the browser, there are some prebuilt packages in the [/bundle](https://github.com/caderek/gramma/tree/master/bundle) directory:
- `gramma.esm.js` - ES Modules bundle
- `gramma.esm.min.js` - minified ES Modules bundle
- `gramma.min.js` - IIFE bundle exposing global `gramma` variable
You can also import ESM bundle directly from CDN:
```html
```
#### check() method
Returns a promise with a check result.
```js
const gramma = require("gramma")
gramma.check("Some text to check.").then(console.log)
```
You can also pass a second argument - an options object. Available options:
- `api_url` - url to a non-default API server
- `api_key` - server API key
- `dictionary` - an array of words that should be whitelisted
- `language` - language code to specify the text language
- `rules` - object defining which rules should be disabled
Default options object (click to expand)
{
"api_url": "https://api.languagetool.org/v2/check",
"api_key": "",
"dictionary": [],
"language": "en-US",
"rules": {
"casing": true,
"colloquialisms": true,
"compounding": true,
"confused_words": true,
"false_friends": true,
"gender_neutrality": true,
"grammar": true,
"misc": true,
"punctuation": true,
"redundancy": true,
"regionalisms": true,
"repetitions": true,
"semantics": true,
"style": true,
"typography": true,
"typos": true
}
}
You can find all available values for each setting in the [configuration section](#config) of this document.
Example with all options set:
```js
const gramma = require("gramma")
gramma
.check("Some text to check.", {
api_url: "http://my-custom-language-tool-server.xyz/v2/check",
api_key: "SOME_API_KEY",
dictionary: ["npm", "gramma"],
language: "pl-PL",
rules: {
typography: false,
casing: false,
},
})
.then(console.log)
```
#### replaceAll() method
Replace words with provided ones. It takes an array of objects in the following format:
```js
const exampleReplacements = [
{ offset: 6, length: 3, change: "correct phrase" },
{ offset: 20, length: 7, change: "another phrase" },
]
```
You can find proper `offset` and `length` values in the object returned by the `check()` method.
Example usage:
```js
const gramma = require("gramma")
/** Your custom function **/
const prepareReplacements = (matches) => {
// your code...
}
const fix = async (text) => {
const { matches } = await gramma.check(text)
const replacements = prepareReplacements(matches)
return gramma.replaceAll(text, replacements)
}
const main = async () => {
const correctText = await fix("Some text to check")
console.log(correctText)
}
main()
```
## License
The project is under open, non-restrictive [ISC license](https://github.com/caderek/gramma/blob/master/LICENSE.md).
================================================
FILE: _config.yml
================================================
theme: jekyll-theme-cayman
================================================
FILE: _layouts/default.html
================================================
{% if site.google_analytics %}
{% endif %}
Gramma - command-line grammar checker
v1.6.0
{{ content }}
================================================
FILE: assets/css/style.scss
================================================
---
---
@import "{{ site.theme }}";
body {
margin: 0;
}
.page-header {
color: #fff;
text-align: center;
background-color: #0081b8;
background-image: linear-gradient(120deg, #0081b8, #b8e045);
padding: 15px;
}
.main-content h1,
.main-content h4,
.main-content h5,
.main-content h6 {
color: #0f9250;
}
.main-content h2 {
color: white;
background: linear-gradient(to right, #0081b8, #b8e045);
padding: 5px 10px;
margin-top: 50px;
}
.main-content h3 {
color: black;
background: linear-gradient(to right, #b5ddee, #e8f3c7);
padding: 5px 10px;
margin-top: 30px;
}
.project-tagline {
font-family: monospace;
}
.download {
display: inline-block;
padding: 10px;
opacity: 0.8;
width: 200px;
text-align: center;
margin-top: 10px;
cursor: pointer;
transition-duration: 0.2s;
}
.download:hover {
opacity: 1;
}
.download i {
font-size: 50px;
margin: 10px;
}
.download__link,
.download__link:visited {
text-decoration: none;
color: white;
outline: none;
}
.download__link:hover {
text-decoration: none;
color: white;
}
@media only screen and (max-width: 640px) {
.download {
display: block;
width: auto;
height: auto;
text-align: left;
margin: 0;
}
.download i {
font-size: 30px;
margin: 0 10px;
}
p {
display: initial;
position: relative;
top: -8px;
}
}
.divider {
display: none;
}
.version {
color: white;
position: absolute;
top: 10px;
right: 15px;
font-size: 20px;
opacity: 0.8;
}
.actions {
text-align: center;
padding: 0 10px 30px 10px;
}
.actions--bottom {
padding: 30px 10px 0 10px;
}
.site-footer {
text-align: center;
}
================================================
FILE: bundle/gramma.esm.js
================================================
// esbuild CommonJS-interop helper: wraps a module factory so that it is
// evaluated lazily and at most once. `cb` maps the module's path to its
// factory function; `mod` caches the { exports } record after the first call.
var __commonJS = (cb, mod) => function __require() {
  if (!mod) {
    mod = { exports: {} };
    var factory = cb[Object.keys(cb)[0]];
    factory(mod.exports, mod);
  }
  return mod.exports;
};
// node_modules/whatwg-fetch/dist/fetch.umd.js
var require_fetch_umd = __commonJS({
"node_modules/whatwg-fetch/dist/fetch.umd.js"(exports, module) {
(function(global, factory) {
typeof exports === "object" && typeof module !== "undefined" ? factory(exports) : typeof define === "function" && define.amd ? define(["exports"], factory) : factory(global.WHATWGFetch = {});
})(exports, function(exports2) {
"use strict";
var global = typeof globalThis !== "undefined" && globalThis || typeof self !== "undefined" && self || typeof global !== "undefined" && global;
// Feature detection for the host environment; each flag guards an optional
// code path of the fetch polyfill below.
var support = {
  searchParams: "URLSearchParams" in global,
  iterable: "Symbol" in global && "iterator" in Symbol,
  // Some platforms expose Blob but throw when constructed with no arguments,
  // so probe with a real construction attempt (IIFE evaluates to a boolean).
  blob: "FileReader" in global && "Blob" in global && function() {
    try {
      new Blob();
      return true;
    } catch (e) {
      return false;
    }
  }(),
  formData: "FormData" in global,
  arrayBuffer: "ArrayBuffer" in global
};
// True when `obj` is a DataView (prototype check rather than instanceof, as
// in the upstream polyfill). A falsy `obj` is returned as-is, matching the
// original short-circuit `obj && ...` behavior.
function isDataView(obj) {
  if (!obj) {
    return obj;
  }
  return DataView.prototype.isPrototypeOf(obj);
}
// Typed-array handling is optional; when ArrayBuffer exists, record every
// typed-array [[Class]] tag so isArrayBufferView can fall back to a
// toString-tag check on engines that predate ArrayBuffer.isView.
if (support.arrayBuffer) {
  var viewClasses = [
    "[object Int8Array]",
    "[object Uint8Array]",
    "[object Uint8ClampedArray]",
    "[object Int16Array]",
    "[object Uint16Array]",
    "[object Int32Array]",
    "[object Uint32Array]",
    "[object Float32Array]",
    "[object Float64Array]"
  ];
  // Prefer the native predicate; the fallback passes a falsy obj through.
  var isArrayBufferView = ArrayBuffer.isView || function(obj) {
    return obj && viewClasses.indexOf(Object.prototype.toString.call(obj)) > -1;
  };
}
// Validates and lowercases an HTTP header field name per the Fetch spec.
// Non-string inputs are stringified first; an empty name or one containing
// characters outside the RFC 7230 token set raises a TypeError.
function normalizeName(name) {
  var asString = typeof name === "string" ? name : String(name);
  var invalidTokenChar = /[^a-z0-9\-#$%&'*+.^_`|~!]/i;
  if (asString === "" || invalidTokenChar.test(asString)) {
    throw new TypeError('Invalid character in header field name: "' + asString + '"');
  }
  return asString.toLowerCase();
}
// Coerces a header value to a string (strings pass through untouched).
function normalizeValue(value) {
  return typeof value === "string" ? value : String(value);
}
// Produces a spec-shaped iterator over `items`, consuming the array as it
// goes (shift from the front). When the environment supports Symbol.iterator
// (see `support`), the returned object is itself iterable.
function iteratorFor(items) {
  var iterator = {
    next: function() {
      var nextValue = items.shift();
      var exhausted = nextValue === void 0;
      return { done: exhausted, value: nextValue };
    }
  };
  if (support.iterable) {
    iterator[Symbol.iterator] = function() {
      return iterator;
    };
  }
  return iterator;
}
// Minimal polyfill of the WHATWG Fetch `Headers` class. Names are normalized
// (validated + lowercased) via normalizeName and values via normalizeValue;
// all pairs live in the plain object `this.map`. The constructor accepts
// another Headers instance, an array of [name, value] pairs, or a plain
// object of name -> value.
function Headers(headers) {
  this.map = {};
  if (headers instanceof Headers) {
    headers.forEach(function(value, name) {
      this.append(name, value);
    }, this);
  } else if (Array.isArray(headers)) {
    headers.forEach(function(header) {
      this.append(header[0], header[1]);
    }, this);
  } else if (headers) {
    Object.getOwnPropertyNames(headers).forEach(function(name) {
      this.append(name, headers[name]);
    }, this);
  }
}
// Appends a value; repeated names are combined with ", " per the spec.
Headers.prototype.append = function(name, value) {
  name = normalizeName(name);
  value = normalizeValue(value);
  var oldValue = this.map[name];
  this.map[name] = oldValue ? oldValue + ", " + value : value;
};
// Removes a header (bracket syntax because `delete` is a reserved word).
Headers.prototype["delete"] = function(name) {
  delete this.map[normalizeName(name)];
};
// Returns the combined value for `name`, or null when absent.
Headers.prototype.get = function(name) {
  name = normalizeName(name);
  return this.has(name) ? this.map[name] : null;
};
Headers.prototype.has = function(name) {
  return this.map.hasOwnProperty(normalizeName(name));
};
// Replaces (rather than appends to) any existing value.
Headers.prototype.set = function(name, value) {
  this.map[normalizeName(name)] = normalizeValue(value);
};
Headers.prototype.forEach = function(callback, thisArg) {
  for (var name in this.map) {
    if (this.map.hasOwnProperty(name)) {
      callback.call(thisArg, this.map[name], name, this);
    }
  }
};
// keys()/values()/entries() snapshot the map into an array and hand it to
// iteratorFor, so mutation during iteration does not affect the sequence.
Headers.prototype.keys = function() {
  var items = [];
  this.forEach(function(value, name) {
    items.push(name);
  });
  return iteratorFor(items);
};
Headers.prototype.values = function() {
  var items = [];
  this.forEach(function(value) {
    items.push(value);
  });
  return iteratorFor(items);
};
Headers.prototype.entries = function() {
  var items = [];
  this.forEach(function(value, name) {
    items.push([name, value]);
  });
  return iteratorFor(items);
};
// Make Headers iterable (yielding [name, value] pairs) where supported.
if (support.iterable) {
  Headers.prototype[Symbol.iterator] = Headers.prototype.entries;
}
// Marks a Body as read. Returns a rejected promise if the body was already
// consumed (the caller checks for a truthy return), otherwise flags it used
// and returns undefined.
function consumed(body) {
  var rejection;
  if (body.bodyUsed) {
    rejection = Promise.reject(new TypeError("Already read"));
  } else {
    body.bodyUsed = true;
  }
  return rejection;
}
// Adapts a FileReader's load/error events into a promise that resolves with
// reader.result or rejects with reader.error.
function fileReaderReady(reader) {
  return new Promise(function(resolve, reject) {
    reader.onload = function() {
      resolve(reader.result);
    };
    reader.onerror = function() {
      reject(reader.error);
    };
  });
}
// Reads a Blob fully into an ArrayBuffer via FileReader, as a promise.
function readBlobAsArrayBuffer(blob) {
  var reader = new FileReader();
  // Attach the promise before starting the read so no event is missed.
  var promise = fileReaderReady(reader);
  reader.readAsArrayBuffer(blob);
  return promise;
}
// Reads a Blob fully into a text string via FileReader, as a promise.
function readBlobAsText(blob) {
  var reader = new FileReader();
  // Attach the promise before starting the read so no event is missed.
  var promise = fileReaderReady(reader);
  reader.readAsText(blob);
  return promise;
}
// Decode a buffer byte-by-byte via String.fromCharCode (Latin-1 style
// mapping of each byte to a code unit).
function readArrayBufferAsText(buf) {
  var bytes = new Uint8Array(buf);
  var pieces = [];
  for (var idx = 0; idx < bytes.length; idx++) {
    pieces.push(String.fromCharCode(bytes[idx]));
  }
  return pieces.join("");
}
// Copy a buffer: use the native slice when available, otherwise copy the
// bytes through a fresh typed-array view.
function bufferClone(buf) {
  if (!buf.slice) {
    var copyView = new Uint8Array(buf.byteLength);
    copyView.set(new Uint8Array(buf));
    return copyView.buffer;
  }
  return buf.slice(0);
}
// Body mix-in: implements the fetch body-reading interface (blob /
// arrayBuffer / text / formData / json) plus one-shot consumption tracking.
// Applied to Request.prototype and Response.prototype via Body.call(...).
function Body() {
this.bodyUsed = false;
// Classifies the raw body into one of the _body* slots and, when missing,
// infers a default content-type header from the body's kind.
this._initBody = function(body) {
// Self-assignment materializes bodyUsed as an own property of the instance.
this.bodyUsed = this.bodyUsed;
this._bodyInit = body;
if (!body) {
this._bodyText = "";
} else if (typeof body === "string") {
this._bodyText = body;
} else if (support.blob && Blob.prototype.isPrototypeOf(body)) {
this._bodyBlob = body;
} else if (support.formData && FormData.prototype.isPrototypeOf(body)) {
this._bodyFormData = body;
} else if (support.searchParams && URLSearchParams.prototype.isPrototypeOf(body)) {
this._bodyText = body.toString();
} else if (support.arrayBuffer && support.blob && isDataView(body)) {
// DataView: snapshot the underlying buffer so later mutation is not seen.
this._bodyArrayBuffer = bufferClone(body.buffer);
this._bodyInit = new Blob([this._bodyArrayBuffer]);
} else if (support.arrayBuffer && (ArrayBuffer.prototype.isPrototypeOf(body) || isArrayBufferView(body))) {
this._bodyArrayBuffer = bufferClone(body);
} else {
// Unknown body type: fall back to its Object.prototype.toString tag
// (also reassigns `body` so the string branch below applies text/plain).
this._bodyText = body = Object.prototype.toString.call(body);
}
if (!this.headers.get("content-type")) {
if (typeof body === "string") {
this.headers.set("content-type", "text/plain;charset=UTF-8");
} else if (this._bodyBlob && this._bodyBlob.type) {
this.headers.set("content-type", this._bodyBlob.type);
} else if (support.searchParams && URLSearchParams.prototype.isPrototypeOf(body)) {
this.headers.set("content-type", "application/x-www-form-urlencoded;charset=UTF-8");
}
}
};
if (support.blob) {
// Reads the body as a Blob; consumes the body.
this.blob = function() {
var rejected = consumed(this);
if (rejected) {
return rejected;
}
if (this._bodyBlob) {
return Promise.resolve(this._bodyBlob);
} else if (this._bodyArrayBuffer) {
return Promise.resolve(new Blob([this._bodyArrayBuffer]));
} else if (this._bodyFormData) {
throw new Error("could not read FormData body as blob");
} else {
return Promise.resolve(new Blob([this._bodyText]));
}
};
// Reads the body as an ArrayBuffer; consumes the body.
this.arrayBuffer = function() {
if (this._bodyArrayBuffer) {
var isConsumed = consumed(this);
if (isConsumed) {
return isConsumed;
}
if (ArrayBuffer.isView(this._bodyArrayBuffer)) {
// Typed-array view: slice out exactly the viewed byte range.
return Promise.resolve(this._bodyArrayBuffer.buffer.slice(this._bodyArrayBuffer.byteOffset, this._bodyArrayBuffer.byteOffset + this._bodyArrayBuffer.byteLength));
} else {
return Promise.resolve(this._bodyArrayBuffer);
}
} else {
return this.blob().then(readBlobAsArrayBuffer);
}
};
}
// Reads the body as text; consumes the body.
this.text = function() {
var rejected = consumed(this);
if (rejected) {
return rejected;
}
if (this._bodyBlob) {
return readBlobAsText(this._bodyBlob);
} else if (this._bodyArrayBuffer) {
return Promise.resolve(readArrayBufferAsText(this._bodyArrayBuffer));
} else if (this._bodyFormData) {
throw new Error("could not read FormData body as text");
} else {
return Promise.resolve(this._bodyText);
}
};
if (support.formData) {
// Reads the body as FormData by URL-decoding its text form.
this.formData = function() {
return this.text().then(decode);
};
}
// Reads the body as JSON.
this.json = function() {
return this.text().then(JSON.parse);
};
return this;
}
var methods = ["DELETE", "GET", "HEAD", "OPTIONS", "POST", "PUT"];
// Uppercase only the well-known HTTP verbs; any other method name is
// passed through untouched.
function normalizeMethod(method) {
  var upper = method.toUpperCase();
  return methods.indexOf(upper) === -1 ? method : upper;
}
// Request constructor of the fetch polyfill. `input` is a URL string or an
// existing Request to copy; `options` may override method/headers/body/etc.
function Request(input, options) {
if (!(this instanceof Request)) {
throw new TypeError('Please use the "new" operator, this DOM object constructor cannot be called as a function.');
}
options = options || {};
var body = options.body;
if (input instanceof Request) {
// Copying a Request whose body was already read is an error.
if (input.bodyUsed) {
throw new TypeError("Already read");
}
this.url = input.url;
this.credentials = input.credentials;
if (!options.headers) {
this.headers = new Headers(input.headers);
}
this.method = input.method;
this.mode = input.mode;
this.signal = input.signal;
if (!body && input._bodyInit != null) {
// Transfer the source request's body and mark the source consumed.
body = input._bodyInit;
input.bodyUsed = true;
}
} else {
this.url = String(input);
}
this.credentials = options.credentials || this.credentials || "same-origin";
if (options.headers || !this.headers) {
this.headers = new Headers(options.headers);
}
this.method = normalizeMethod(options.method || this.method || "GET");
this.mode = options.mode || this.mode || null;
this.signal = options.signal || this.signal;
this.referrer = null;
if ((this.method === "GET" || this.method === "HEAD") && body) {
throw new TypeError("Body not allowed for GET or HEAD requests");
}
this._initBody(body);
if (this.method === "GET" || this.method === "HEAD") {
if (options.cache === "no-store" || options.cache === "no-cache") {
// Cache busting: set or refresh a `_=<timestamp>` query parameter.
var reParamSearch = /([?&])_=[^&]*/;
if (reParamSearch.test(this.url)) {
this.url = this.url.replace(reParamSearch, "$1_=" + new Date().getTime());
} else {
var reQueryString = /\?/;
this.url += (reQueryString.test(this.url) ? "&" : "?") + "_=" + new Date().getTime();
}
}
}
}
// Clone by re-running the constructor with the cached raw body so the copy
// is independently readable.
Request.prototype.clone = function() {
  var init = { body: this._bodyInit };
  return new Request(this, init);
};
// Parse an application/x-www-form-urlencoded string into a FormData object.
function decode(body) {
  var form = new FormData();
  var pairs = body.trim().split("&");
  pairs.forEach(function(bytes) {
    if (!bytes) {
      return;
    }
    var split = bytes.split("=");
    var name = split.shift().replace(/\+/g, " ");
    var value = split.join("=").replace(/\+/g, " ");
    form.append(decodeURIComponent(name), decodeURIComponent(value));
  });
  return form;
}
// Builds a Headers object from XMLHttpRequest.getAllResponseHeaders() text.
function parseHeaders(rawHeaders) {
var headers = new Headers();
// Unfold continuation lines (line break followed by tab/space) into a space.
var preProcessedHeaders = rawHeaders.replace(/\r?\n[\t ]+/g, " ");
// Split on \r (handles \r\n and bare-\r endings); a leftover leading \n
// from a \r\n pair is stripped from each piece before parsing.
preProcessedHeaders.split("\r").map(function(header) {
return header.indexOf("\n") === 0 ? header.substr(1, header.length) : header;
}).forEach(function(line) {
// Only the first ":" separates name from value; values may contain ":".
var parts = line.split(":");
var key = parts.shift().trim();
if (key) {
var value = parts.join(":").trim();
headers.append(key, value);
}
});
return headers;
}
// Give Request the body-reading methods from the Body mix-in.
Body.call(Request.prototype);
// Response constructor of the fetch polyfill.
function Response(bodyInit, options) {
if (!(this instanceof Response)) {
throw new TypeError('Please use the "new" operator, this DOM object constructor cannot be called as a function.');
}
if (!options) {
options = {};
}
this.type = "default";
this.status = options.status === void 0 ? 200 : options.status;
// ok mirrors the spec: true for any 2xx status.
this.ok = this.status >= 200 && this.status < 300;
this.statusText = options.statusText === void 0 ? "" : "" + options.statusText;
this.headers = new Headers(options.headers);
this.url = options.url || "";
this._initBody(bodyInit);
}
// Give Response the body-reading methods from the Body mix-in.
Body.call(Response.prototype);
// Duplicate status line, headers and raw body into a fresh Response.
Response.prototype.clone = function() {
  var init = {
    status: this.status,
    statusText: this.statusText,
    headers: new Headers(this.headers),
    url: this.url
  };
  return new Response(this._bodyInit, init);
};
// Network-error responses carry status 0 and type "error".
Response.error = function() {
  var errorResponse = new Response(null, { status: 0, statusText: "" });
  errorResponse.type = "error";
  return errorResponse;
};
var redirectStatuses = [301, 302, 303, 307, 308];
// Only the redirect status codes defined by the spec are accepted.
Response.redirect = function(url, status) {
  if (redirectStatuses.indexOf(status) < 0) {
    throw new RangeError("Invalid status code");
  }
  return new Response(null, { status, headers: { location: url } });
};
// Use the native DOMException when it is constructible; otherwise (some
// engines throw on `new DOMException()`) install a minimal Error-based shim.
exports2.DOMException = global.DOMException;
try {
new exports2.DOMException();
} catch (err) {
exports2.DOMException = function(message, name) {
this.message = message;
this.name = name;
var error = Error(message);
this.stack = error.stack;
};
exports2.DOMException.prototype = Object.create(Error.prototype);
exports2.DOMException.prototype.constructor = exports2.DOMException;
}
// XMLHttpRequest-backed fetch() implementation.
function fetch2(input, init) {
return new Promise(function(resolve, reject) {
var request = new Request(input, init);
// An already-aborted signal rejects immediately, before any I/O.
if (request.signal && request.signal.aborted) {
return reject(new exports2.DOMException("Aborted", "AbortError"));
}
var xhr = new XMLHttpRequest();
function abortXhr() {
xhr.abort();
}
xhr.onload = function() {
var options = {
status: xhr.status,
statusText: xhr.statusText,
headers: parseHeaders(xhr.getAllResponseHeaders() || "")
};
// Prefer the XHR's final URL (after redirects) when the engine exposes it.
options.url = "responseURL" in xhr ? xhr.responseURL : options.headers.get("X-Request-URL");
var body = "response" in xhr ? xhr.response : xhr.responseText;
// setTimeout defers settling out of the XHR event handler.
setTimeout(function() {
resolve(new Response(body, options));
}, 0);
};
xhr.onerror = function() {
setTimeout(function() {
reject(new TypeError("Network request failed"));
}, 0);
};
xhr.ontimeout = function() {
setTimeout(function() {
reject(new TypeError("Network request failed"));
}, 0);
};
xhr.onabort = function() {
setTimeout(function() {
reject(new exports2.DOMException("Aborted", "AbortError"));
}, 0);
};
// An empty URL means "current location" when one is available; the
// try/catch guards environments where accessing location throws.
function fixUrl(url) {
try {
return url === "" && global.location.href ? global.location.href : url;
} catch (e) {
return url;
}
}
xhr.open(request.method, fixUrl(request.url), true);
if (request.credentials === "include") {
xhr.withCredentials = true;
} else if (request.credentials === "omit") {
xhr.withCredentials = false;
}
if ("responseType" in xhr) {
if (support.blob) {
xhr.responseType = "blob";
} else if (support.arrayBuffer && request.headers.get("Content-Type") && request.headers.get("Content-Type").indexOf("application/octet-stream") !== -1) {
xhr.responseType = "arraybuffer";
}
}
// Plain-object init.headers are sent with the caller's own property names;
// otherwise the normalized Headers collected on the request are sent.
if (init && typeof init.headers === "object" && !(init.headers instanceof Headers)) {
Object.getOwnPropertyNames(init.headers).forEach(function(name) {
xhr.setRequestHeader(name, normalizeValue(init.headers[name]));
});
} else {
request.headers.forEach(function(value, name) {
xhr.setRequestHeader(name, value);
});
}
if (request.signal) {
request.signal.addEventListener("abort", abortXhr);
xhr.onreadystatechange = function() {
// DONE: drop the abort listener so the signal no longer references xhr.
if (xhr.readyState === 4) {
request.signal.removeEventListener("abort", abortXhr);
}
};
}
xhr.send(typeof request._bodyInit === "undefined" ? null : request._bodyInit);
});
}
// Mark this implementation as the polyfill and install it (plus the
// companion classes) on the global only when no native fetch exists.
fetch2.polyfill = true;
if (!global.fetch) {
global.fetch = fetch2;
global.Headers = Headers;
global.Request = Request;
global.Response = Response;
}
exports2.Headers = Headers;
exports2.Request = Request;
exports2.Response = Response;
exports2.fetch = fetch2;
Object.defineProperty(exports2, "__esModule", { value: true });
});
}
});
// node_modules/isomorphic-fetch/fetch-npm-browserify.js
// Browser entry of isomorphic-fetch: ensures the whatwg-fetch polyfill has
// installed self.fetch, then exports it bound to `self`.
var require_fetch_npm_browserify = __commonJS({
"node_modules/isomorphic-fetch/fetch-npm-browserify.js"(exports, module) {
require_fetch_umd();
module.exports = self.fetch.bind(self);
}
});
// node_modules/strict-uri-encode/index.js
var require_strict_uri_encode = __commonJS({
"node_modules/strict-uri-encode/index.js"(exports, module) {
"use strict";
module.exports = (str) => encodeURIComponent(str).replace(/[!'()*]/g, (x) => `%${x.charCodeAt(0).toString(16).toUpperCase()}`);
}
});
// node_modules/decode-uri-component/index.js
var require_decode_uri_component = __commonJS({
"node_modules/decode-uri-component/index.js"(exports, module) {
"use strict";
// One percent-encoded byte, e.g. "%7B".
var token = "%[a-f0-9]{2}";
var singleMatcher = new RegExp(token, "gi");
var multiMatcher = new RegExp("(" + token + ")+", "gi");
// Tries to decode the token list as one string; on failure, bisects the
// list and decodes each half independently so the valid parts still decode.
function decodeComponents(components, split) {
try {
return decodeURIComponent(components.join(""));
} catch (err) {
}
if (components.length === 1) {
return components;
}
split = split || 1;
var left = components.slice(0, split);
var right = components.slice(split);
return Array.prototype.concat.call([], decodeComponents(left), decodeComponents(right));
}
// Best-effort decodeURIComponent: on failure, repeatedly re-splits the
// encoded tokens so everything decodable gets decoded.
function decode(input) {
try {
return decodeURIComponent(input);
} catch (err) {
var tokens = input.match(singleMatcher);
for (var i = 1; i < tokens.length; i++) {
input = decodeComponents(tokens, i).join("");
tokens = input.match(singleMatcher);
}
return input;
}
}
// Fallback for input that cannot be decoded directly: decodes each encoded
// run separately, mapping known-bad sequences (UTF-16 BOM byte pairs and a
// stray %C2 lead byte) to U+FFFD replacement characters.
function customDecodeURIComponent(input) {
var replaceMap = {
"%FE%FF": "\uFFFD\uFFFD",
"%FF%FE": "\uFFFD\uFFFD"
};
var match = multiMatcher.exec(input);
while (match) {
try {
replaceMap[match[0]] = decodeURIComponent(match[0]);
} catch (err) {
var result = decode(match[0]);
if (result !== match[0]) {
replaceMap[match[0]] = result;
}
}
match = multiMatcher.exec(input);
}
replaceMap["%C2"] = "\uFFFD";
var entries = Object.keys(replaceMap);
for (var i = 0; i < entries.length; i++) {
var key = entries[i];
input = input.replace(new RegExp(key, "g"), replaceMap[key]);
}
return input;
}
// Like decodeURIComponent, but "+" means space and malformed sequences are
// recovered instead of throwing.
module.exports = function(encodedURI) {
if (typeof encodedURI !== "string") {
throw new TypeError("Expected `encodedURI` to be of type `string`, got `" + typeof encodedURI + "`");
}
try {
encodedURI = encodedURI.replace(/\+/g, " ");
return decodeURIComponent(encodedURI);
} catch (err) {
return customDecodeURIComponent(encodedURI);
}
};
}
});
// node_modules/split-on-first/index.js
var require_split_on_first = __commonJS({
"node_modules/split-on-first/index.js"(exports, module) {
"use strict";
module.exports = (string, separator) => {
if (!(typeof string === "string" && typeof separator === "string")) {
throw new TypeError("Expected the arguments to be of type `string`");
}
if (separator === "") {
return [string];
}
const separatorIndex = string.indexOf(separator);
if (separatorIndex === -1) {
return [string];
}
return [
string.slice(0, separatorIndex),
string.slice(separatorIndex + separator.length)
];
};
}
});
// node_modules/filter-obj/index.js
var require_filter_obj = __commonJS({
"node_modules/filter-obj/index.js"(exports, module) {
"use strict";
module.exports = function(obj, predicate) {
var ret = {};
var keys = Object.keys(obj);
var isArr = Array.isArray(predicate);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var val = obj[key];
if (isArr ? predicate.indexOf(key) !== -1 : predicate(key, val, obj)) {
ret[key] = val;
}
}
return ret;
};
}
});
// node_modules/query-string/index.js
var require_query_string = __commonJS({
"node_modules/query-string/index.js"(exports) {
"use strict";
// Bundled copy of the query-string package.
var strictUriEncode = require_strict_uri_encode();
var decodeComponent = require_decode_uri_component();
var splitOnFirst = require_split_on_first();
var filterObject = require_filter_obj();
// Only null/undefined count as "missing"; 0 and "" are kept.
var isNullOrUndefined = (value) => value === null || value === void 0;
// Symbol-keyed internal option used by stringifyUrl/pick below.
var encodeFragmentIdentifier = Symbol("encodeFragmentIdentifier");
// Returns a reducer factory that serializes one key's array values in the
// style selected by options.arrayFormat.
function encoderForArrayFormat(options) {
switch (options.arrayFormat) {
case "index":
// foo[0]=a&foo[1]=b ; null values keep the bare "foo[n]" marker.
return (key) => (result, value) => {
const index = result.length;
if (value === void 0 || options.skipNull && value === null || options.skipEmptyString && value === "") {
return result;
}
if (value === null) {
return [...result, [encode(key, options), "[", index, "]"].join("")];
}
return [
...result,
[encode(key, options), "[", encode(index, options), "]=", encode(value, options)].join("")
];
};
case "bracket":
// foo[]=a&foo[]=b
return (key) => (result, value) => {
if (value === void 0 || options.skipNull && value === null || options.skipEmptyString && value === "") {
return result;
}
if (value === null) {
return [...result, [encode(key, options), "[]"].join("")];
}
return [...result, [encode(key, options), "[]=", encode(value, options)].join("")];
};
case "comma":
case "separator":
case "bracket-separator": {
// foo=a,b (or foo[]=a,b) joined with options.arrayFormatSeparator.
const keyValueSep = options.arrayFormat === "bracket-separator" ? "[]=" : "=";
return (key) => (result, value) => {
if (value === void 0 || options.skipNull && value === null || options.skipEmptyString && value === "") {
return result;
}
// null becomes an empty string so the separator slots stay aligned.
value = value === null ? "" : value;
if (result.length === 0) {
return [[encode(key, options), keyValueSep, encode(value, options)].join("")];
}
return [[result, encode(value, options)].join(options.arrayFormatSeparator)];
};
}
default:
// Repeated keys: foo=a&foo=b
return (key) => (result, value) => {
if (value === void 0 || options.skipNull && value === null || options.skipEmptyString && value === "") {
return result;
}
if (value === null) {
return [...result, encode(key, options)];
}
return [...result, [encode(key, options), "=", encode(value, options)].join("")];
};
}
}
// Returns the accumulator function parse() uses to fold each key/value pair
// into the result object, honoring options.arrayFormat.
function parserForArrayFormat(options) {
let result;
switch (options.arrayFormat) {
case "index":
// "foo[2]=x" → accumulator.foo = { 2: "x" } (flattened later by keysSorter).
return (key, value, accumulator) => {
result = /\[(\d*)\]$/.exec(key);
key = key.replace(/\[\d*\]$/, "");
if (!result) {
accumulator[key] = value;
return;
}
if (accumulator[key] === void 0) {
accumulator[key] = {};
}
accumulator[key][result[1]] = value;
};
case "bracket":
// "foo[]=x" → accumulator.foo = [..., "x"].
return (key, value, accumulator) => {
result = /(\[\])$/.exec(key);
key = key.replace(/\[\]$/, "");
if (!result) {
accumulator[key] = value;
return;
}
if (accumulator[key] === void 0) {
accumulator[key] = [value];
return;
}
accumulator[key] = [].concat(accumulator[key], value);
};
case "comma":
case "separator":
// "foo=a,b" → ["a","b"]; a value that only reveals the separator after
// decoding is decoded first, then split.
return (key, value, accumulator) => {
const isArray = typeof value === "string" && value.includes(options.arrayFormatSeparator);
const isEncodedArray = typeof value === "string" && !isArray && decode(value, options).includes(options.arrayFormatSeparator);
value = isEncodedArray ? decode(value, options) : value;
const newValue = isArray || isEncodedArray ? value.split(options.arrayFormatSeparator).map((item) => decode(item, options)) : value === null ? value : decode(value, options);
accumulator[key] = newValue;
};
case "bracket-separator":
// "foo[]=a,b" → ["a","b"]; a bare "foo[]" → [].
return (key, value, accumulator) => {
const isArray = /(\[\])$/.test(key);
key = key.replace(/\[\]$/, "");
if (!isArray) {
accumulator[key] = value ? decode(value, options) : value;
return;
}
const arrayValue = value === null ? [] : value.split(options.arrayFormatSeparator).map((item) => decode(item, options));
if (accumulator[key] === void 0) {
accumulator[key] = arrayValue;
return;
}
accumulator[key] = [].concat(accumulator[key], arrayValue);
};
default:
// Repeated keys collect into an array; single keys stay scalar.
return (key, value, accumulator) => {
if (accumulator[key] === void 0) {
accumulator[key] = value;
return;
}
accumulator[key] = [].concat(accumulator[key], value);
};
}
}
// Separators are restricted to exactly one character.
function validateArrayFormatSeparator(value) {
  const isSingleChar = typeof value === "string" && value.length === 1;
  if (!isSingleChar) {
    throw new TypeError("arrayFormatSeparator must be single character string");
  }
}
// Encoding is optional; "strict" mode additionally escapes !'()* per
// RFC 3986 via strictUriEncode.
function encode(value, options) {
  if (!options.encode) {
    return value;
  }
  return options.strict ? strictUriEncode(value) : encodeURIComponent(value);
}
// Decoding is optional so already-decoded input can pass through untouched.
function decode(value, options) {
  return options.decode ? decodeComponent(value) : value;
}
// Arrays sort in place; objects are flattened to their values ordered by
// numeric key (used for the "index" array format); scalars pass through.
function keysSorter(input) {
  if (Array.isArray(input)) {
    return input.sort();
  }
  if (typeof input === "object") {
    const orderedKeys = keysSorter(Object.keys(input)).sort((a, b) => Number(a) - Number(b));
    return orderedKeys.map((key) => input[key]);
  }
  return input;
}
// Strip everything from the first "#" onwards.
function removeHash(input) {
  const hashAt = input.indexOf("#");
  return hashAt === -1 ? input : input.slice(0, hashAt);
}
// Return the fragment including the leading "#", or "" when absent.
function getHash(url) {
  const hashAt = url.indexOf("#");
  return hashAt === -1 ? "" : url.slice(hashAt);
}
// Query component of a URL, without the leading "?" (fragment excluded).
function extract(input) {
  const withoutHash = removeHash(input);
  const queryAt = withoutHash.indexOf("?");
  return queryAt === -1 ? "" : withoutHash.slice(queryAt + 1);
}
// Optionally coerce numeric-looking strings to numbers and "true"/"false"
// (any casing) to booleans; anything else passes through unchanged.
function parseValue(value, options) {
  const isNumeric = options.parseNumbers && !Number.isNaN(Number(value)) && (typeof value === "string" && value.trim() !== "");
  if (isNumeric) {
    return Number(value);
  }
  const isBooleanish = options.parseBooleans && value !== null && (value.toLowerCase() === "true" || value.toLowerCase() === "false");
  if (isBooleanish) {
    return value.toLowerCase() === "true";
  }
  return value;
}
// Parses a query string (leading ?/#/& tolerated) into a null-prototype
// object, applying array-format handling, optional number/boolean coercion
// and optional key sorting.
function parse(query, options) {
options = Object.assign({
decode: true,
sort: true,
arrayFormat: "none",
arrayFormatSeparator: ",",
parseNumbers: false,
parseBooleans: false
}, options);
validateArrayFormatSeparator(options.arrayFormatSeparator);
const formatter = parserForArrayFormat(options);
// Null prototype guards against prototype-pollution style keys.
const ret = Object.create(null);
if (typeof query !== "string") {
return ret;
}
query = query.trim().replace(/^[?#&]/, "");
if (!query) {
return ret;
}
for (const param of query.split("&")) {
if (param === "") {
continue;
}
// "+" only means space when decoding is enabled.
let [key, value] = splitOnFirst(options.decode ? param.replace(/\+/g, " ") : param, "=");
// A key with no "=" yields null; separator-style formats defer decoding
// to the formatter so the separator itself is not decoded away early.
value = value === void 0 ? null : ["comma", "separator", "bracket-separator"].includes(options.arrayFormat) ? value : decode(value, options);
formatter(decode(key, options), value, ret);
}
// Coerce values (and array/object members) after all pairs are folded in.
for (const key of Object.keys(ret)) {
const value = ret[key];
if (typeof value === "object" && value !== null) {
for (const k of Object.keys(value)) {
value[k] = parseValue(value[k], options);
}
} else {
ret[key] = parseValue(value, options);
}
}
if (options.sort === false) {
return ret;
}
// sort === true sorts keys lexicographically; a function is used as the
// comparator; the sorted keys are rebuilt into a fresh null-prototype object.
return (options.sort === true ? Object.keys(ret).sort() : Object.keys(ret).sort(options.sort)).reduce((result, key) => {
const value = ret[key];
if (Boolean(value) && typeof value === "object" && !Array.isArray(value)) {
// "index" format leaves plain objects; flatten them to value arrays.
result[key] = keysSorter(value);
} else {
result[key] = value;
}
return result;
}, Object.create(null));
}
exports.extract = extract;
exports.parse = parse;
// Serializes an object into a query string, honoring skipNull /
// skipEmptyString, the configured array format and optional key sorting.
exports.stringify = (object, options) => {
if (!object) {
return "";
}
options = Object.assign({
encode: true,
strict: true,
arrayFormat: "none",
arrayFormatSeparator: ","
}, options);
validateArrayFormatSeparator(options.arrayFormatSeparator);
const shouldFilter = (key) => options.skipNull && isNullOrUndefined(object[key]) || options.skipEmptyString && object[key] === "";
const formatter = encoderForArrayFormat(options);
const objectCopy = {};
for (const key of Object.keys(object)) {
if (!shouldFilter(key)) {
objectCopy[key] = object[key];
}
}
const keys = Object.keys(objectCopy);
if (options.sort !== false) {
keys.sort(options.sort);
}
return keys.map((key) => {
const value = object[key];
if (value === void 0) {
return "";
}
if (value === null) {
// Flag-style parameter: key with no "=".
return encode(key, options);
}
if (Array.isArray(value)) {
if (value.length === 0 && options.arrayFormat === "bracket-separator") {
return encode(key, options) + "[]";
}
return value.reduce(formatter(key), []).join("&");
}
return encode(key, options) + "=" + encode(value, options);
}).filter((x) => x.length > 0).join("&");
};
// Splits a URL into { url, query } (plus fragmentIdentifier when
// options.parseFragmentIdentifier is set and a fragment exists).
exports.parseUrl = (url, options) => {
options = Object.assign({
decode: true
}, options);
const [url_, hash] = splitOnFirst(url, "#");
return Object.assign({
url: url_.split("?")[0] || "",
query: parse(extract(url), options)
}, options && options.parseFragmentIdentifier && hash ? { fragmentIdentifier: decode(hash, options) } : {});
};
// Rebuilds a URL from { url, query, fragmentIdentifier }: query params found
// inside object.url are merged with (and overridden by) object.query.
exports.stringifyUrl = (object, options) => {
options = Object.assign({
encode: true,
strict: true,
[encodeFragmentIdentifier]: true
}, options);
const url = removeHash(object.url).split("?")[0] || "";
const queryFromUrl = exports.extract(object.url);
const parsedQueryFromUrl = exports.parse(queryFromUrl, { sort: false });
const query = Object.assign(parsedQueryFromUrl, object.query);
let queryString = exports.stringify(query, options);
if (queryString) {
queryString = `?${queryString}`;
}
// The fragment from object.url is kept unless an explicit
// fragmentIdentifier replaces it.
let hash = getHash(object.url);
if (object.fragmentIdentifier) {
hash = `#${options[encodeFragmentIdentifier] ? encode(object.fragmentIdentifier, options) : object.fragmentIdentifier}`;
}
return `${url}${queryString}${hash}`;
};
// Returns `input` keeping only the query parameters accepted by `filter`
// (an array of names or a (key, value) predicate).
exports.pick = (input, filter, options) => {
options = Object.assign({
parseFragmentIdentifier: true,
[encodeFragmentIdentifier]: false
}, options);
const { url, query, fragmentIdentifier } = exports.parseUrl(input, options);
return exports.stringifyUrl({
url,
query: filterObject(query, filter),
fragmentIdentifier
}, options);
};
// Complement of pick: drops the parameters matched by `filter`.
exports.exclude = (input, filter, options) => {
const exclusionFilter = Array.isArray(filter) ? (key) => !filter.includes(key) : (key, value) => !filter(key, value);
return exports.pick(input, exclusionFilter, options);
};
}
});
// data/rules.json
var require_rules = __commonJS({
"data/rules.json"(exports, module) {
module.exports = [
{
id: "CASING",
description: "Detecting uppercase words where lowercase is required and vice versa."
},
{
id: "COLLOQUIALISMS",
description: "Colloquial style."
},
{
id: "COMPOUNDING",
description: "Rules about spelling terms as one word or as as separate words."
},
{
id: "CONFUSED_WORDS",
description: "Words that are easily confused, like 'there' and 'their' in English."
},
{
id: "FALSE_FRIENDS",
description: "Words easily confused by language learners because a similar word exists in their native language."
},
{
id: "GENDER_NEUTRALITY",
description: ""
},
{
id: "GRAMMAR",
description: ""
},
{
id: "MISC",
description: "Miscellaneous rules that don't fit elsewhere."
},
{
id: "PUNCTUATION",
description: ""
},
{
id: "REDUNDANCY",
description: ""
},
{
id: "REGIONALISMS",
description: "Words used only in another language variant or used with different meanings."
},
{
id: "REPETITIONS",
description: ""
},
{
id: "SEMANTICS",
description: "Logic, content, and consistency problems."
},
{
id: "STYLE",
description: "General style issues not covered by other categories, like overly verbose wording."
},
{
id: "TYPOGRAPHY",
description: "Problems like incorrectly used dash or quote characters."
},
{
id: "TYPOS",
description: "Spelling issues."
}
];
}
});
// src/validators/rules.js
var require_rules2 = __commonJS({
"src/validators/rules.js"(exports, module) {
var rules = require_rules();
var ruleOptions = rules.map((rule) => rule.id.toLowerCase());
var isRule = (value) => {
return ruleOptions.includes(value);
};
module.exports = {
ruleOptions,
isRule
};
}
});
// src/initialConfig.js
var require_initialConfig = __commonJS({
  "src/initialConfig.js"(exports, module) {
    var { ruleOptions } = require_rules2();
    // Every known rule category starts out enabled.
    var rules = {};
    for (const rule of ruleOptions) {
      rules[rule] = true;
    }
    // Defaults point at the public LanguageTool API with US English.
    var initialConfig = {
      api_url: "https://api.languagetool.org/v2/check",
      api_key: "",
      dictionary: [],
      language: "en-US",
      rules
    };
    module.exports = initialConfig;
  }
});
// src/utils/prepareMarkdown.js
var require_prepareMarkdown = __commonJS({
"src/utils/prepareMarkdown.js"(exports) {
// esbuild interop helpers, re-emitted inside this nested sub-bundle.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __markAsModule = (target) => __defProp(target, "__esModule", { value: true });
// Lazily evaluates a CommonJS module body once and caches its exports.
var __commonJS2 = (cb, mod) => function __require() {
return mod || (0, cb[Object.keys(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Defines live getters on target for each named export.
var __export = (target, all2) => {
__markAsModule(target);
for (var name in all2)
__defProp(target, name, { get: all2[name], enumerable: true });
};
// Copies enumerable exports (except "default") from module2 onto target.
var __reExport = (target, module2, desc) => {
if (module2 && typeof module2 === "object" || typeof module2 === "function") {
for (let key of __getOwnPropNames(module2))
if (!__hasOwnProp.call(target, key) && key !== "default")
__defProp(target, key, {
get: () => module2[key],
enumerable: !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable
});
}
return target;
};
// Wraps a CommonJS export object so it can be consumed as an ES namespace,
// mapping the module onto the "default" slot.
var __toModule = (module2) => {
return __reExport(__markAsModule(__defProp(module2 != null ? __create(__getProtoOf(module2)) : {}, "default", module2 && module2.__esModule && "default" in module2 ? { get: () => module2.default, enumerable: true } : { value: module2, enumerable: true })), module2);
};
// printf-style formatting (the "format" package, UMD build).
var require_format = __commonJS2({
"node_modules/format/format.js"(exports2, module2) {
;
(function() {
var namespace;
if (typeof module2 !== "undefined") {
namespace = module2.exports = format;
} else {
// No module system: attach to the global object.
namespace = function() {
return this || (1, eval)("this");
}();
}
namespace.format = format;
namespace.vsprintf = vsprintf;
if (typeof console !== "undefined" && typeof console.log === "function") {
namespace.printf = printf;
}
// Like format(), but writes the result to console.log.
function printf() {
console.log(format.apply(null, arguments));
}
// Like format(), but takes the replacements as an array.
function vsprintf(fmt, replacements) {
return format.apply(null, [fmt].concat(replacements));
}
// Scans fmt for %-directives (%b %c %d %f %j %o %s %x %X, with optional
// "." / "0." precision prefixes) and substitutes the remaining arguments.
function format(fmt) {
var argIndex = 1, args = [].slice.call(arguments), i = 0, n = fmt.length, result = "", c, escaped = false, arg, tmp, leadingZero = false, precision, nextArg = function() {
return args[argIndex++];
}, slurpNumber = function() {
var digits = "";
while (/\d/.test(fmt[i])) {
digits += fmt[i++];
c = fmt[i];
}
return digits.length > 0 ? parseInt(digits) : null;
};
for (; i < n; ++i) {
c = fmt[i];
if (escaped) {
escaped = false;
if (c == ".") {
// "%.Nf": drop the leading zero of the formatted float.
leadingZero = false;
c = fmt[++i];
} else if (c == "0" && fmt[i + 1] == ".") {
// "%0.Nf": keep the leading zero.
leadingZero = true;
i += 2;
c = fmt[i];
} else {
leadingZero = true;
}
precision = slurpNumber();
switch (c) {
case "b":
result += parseInt(nextArg(), 10).toString(2);
break;
case "c":
arg = nextArg();
if (typeof arg === "string" || arg instanceof String)
result += arg;
else
result += String.fromCharCode(parseInt(arg, 10));
break;
case "d":
result += parseInt(nextArg(), 10);
break;
case "f":
tmp = String(parseFloat(nextArg()).toFixed(precision || 6));
result += leadingZero ? tmp : tmp.replace(/^0/, "");
break;
case "j":
result += JSON.stringify(nextArg());
break;
case "o":
result += "0" + parseInt(nextArg(), 10).toString(8);
break;
case "s":
result += nextArg();
break;
case "x":
result += "0x" + parseInt(nextArg(), 10).toString(16);
break;
case "X":
result += "0x" + parseInt(nextArg(), 10).toString(16).toUpperCase();
break;
default:
// Unknown directive: emit the character verbatim ("%%" → "%").
result += c;
break;
}
} else if (c === "%") {
escaped = true;
} else {
result += c;
}
}
return result;
}
})();
}
});
var require_is_buffer = __commonJS2({
  "node_modules/is-buffer/index.js"(exports2, module2) {
    // Detects Buffer instances via their constructor's isBuffer method,
    // without importing Node's Buffer directly.
    module2.exports = function isBuffer2(obj) {
      if (obj == null || obj.constructor == null) {
        return false;
      }
      return typeof obj.constructor.isBuffer === "function" && obj.constructor.isBuffer(obj);
    };
  }
});
// jQuery-style extend(): shallow by default, deep when the first argument
// is `true`; guards __proto__ access to avoid prototype pollution.
var require_extend = __commonJS2({
"node_modules/extend/index.js"(exports2, module2) {
"use strict";
var hasOwn = Object.prototype.hasOwnProperty;
var toStr = Object.prototype.toString;
var defineProperty = Object.defineProperty;
var gOPD = Object.getOwnPropertyDescriptor;
var isArray = function isArray2(arr) {
if (typeof Array.isArray === "function") {
return Array.isArray(arr);
}
return toStr.call(arr) === "[object Array]";
};
// A "plain" object: object-tagged with an Object-like constructor.
var isPlainObject2 = function isPlainObject3(obj) {
if (!obj || toStr.call(obj) !== "[object Object]") {
return false;
}
var hasOwnConstructor = hasOwn.call(obj, "constructor");
var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, "isPrototypeOf");
if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
return false;
}
// Iterate to the last enumerable key: if it is an own property, all
// earlier (own-first) keys are own as well.
var key;
for (key in obj) {
}
return typeof key === "undefined" || hasOwn.call(obj, key);
};
// Assigns via defineProperty for "__proto__" so the prototype chain is
// never mutated by a plain assignment.
var setProperty = function setProperty2(target, options) {
if (defineProperty && options.name === "__proto__") {
defineProperty(target, options.name, {
enumerable: true,
configurable: true,
value: options.newValue,
writable: true
});
} else {
target[options.name] = options.newValue;
}
};
// Reads "__proto__" only as an own property (via descriptor when possible).
var getProperty = function getProperty2(obj, name) {
if (name === "__proto__") {
if (!hasOwn.call(obj, name)) {
return void 0;
} else if (gOPD) {
return gOPD(obj, name).value;
}
}
return obj[name];
};
// extend([deep], target, ...sources): copies enumerable properties of each
// source onto target; deep mode recursively merges plain objects/arrays.
module2.exports = function extend2() {
var options, name, src, copy, copyIsArray, clone;
var target = arguments[0];
var i = 1;
var length = arguments.length;
var deep = false;
if (typeof target === "boolean") {
deep = target;
target = arguments[1] || {};
i = 2;
}
if (target == null || typeof target !== "object" && typeof target !== "function") {
target = {};
}
for (; i < length; ++i) {
options = arguments[i];
if (options != null) {
for (name in options) {
src = getProperty(target, name);
copy = getProperty(options, name);
// Skip self-references to avoid infinite recursion.
if (target !== copy) {
if (deep && copy && (isPlainObject2(copy) || (copyIsArray = isArray(copy)))) {
if (copyIsArray) {
copyIsArray = false;
clone = src && isArray(src) ? src : [];
} else {
clone = src && isPlainObject2(src) ? src : {};
}
setProperty(target, {
name,
newValue: extend2(deep, clone, copy)
});
} else if (typeof copy !== "undefined") {
setProperty(target, { name, newValue: copy });
}
}
}
}
}
return target;
};
}
});
__export(exports, {
default: () => prepareMarkdown_default
});
// Default adapters mapping an AST whose nodes carry position offsets onto
// annotated-text input: how to find children, which nodes count as text,
// and how markup between text nodes should be interpreted.
var defaults = {
children(node) {
return node.children;
},
// Only literal "text" nodes become checkable text; everything else is null.
annotatetextnode(node, text3) {
if (node.type === "text") {
return {
offset: {
end: node.position.end.offset,
start: node.position.start.offset
},
text: text3.substring(node.position.start.offset, node.position.end.offset)
};
} else {
return null;
}
},
// By default markup is passed through unchanged.
interpretmarkup(text3 = "") {
return text3;
}
};
// Depth-first walk of the AST collecting every non-null annotation the
// options.annotatetextnode adapter produces, in document order.
function collecttextnodes(ast, text3, options = defaults) {
  const textannotations = [];
  const visit = (node) => {
    const annotation = options.annotatetextnode(node, text3);
    if (annotation !== null) {
      textannotations.push(annotation);
    }
    const children = options.children(node);
    if (children !== null && Array.isArray(children)) {
      children.forEach(visit);
    }
  };
  visit(ast);
  return textannotations;
}
// Interleaves markup spans between the collected text nodes so the whole of
// `text3` is covered: markup before each text node, the text node itself,
// then the trailing markup after the last node. Markup entries carry an
// `interpretAs` produced by `options.interpretmarkup`.
function composeannotation(text3, annotatedtextnodes, options = defaults) {
  const annotations = [];
  let lastEnd = 0;
  const pushMarkup = (from, to) => {
    const markup = text3.substring(from, to);
    annotations.push({
      interpretAs: options.interpretmarkup(markup),
      markup,
      offset: { end: to, start: from }
    });
  };
  for (const current of annotatedtextnodes) {
    pushMarkup(lastEnd, current.offset.start);
    annotations.push(current);
    lastEnd = current.offset.end;
  }
  pushMarkup(lastEnd, text3.length);
  return { annotation: annotations };
}
// Entry point: parses `text3` with the supplied parser, collects its text
// nodes, and returns the full annotated-text structure.
function build(text3, parse3, options = defaults) {
  const tree = parse3(text3);
  const textnodes = collecttextnodes(tree, text3, options);
  return composeannotation(text3, textnodes, options);
}
var import_format = __toModule(require_format());
// Vendored `fault`: `fault("msg %s", x)` builds a printf-formatted Error;
// `fault.range(...)`, `fault.type(...)`, etc. do the same for the other
// built-in error constructors.
var fault = Object.assign(create(Error), {
  eval: create(EvalError),
  range: create(RangeError),
  reference: create(ReferenceError),
  syntax: create(SyntaxError),
  type: create(TypeError),
  uri: create(URIError)
});
// Wraps `Constructor` in a factory that formats its message with the
// bundled `format` (sprintf-style) before constructing the error.
function create(Constructor) {
  function FormattedError(format, ...values) {
    // Falsy `format` (e.g. undefined) is passed through unformatted.
    const reason = format ? (0, import_format.default)(format, ...values) : format;
    return new Constructor(reason);
  }
  FormattedError.displayName = Constructor.displayName || Constructor.name;
  return FormattedError;
}
// Cached hasOwnProperty, used for safe own-key checks in `matter`.
var own = {}.hasOwnProperty;
// Built-in front-matter presets mapping type name to fence marker character
// (repeated three times by `fence`: "---" for YAML, "+++" for TOML).
var markers = {
yaml: "-",
toml: "+"
};
// Normalizes the frontmatter `options` (a preset name, a matter object, or
// an array of either) into an array of validated matter objects.
function matters(options = "yaml") {
  const list = Array.isArray(options) ? options : [options];
  return list.map((option) => matter(option));
}
// Validates a single matter option. Strings must be a known preset from
// `markers`; objects must carry a `type` plus either a `marker` or a
// `fence`. Invalid input raises via `fault`.
function matter(option) {
  let result;
  if (typeof option === "string") {
    if (!own.call(markers, option)) {
      throw fault("Missing matter definition for `%s`", option);
    }
    result = {
      type: option,
      marker: markers[option]
    };
  } else {
    if (typeof option !== "object") {
      throw fault("Expected matter to be an object, not `%j`", option);
    }
    result = option;
  }
  if (!own.call(result, "type")) {
    throw fault("Missing `type` in matter `%j`", result);
  }
  if (!own.call(result, "marker") && !own.call(result, "fence")) {
    throw fault("Missing `marker` or `fence` in matter `%j`", result);
  }
  return result;
}
// micromark's Unicode punctuation class (CommonMark "punctuation character"):
// ASCII punctuation plus the BMP punctuation ranges, used via `regexCheck`.
var unicodePunctuationRegex = /[!-/:-@[-`{-~\u00A1\u00A7\u00AB\u00B6\u00B7\u00BB\u00BF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/;
// Character-class predicates built from `regexCheck`: each takes a character
// code and reports whether the corresponding character matches.
var asciiAlpha = regexCheck(/[A-Za-z]/);
var asciiDigit = regexCheck(/\d/);
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
// "atext" per the autolink email grammar: printable ASCII minus specials.
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
// True for ASCII control characters (C0 range below 0x20, plus DEL 0x7F);
// `null` (EOF in micromark) is never a control character.
function asciiControl(code) {
  if (code === null) {
    return false;
  }
  return code < 32 || code === 127;
}
// True for a space (32) or any negative code (micromark's virtual
// characters: tab expansions and line endings); false for `null` (EOF).
function markdownLineEndingOrSpace(code) {
  return code !== null && (code === 32 || code < 0);
}
// True only for micromark's virtual line-ending codes (anything below -2);
// -2/-1 are virtual spaces, and `null` (EOF) is excluded.
function markdownLineEnding(code) {
  if (code === null) {
    return false;
  }
  return code < -2;
}
// True for a literal space (32) or micromark's virtual space codes (-1, -2).
function markdownSpace(code) {
  return code === 32 || code === -1 || code === -2;
}
// Predicates over character codes for Unicode whitespace and the CommonMark
// punctuation class defined by `unicodePunctuationRegex`.
var unicodeWhitespace = regexCheck(/\s/);
var unicodePunctuation = regexCheck(unicodePunctuationRegex);
// Lifts a single-character regex into a predicate over character codes;
// `null` (EOF) never matches.
function regexCheck(regex) {
  return function check(code) {
    if (code === null) {
      return false;
    }
    return regex.test(String.fromCharCode(code));
  };
}
// Builds the micromark syntax extension for frontmatter: a `flow` map from
// the first character code of each opening fence to the construct(s) that
// tokenize that matter type.
function frontmatter(options) {
  const settings = matters(options);
  const flow3 = {};
  for (const matter2 of settings) {
    const code = fence(matter2, "open").charCodeAt(0);
    const construct = parse(matter2);
    if (code in flow3) {
      flow3[code].push(construct);
    } else {
      flow3[code] = [construct];
    }
  }
  return {
    flow: flow3
  };
}
// Builds the micromark construct for one matter type: a concrete flow
// construct that tokenizes an opening fence, raw value lines, and a closing
// fence. `buffer2` holds the fence text currently being matched and is
// swapped from "open" to "close" after the opening fence succeeds.
function parse(matter2) {
const name = matter2.type;
const anywhere = matter2.anywhere;
const valueType = name + "Value";
const fenceType = name + "Fence";
const sequenceType = fenceType + "Sequence";
const fenceConstruct = {
tokenize: tokenizeFence,
partial: true
};
let buffer2;
return {
tokenize: tokenizeFrontmatter,
concrete: true
};
function tokenizeFrontmatter(effects, ok, nok) {
const self2 = this;
return start;
function start(code) {
const position2 = self2.now();
// Must start at column 1, and on line 1 unless `anywhere` is set.
if (position2.column !== 1 || !anywhere && position2.line !== 1) {
return nok(code);
}
effects.enter(name);
buffer2 = fence(matter2, "open");
return effects.attempt(fenceConstruct, afterOpeningFence, nok)(code);
}
function afterOpeningFence(code) {
// From here on, fence matching targets the closing fence.
buffer2 = fence(matter2, "close");
return lineEnd(code);
}
function lineStart(code) {
if (code === null || markdownLineEnding(code)) {
return lineEnd(code);
}
effects.enter(valueType);
return lineData(code);
}
function lineData(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit(valueType);
return lineEnd(code);
}
effects.consume(code);
return lineData;
}
function lineEnd(code) {
// EOF before the closing fence: the whole frontmatter attempt fails.
if (code === null) {
return nok(code);
}
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
// Each new line is first tried as a closing fence, else as value data.
return effects.attempt(fenceConstruct, after, lineStart);
}
function after(code) {
effects.exit(name);
return ok(code);
}
}
// Partial construct matching `buffer2` exactly, then optional trailing
// whitespace, then a line ending or EOF.
function tokenizeFence(effects, ok, nok) {
let bufferIndex = 0;
return start;
function start(code) {
if (code === buffer2.charCodeAt(bufferIndex)) {
effects.enter(fenceType);
effects.enter(sequenceType);
return insideSequence(code);
}
return nok(code);
}
function insideSequence(code) {
if (bufferIndex === buffer2.length) {
effects.exit(sequenceType);
if (markdownSpace(code)) {
effects.enter("whitespace");
return insideWhitespace(code);
}
return fenceEnd(code);
}
if (code === buffer2.charCodeAt(bufferIndex++)) {
effects.consume(code);
return insideSequence;
}
return nok(code);
}
function insideWhitespace(code) {
if (markdownSpace(code)) {
effects.consume(code);
return insideWhitespace;
}
effects.exit("whitespace");
return fenceEnd(code);
}
function fenceEnd(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit(fenceType);
return ok(code);
}
return nok(code);
}
}
}
// Resolves the literal fence line for a matter: a `marker` character is
// repeated three times (e.g. "-" -> "---"); an explicit `fence` is used
// verbatim. `prop` selects the "open" or "close" variant.
function fence(matter2, prop) {
  if (matter2.marker) {
    return pick(matter2.marker, prop).repeat(3);
  }
  return pick(matter2.fence, prop);
}
// A schema is either a single string (used for both open and close) or an
// object with per-`prop` ("open"/"close") values.
function pick(schema, prop) {
  if (typeof schema === "string") {
    return schema;
  }
  return schema[prop];
}
// Builds the mdast-util-from-markdown extension: for each matter type,
// register an enter handler that opens a node and buffering, and exit
// handlers that capture its raw value.
function frontmatterFromMarkdown(options) {
  const enter = {};
  const exit2 = {};
  for (const matter2 of matters(options)) {
    enter[matter2.type] = opener(matter2);
    exit2[matter2.type] = close;
    exit2[matter2.type + "Value"] = value;
  }
  return { enter, exit: exit2 };
}
// Returns a from-markdown enter handler that opens an empty node of the
// matter's type and switches the compiler into buffering mode so the raw
// value can be resumed later. Must be a regular function: `this` is the
// compiler context.
function opener(matter2) {
  return function open(token) {
    this.enter({ type: matter2.type, value: "" }, token);
    this.buffer();
  };
}
// From-markdown exit handler: resumes the buffered raw text, strips a single
// leading and trailing line ending, and stores it as the node's value.
function close(token) {
  const raw = this.resume();
  const stripped = raw.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
  this.exit(token).value = stripped;
}
// From-markdown handler for the matter's value token: delegates to the
// configured generic `data` enter/exit handlers so the raw text is recorded.
function value(token) {
  const { enter, exit: exitHandlers } = this.config;
  enter.data.call(this, token);
  exitHandlers.data.call(this, token);
}
// Builds the mdast-util-to-markdown extension: a serializer per matter type
// and an `unsafe` entry so the fence's first character is escaped at line
// breaks in regular content.
function frontmatterToMarkdown(options) {
  const unsafe = [];
  const handlers = {};
  for (const matter2 of matters(options)) {
    handlers[matter2.type] = handler(matter2);
    unsafe.push({ atBreak: true, character: fence2(matter2, "open").charAt(0) });
  }
  return { unsafe, handlers };
}
// Returns a to-markdown handler serializing a frontmatter node as
// open fence, optional value, close fence, each on its own line.
function handler(matter2) {
  const open = fence2(matter2, "open");
  const close2 = fence2(matter2, "close");
  return function handle(node) {
    const body = node.value ? "\n" + node.value : "";
    return open + body + "\n" + close2;
  };
}
// Serializer-side duplicate of `fence`: marker characters are tripled,
// explicit fences used as-is; `prop` is "open" or "close".
function fence2(matter2, prop) {
  if (matter2.marker) {
    return pick2(matter2.marker, prop).repeat(3);
  }
  return pick2(matter2.fence, prop);
}
// Serializer-side duplicate of `pick`: a plain string serves both sides,
// otherwise look up the "open"/"close" member.
function pick2(schema, prop) {
  if (typeof schema === "string") {
    return schema;
  }
  return schema[prop];
}
// The remark plugin: registers the micromark, from-markdown, and to-markdown
// frontmatter extensions on the processor's data. `this` is the unified
// processor.
function remarkFrontmatter(options = "yaml") {
  const data = this.data();
  const add = (field, value2) => {
    const list2 = data[field] || (data[field] = []);
    list2.push(value2);
  };
  add("micromarkExtensions", frontmatter(options));
  add("fromMarkdownExtensions", frontmatterFromMarkdown(options));
  add("toMarkdownExtensions", frontmatterToMarkdown(options));
}
// mdast-util-to-string entry: serializes a node (or node list) to its plain
// text; image `alt` text is included unless disabled via options.
function toString(node, options) {
  const settings = options || {};
  const includeImageAlt = settings.includeImageAlt === void 0 ? true : settings.includeImageAlt;
  return one(node, includeImageAlt);
}
// Text of a single node: its own `value` wins, then (optionally) its image
// `alt`, then the concatenated text of its children, then — if the node is
// itself an array — the concatenated text of its items. Falls back to "".
function one(node, includeImageAlt) {
  if (!node || typeof node !== "object") {
    return "";
  }
  if (node.value) {
    return node.value;
  }
  if (includeImageAlt && node.alt) {
    return node.alt;
  }
  if ("children" in node) {
    const joined = all(node.children, includeImageAlt);
    if (joined) {
      return joined;
    }
  }
  if (Array.isArray(node)) {
    const joined = all(node, includeImageAlt);
    if (joined) {
      return joined;
    }
  }
  return "";
}
// Concatenates the text of every node in `values` (see `one`).
function all(values, includeImageAlt) {
  const parts = [];
  for (let position2 = 0; position2 < values.length; position2++) {
    parts.push(one(values[position2], includeImageAlt));
  }
  return parts.join("");
}
// In-place splice that tolerates huge insertions: removes `remove` elements
// of `list2` at `start` (normalized like Array#splice, negative counts from
// the end) and inserts `items`. Inserts of 10000+ items are applied in
// chunks, because passing that many arguments in one call can overflow the
// engine's argument limit.
function splice(list2, start, remove, items) {
  const end = list2.length;
  let chunkStart = 0;
  // Clamp/normalize `start` into [0, end].
  if (start < 0) {
    start = -start > end ? 0 : end + start;
  } else if (start > end) {
    start = end;
  }
  remove = remove > 0 ? remove : 0;
  if (items.length < 1e4) {
    list2.splice(start, remove, ...items);
  } else {
    if (remove) {
      list2.splice(start, remove);
    }
    while (chunkStart < items.length) {
      const chunk = items.slice(chunkStart, chunkStart + 1e4);
      list2.splice(start, 0, ...chunk);
      chunkStart += 1e4;
      start += 1e4;
    }
  }
}
// Appends `items` to `list2` via the chunked `splice`, returning `list2`;
// when `list2` is empty, `items` itself is returned (no copy is made).
function push(list2, items) {
  if (list2.length === 0) {
    return items;
  }
  splice(list2, list2.length, 0, items);
  return list2;
}
// Cached hasOwnProperty for `combineExtensions`/`syntaxExtension`.
var hasOwnProperty = {}.hasOwnProperty;
// Merges several micromark syntax extensions into one by folding each into
// a shared accumulator via `syntaxExtension`.
function combineExtensions(extensions) {
  const all2 = {};
  for (const extension2 of extensions) {
    syntaxExtension(all2, extension2);
  }
  return all2;
}
// Folds one syntax extension into the accumulator `all2`: for every hook
// (e.g. `flow`, `text`) and character code, the extension's construct(s)
// are merged into the existing list via `constructs`.
function syntaxExtension(all2, extension2) {
  let hook;
  for (hook in extension2) {
    const existing = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0;
    const left = existing || (all2[hook] = {});
    const right = extension2[hook];
    let code;
    for (code in right) {
      if (!hasOwnProperty.call(left, code)) {
        left[code] = [];
      }
      const value2 = right[code];
      const additions = Array.isArray(value2) ? value2 : value2 ? [value2] : [];
      constructs(left[code], additions);
    }
  }
}
// Merges new constructs into `existing`: entries marked `add: "after"` are
// appended, all others are prepended (preserving their relative order).
function constructs(existing, list2) {
  const before = [];
  for (const construct of list2) {
    if (construct.add === "after") {
      existing.push(construct);
    } else {
      before.push(construct);
    }
  }
  splice(existing, 0, 0, before);
}
// micromark helper: consumes a run of markdown whitespace (space/tab codes)
// into a token of `type`, consuming at most `max` characters in total
// (unbounded when `max` is absent). Returns the initial state; `ok` is
// called with the first code that is not consumed.
function factorySpace(effects, ok, type, max) {
// `limit` is max - 1 because the counter below is compared before the
// increment, so `limit + 1` codes end up consumed in total.
const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
let size = 0;
return start;
function start(code) {
if (markdownSpace(code)) {
effects.enter(type);
return prefix(code);
}
return ok(code);
}
function prefix(code) {
if (markdownSpace(code) && size++ < limit) {
effects.consume(code);
return prefix;
}
effects.exit(type);
return ok(code);
}
}
// micromark "content" initial construct: splits flow content into paragraphs
// made of linked `chunkText` tokens, after first attempting the configured
// content-initial constructs (e.g. definitions).
var content = {
tokenize: initializeContent
};
function initializeContent(effects) {
const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);
let previous2;
return contentStart;
function afterContentStartConstruct(code) {
// Only EOF or a line ending can follow a content-initial construct here.
if (code === null) {
effects.consume(code);
return;
}
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return factorySpace(effects, contentStart, "linePrefix");
}
function paragraphInitial(code) {
effects.enter("paragraph");
return lineStart(code);
}
function lineStart(code) {
// Each line becomes a `chunkText` token; chunks are chained via
// `previous`/`next` so the text tokenizer can stream across lines.
const token = effects.enter("chunkText", {
contentType: "text",
previous: previous2
});
if (previous2) {
previous2.next = token;
}
previous2 = token;
return data(code);
}
function data(code) {
if (code === null) {
effects.exit("chunkText");
effects.exit("paragraph");
effects.consume(code);
return;
}
if (markdownLineEnding(code)) {
// The line ending is consumed into the current chunk, then a new
// chunk starts on the next line.
effects.consume(code);
effects.exit("chunkText");
return lineStart;
}
effects.consume(code);
return data;
}
}
// micromark "document" initial construct: manages the stack of open
// containers (block quotes, lists) and feeds the remaining text of each
// line into a child "flow" tokenizer as linked `chunkFlow` tokens.
var document2 = {
tokenize: initializeDocument
};
// Partial construct used to probe whether a new container starts here.
var containerConstruct = {
tokenize: tokenizeContainer
};
function initializeDocument(effects) {
const self2 = this;
// Stack of open containers: [construct, containerState] pairs.
const stack = [];
let continued = 0;
let childFlow;
let childToken;
let lineStartOffset;
return start;
function start(code) {
// First, try to continue each container already open on this line.
if (continued < stack.length) {
const item = stack[continued];
self2.containerState = item[1];
return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code);
}
return checkNewContainers(code);
}
function documentContinue(code) {
continued++;
// A continuation may request that the current flow be closed (e.g. a
// blank line ending a list item); rewind and re-point the events so the
// exits land before the flow chunk they belong to.
if (self2.containerState._closeFlow) {
self2.containerState._closeFlow = void 0;
if (childFlow) {
closeFlow();
}
const indexBeforeExits = self2.events.length;
let indexBeforeFlow = indexBeforeExits;
let point2;
// Find the last `chunkFlow` exit: its end becomes the end position of
// the container exits added below.
while (indexBeforeFlow--) {
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
point2 = self2.events[indexBeforeFlow][1].end;
break;
}
}
exitContainers(continued);
let index2 = indexBeforeExits;
while (index2 < self2.events.length) {
self2.events[index2][1].end = Object.assign({}, point2);
index2++;
}
// Move the freshly added exits to just after the flow chunk exit.
splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
self2.events.length = index2;
return checkNewContainers(code);
}
return start(code);
}
function checkNewContainers(code) {
if (continued === stack.length) {
if (!childFlow) {
return documentContinued(code);
}
// Concrete constructs (fenced code, frontmatter) swallow everything,
// so no new containers can start inside them.
if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
return flowStart(code);
}
self2.interrupt = Boolean(childFlow.currentConstruct);
}
self2.containerState = {};
return effects.check(containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code);
}
function thereIsANewContainer(code) {
if (childFlow)
closeFlow();
exitContainers(continued);
return documentContinued(code);
}
function thereIsNoNewContainer(code) {
// Mark the line as "lazy" when fewer containers continued than are open.
self2.parser.lazy[self2.now().line] = continued !== stack.length;
lineStartOffset = self2.now().offset;
return flowStart(code);
}
function documentContinued(code) {
self2.containerState = {};
return effects.attempt(containerConstruct, containerContinue, flowStart)(code);
}
function containerContinue(code) {
continued++;
stack.push([self2.currentConstruct, self2.containerState]);
return documentContinued(code);
}
function flowStart(code) {
if (code === null) {
if (childFlow)
closeFlow();
exitContainers(0);
effects.consume(code);
return;
}
childFlow = childFlow || self2.parser.flow(self2.now());
effects.enter("chunkFlow", {
contentType: "flow",
previous: childToken,
_tokenizer: childFlow
});
return flowContinue(code);
}
function flowContinue(code) {
if (code === null) {
writeToChild(effects.exit("chunkFlow"), true);
exitContainers(0);
effects.consume(code);
return;
}
if (markdownLineEnding(code)) {
effects.consume(code);
writeToChild(effects.exit("chunkFlow"));
continued = 0;
self2.interrupt = void 0;
return start;
}
effects.consume(code);
return flowContinue;
}
function writeToChild(token, eof) {
// Stream this line's slice into the child flow tokenizer; `null`
// signals EOF.
const stream = self2.sliceStream(token);
if (eof)
stream.push(null);
token.previous = childToken;
if (childToken)
childToken.next = token;
childToken = token;
childFlow.defineSkip(token.start);
childFlow.write(stream);
// Lazy-line handling: if the child is still inside a construct that
// spans the lazy line, leave things be; otherwise close containers and
// re-point events (same rewiring as in `documentContinue`).
if (self2.parser.lazy[token.start.line]) {
let index2 = childFlow.events.length;
while (index2--) {
if (childFlow.events[index2][1].start.offset < lineStartOffset && (!childFlow.events[index2][1].end || childFlow.events[index2][1].end.offset > lineStartOffset)) {
return;
}
}
const indexBeforeExits = self2.events.length;
let indexBeforeFlow = indexBeforeExits;
let seen;
let point2;
// Find the second-to-last `chunkFlow` exit (skip the one just added).
while (indexBeforeFlow--) {
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
if (seen) {
point2 = self2.events[indexBeforeFlow][1].end;
break;
}
seen = true;
}
}
exitContainers(continued);
index2 = indexBeforeExits;
while (index2 < self2.events.length) {
self2.events[index2][1].end = Object.assign({}, point2);
index2++;
}
splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
self2.events.length = index2;
}
}
function exitContainers(size) {
// Close every container above `size`, innermost first.
let index2 = stack.length;
while (index2-- > size) {
const entry = stack[index2];
self2.containerState = entry[1];
entry[0].exit.call(self2, effects);
}
stack.length = size;
}
function closeFlow() {
childFlow.write([null]);
childToken = void 0;
childFlow = void 0;
self2.containerState._closeFlow = void 0;
}
}
// Probes for a container start: optional indentation (up to 3 unless
// indented code is disabled) followed by one of the document constructs.
function tokenizeContainer(effects, ok, nok) {
return factorySpace(effects, effects.attempt(this.parser.constructs.document, ok, nok), "linePrefix", this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
}
// Classifies a character code for attention (emphasis/strong) handling:
// 1 for whitespace (including EOF), 2 for Unicode punctuation, and
// undefined for everything else.
function classifyCharacter(code) {
  const isWhitespaceLike = code === null || markdownLineEndingOrSpace(code) || unicodeWhitespace(code);
  if (isWhitespaceLike) {
    return 1;
  }
  if (unicodePunctuation(code)) {
    return 2;
  }
  return void 0;
}
// Runs every distinct `resolveAll` handler found on `constructs2` over the
// event list, threading the (possibly replaced) events through each call.
// Identical handlers are invoked only once.
function resolveAll(constructs2, events, context) {
  const invoked = [];
  for (const construct of constructs2) {
    const resolve = construct.resolveAll;
    if (resolve && !invoked.includes(resolve)) {
      events = resolve(events, context);
      invoked.push(resolve);
    }
  }
  return events;
}
// micromark attention construct: `*`/`_` runs for emphasis and strong.
// Runs are tokenized as `attentionSequence`; pairing into emphasis/strong
// happens afterwards in `resolveAllAttention`.
var attention = {
name: "attention",
tokenize: tokenizeAttention,
resolveAll: resolveAllAttention
};
// Pairs closing sequences with the nearest compatible opener (same marker),
// applying CommonMark's "multiple of 3" rule, then rewrites the event list
// into strong/emphasis groups. Leftover sequences become plain data.
function resolveAllAttention(events, context) {
let index2 = -1;
let open;
let group;
let text3;
let openingSequence;
let closingSequence;
let use;
let nextEvents;
let offset;
while (++index2 < events.length) {
if (events[index2][0] === "enter" && events[index2][1].type === "attentionSequence" && events[index2][1]._close) {
open = index2;
// Walk backwards for an opener with the same marker character.
while (open--) {
if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index2][1]).charCodeAt(0)) {
// CommonMark rule: if one side can both open and close, the
// combined length must not be a multiple of 3 unless both are.
if ((events[open][1]._close || events[index2][1]._open) && (events[index2][1].end.offset - events[index2][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index2][1].end.offset - events[index2][1].start.offset) % 3)) {
continue;
}
// Take 2 markers from each side if both have 2+ (strong), else 1.
use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index2][1].end.offset - events[index2][1].start.offset > 1 ? 2 : 1;
const start = Object.assign({}, events[open][1].end);
const end = Object.assign({}, events[index2][1].start);
movePoint(start, -use);
movePoint(end, use);
openingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start,
end: Object.assign({}, events[open][1].end)
};
closingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start: Object.assign({}, events[index2][1].start),
end
};
text3 = {
type: use > 1 ? "strongText" : "emphasisText",
start: Object.assign({}, events[open][1].end),
end: Object.assign({}, events[index2][1].start)
};
group = {
type: use > 1 ? "strong" : "emphasis",
start: Object.assign({}, openingSequence.start),
end: Object.assign({}, closingSequence.end)
};
// Shrink the original sequences by the consumed markers.
events[open][1].end = Object.assign({}, openingSequence.start);
events[index2][1].start = Object.assign({}, closingSequence.end);
nextEvents = [];
// Re-emit any unused opener remainder.
if (events[open][1].end.offset - events[open][1].start.offset) {
nextEvents = push(nextEvents, [
["enter", events[open][1], context],
["exit", events[open][1], context]
]);
}
nextEvents = push(nextEvents, [
["enter", group, context],
["enter", openingSequence, context],
["exit", openingSequence, context],
["enter", text3, context]
]);
// Resolve nested spans between opener and closer.
nextEvents = push(nextEvents, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + 1, index2), context));
nextEvents = push(nextEvents, [
["exit", text3, context],
["enter", closingSequence, context],
["exit", closingSequence, context],
["exit", group, context]
]);
// Re-emit any unused closer remainder.
if (events[index2][1].end.offset - events[index2][1].start.offset) {
offset = 2;
nextEvents = push(nextEvents, [
["enter", events[index2][1], context],
["exit", events[index2][1], context]
]);
} else {
offset = 0;
}
splice(events, open - 1, index2 - open + 3, nextEvents);
index2 = open + nextEvents.length - offset - 2;
break;
}
}
}
}
// Any sequence that never paired degrades to plain data.
index2 = -1;
while (++index2 < events.length) {
if (events[index2][1].type === "attentionSequence") {
events[index2][1].type = "data";
}
}
return events;
}
// Tokenizes one run of a single attention marker and flags whether it can
// open and/or close, per the flanking rules (`*` is laxer than `_`).
function tokenizeAttention(effects, ok) {
const attentionMarkers2 = this.parser.constructs.attentionMarkers.null;
const previous2 = this.previous;
const before = classifyCharacter(previous2);
let marker;
return start;
function start(code) {
effects.enter("attentionSequence");
marker = code;
return sequence(code);
}
function sequence(code) {
if (code === marker) {
effects.consume(code);
return sequence;
}
const token = effects.exit("attentionSequence");
const after = classifyCharacter(code);
const open = !after || after === 2 && before || attentionMarkers2.includes(code);
const close2 = !before || before === 2 && after || attentionMarkers2.includes(previous2);
token._open = Boolean(marker === 42 ? open : open && (before || !close2));
token._close = Boolean(marker === 42 ? close2 : close2 && (after || !open));
return ok(code);
}
}
// Shifts a micromark point by `offset` characters in place, keeping its
// column, absolute offset, and chunk-buffer index in sync. Only valid while
// staying on the same line/chunk (as in attention-sequence splitting).
function movePoint(point2, offset) {
  for (const key of ["column", "offset", "_bufferIndex"]) {
    point2[key] += offset;
  }
}
// micromark autolink construct: `<scheme:url>` and `<email@host>` forms.
var autolink = {
name: "autolink",
tokenize: tokenizeAutolink
};
function tokenizeAutolink(effects, ok, nok) {
// Counts scheme length (max 32) and then email-label length (max 63).
let size = 1;
return start;
function start(code) {
effects.enter("autolink");
effects.enter("autolinkMarker");
effects.consume(code);
effects.exit("autolinkMarker");
effects.enter("autolinkProtocol");
return open;
}
function open(code) {
// A scheme must start with a letter; otherwise this may still be the
// local part of an email address.
if (asciiAlpha(code)) {
effects.consume(code);
return schemeOrEmailAtext;
}
return asciiAtext(code) ? emailAtext(code) : nok(code);
}
function schemeOrEmailAtext(code) {
return code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code) ? schemeInsideOrEmailAtext(code) : emailAtext(code);
}
function schemeInsideOrEmailAtext(code) {
// `:` ends the scheme and switches to URL mode.
if (code === 58) {
effects.consume(code);
return urlInside;
}
if ((code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) && size++ < 32) {
effects.consume(code);
return schemeInsideOrEmailAtext;
}
return emailAtext(code);
}
function urlInside(code) {
if (code === 62) {
effects.exit("autolinkProtocol");
return end(code);
}
// Space, `<`, and control characters are not allowed in the URL.
if (code === null || code === 32 || code === 60 || asciiControl(code)) {
return nok(code);
}
effects.consume(code);
return urlInside;
}
function emailAtext(code) {
if (code === 64) {
effects.consume(code);
size = 0;
return emailAtSignOrDot;
}
if (asciiAtext(code)) {
effects.consume(code);
return emailAtext;
}
return nok(code);
}
function emailAtSignOrDot(code) {
// Each domain label must start alphanumeric.
return asciiAlphanumeric(code) ? emailLabel(code) : nok(code);
}
function emailLabel(code) {
if (code === 46) {
effects.consume(code);
size = 0;
return emailAtSignOrDot;
}
if (code === 62) {
// Retype the protocol token: this turned out to be an email.
effects.exit("autolinkProtocol").type = "autolinkEmail";
return end(code);
}
return emailValue(code);
}
function emailValue(code) {
// Labels allow `-` and alphanumerics, up to 63 characters.
if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {
effects.consume(code);
return code === 45 ? emailValue : emailLabel;
}
return nok(code);
}
function end(code) {
effects.enter("autolinkMarker");
effects.consume(code);
effects.exit("autolinkMarker");
effects.exit("autolink");
return ok;
}
}
// micromark partial: matches a blank line (optional whitespace then a line
// ending or EOF).
var blankLine = {
tokenize: tokenizeBlankLine,
partial: true
};
function tokenizeBlankLine(effects, ok, nok) {
return factorySpace(effects, afterWhitespace, "linePrefix");
function afterWhitespace(code) {
return code === null || markdownLineEnding(code) ? ok(code) : nok(code);
}
}
// micromark block quote container: `>` marker with an optional following
// space, continued on later lines by the same prefix.
var blockQuote = {
name: "blockQuote",
tokenize: tokenizeBlockQuoteStart,
continuation: {
tokenize: tokenizeBlockQuoteContinuation
},
exit
};
function tokenizeBlockQuoteStart(effects, ok, nok) {
const self2 = this;
return start;
function start(code) {
// 62 is `>`.
if (code === 62) {
const state = self2.containerState;
// Enter the container token only once per block quote; subsequent
// lines reuse the open state.
if (!state.open) {
effects.enter("blockQuote", {
_container: true
});
state.open = true;
}
effects.enter("blockQuotePrefix");
effects.enter("blockQuoteMarker");
effects.consume(code);
effects.exit("blockQuoteMarker");
return after;
}
return nok(code);
}
function after(code) {
// One optional space/tab after `>` belongs to the prefix.
if (markdownSpace(code)) {
effects.enter("blockQuotePrefixWhitespace");
effects.consume(code);
effects.exit("blockQuotePrefixWhitespace");
effects.exit("blockQuotePrefix");
return ok;
}
effects.exit("blockQuotePrefix");
return ok(code);
}
}
// Continuation: up to 3 spaces of indentation (unless indented code is
// disabled), then the `>` prefix again.
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
return factorySpace(effects, effects.attempt(blockQuote, ok, nok), "linePrefix", this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
}
// Container exit hook for block quotes: closes the `blockQuote` token that
// `tokenizeBlockQuoteStart` opened.
function exit(effects) {
  effects.exit("blockQuote");
}
// micromark character escape: a backslash followed by one ASCII punctuation
// character.
var characterEscape = {
name: "characterEscape",
tokenize: tokenizeCharacterEscape
};
function tokenizeCharacterEscape(effects, ok, nok) {
return start;
function start(code) {
effects.enter("characterEscape");
effects.enter("escapeMarker");
effects.consume(code);
effects.exit("escapeMarker");
return open;
}
function open(code) {
// Only ASCII punctuation may be escaped; anything else fails the
// construct and the backslash stays literal.
if (asciiPunctuation(code)) {
effects.enter("characterEscapeValue");
effects.consume(code);
effects.exit("characterEscapeValue");
effects.exit("characterEscape");
return ok;
}
return nok(code);
}
}
// 59 is `;`.
var semicolon = 59;
// Lazily created detached element used as an HTML entity decoder.
var element;
// Decodes a named character reference (e.g. "amp") by assigning
// "&name;" to an element's innerHTML and reading back textContent.
// Returns the decoded character(s), or false when the name is unknown.
// NOTE(review): this is the browser build of parse-entities — it requires a
// global DOM `document`, so in a plain Node context the first call throws.
// Confirm the bundle is only exercised where a DOM (or shim) exists.
function decodeEntity(characters) {
var entity = "&" + characters + ";";
var char;
element = element || document.createElement("i");
element.innerHTML = entity;
char = element.textContent;
// A decoded string still ending in ";" means the reference did not
// terminate properly ("&semi;" legitimately decodes to ";").
if (char.charCodeAt(char.length - 1) === semicolon && characters !== "semi") {
return false;
}
// Unchanged output means the entity name was not recognized.
return char === entity ? false : char;
}
// micromark character reference: `&name;`, `&#123;` (decimal), or `&#x1F;`
// (hexadecimal).
var characterReference = {
name: "characterReference",
tokenize: tokenizeCharacterReference
};
function tokenizeCharacterReference(effects, ok, nok) {
const self2 = this;
let size = 0;
// `max`/`test` are set per form: 31 alphanumerics for named, 7 digits for
// decimal, 6 hex digits for hexadecimal.
let max;
let test;
return start;
function start(code) {
effects.enter("characterReference");
effects.enter("characterReferenceMarker");
effects.consume(code);
effects.exit("characterReferenceMarker");
return open;
}
function open(code) {
// 35 is `#`, starting a numeric reference.
if (code === 35) {
effects.enter("characterReferenceMarkerNumeric");
effects.consume(code);
effects.exit("characterReferenceMarkerNumeric");
return numeric;
}
effects.enter("characterReferenceValue");
max = 31;
test = asciiAlphanumeric;
return value2(code);
}
function numeric(code) {
// 88/120 are `X`/`x`, starting a hexadecimal reference.
if (code === 88 || code === 120) {
effects.enter("characterReferenceMarkerHexadecimal");
effects.consume(code);
effects.exit("characterReferenceMarkerHexadecimal");
effects.enter("characterReferenceValue");
max = 6;
test = asciiHexDigit;
return value2;
}
effects.enter("characterReferenceValue");
max = 7;
test = asciiDigit;
return value2(code);
}
function value2(code) {
let token;
// 59 is `;`, terminating a non-empty reference.
if (code === 59 && size) {
token = effects.exit("characterReferenceValue");
// Named references must actually exist (checked via decodeEntity).
if (test === asciiAlphanumeric && !decodeEntity(self2.sliceSerialize(token))) {
return nok(code);
}
effects.enter("characterReferenceMarker");
effects.consume(code);
effects.exit("characterReferenceMarker");
effects.exit("characterReference");
return ok;
}
if (test(code) && size++ < max) {
effects.consume(code);
return value2;
}
return nok(code);
}
}
// micromark fenced code construct: ``` or ~~~ fences with optional info and
// meta strings. Marked `concrete` so container/lazy logic does not interrupt
// the raw content.
var codeFenced = {
name: "codeFenced",
tokenize: tokenizeCodeFenced,
concrete: true
};
function tokenizeCodeFenced(effects, ok, nok) {
const self2 = this;
const closingFenceConstruct = {
tokenize: tokenizeClosingFence,
partial: true
};
const nonLazyLine = {
tokenize: tokenizeNonLazyLine,
partial: true
};
const tail = this.events[this.events.length - 1];
// Indentation of the opening fence; content lines may strip up to the
// same amount of leading whitespace.
const initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
let sizeOpen = 0;
let marker;
return start;
function start(code) {
effects.enter("codeFenced");
effects.enter("codeFencedFence");
effects.enter("codeFencedFenceSequence");
marker = code;
return sequenceOpen(code);
}
function sequenceOpen(code) {
if (code === marker) {
effects.consume(code);
sizeOpen++;
return sequenceOpen;
}
effects.exit("codeFencedFenceSequence");
// An opening fence needs at least 3 markers.
return sizeOpen < 3 ? nok(code) : factorySpace(effects, infoOpen, "whitespace")(code);
}
function infoOpen(code) {
if (code === null || markdownLineEnding(code)) {
return openAfter(code);
}
effects.enter("codeFencedFenceInfo");
effects.enter("chunkString", {
contentType: "string"
});
return info(code);
}
function info(code) {
if (code === null || markdownLineEndingOrSpace(code)) {
effects.exit("chunkString");
effects.exit("codeFencedFenceInfo");
return factorySpace(effects, infoAfter, "whitespace")(code);
}
// Backticks are not allowed in the info string of a backtick fence.
if (code === 96 && code === marker)
return nok(code);
effects.consume(code);
return info;
}
function infoAfter(code) {
if (code === null || markdownLineEnding(code)) {
return openAfter(code);
}
effects.enter("codeFencedFenceMeta");
effects.enter("chunkString", {
contentType: "string"
});
return meta(code);
}
function meta(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit("chunkString");
effects.exit("codeFencedFenceMeta");
return openAfter(code);
}
if (code === 96 && code === marker)
return nok(code);
effects.consume(code);
return meta;
}
function openAfter(code) {
effects.exit("codeFencedFence");
return self2.interrupt ? ok(code) : contentStart(code);
}
function contentStart(code) {
if (code === null) {
return after(code);
}
if (markdownLineEnding(code)) {
// Each new (non-lazy) line is first tried as the closing fence, else
// treated as content with the opening indentation stripped.
return effects.attempt(nonLazyLine, effects.attempt(closingFenceConstruct, after, initialPrefix ? factorySpace(effects, contentStart, "linePrefix", initialPrefix + 1) : contentStart), after)(code);
}
effects.enter("codeFlowValue");
return contentContinue(code);
}
function contentContinue(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit("codeFlowValue");
return contentStart(code);
}
effects.consume(code);
return contentContinue;
}
function after(code) {
effects.exit("codeFenced");
return ok(code);
}
// Partial: a line ending that does not begin a lazy continuation line.
function tokenizeNonLazyLine(effects2, ok2, nok2) {
const self22 = this;
return start2;
function start2(code) {
effects2.enter("lineEnding");
effects2.consume(code);
effects2.exit("lineEnding");
return lineStart;
}
function lineStart(code) {
return self22.parser.lazy[self22.now().line] ? nok2(code) : ok2(code);
}
}
// Partial: a closing fence — same marker, at least as many characters as
// the opening fence, only whitespace after.
function tokenizeClosingFence(effects2, ok2, nok2) {
let size = 0;
return factorySpace(effects2, closingSequenceStart, "linePrefix", this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
function closingSequenceStart(code) {
effects2.enter("codeFencedFence");
effects2.enter("codeFencedFenceSequence");
return closingSequence(code);
}
function closingSequence(code) {
if (code === marker) {
effects2.consume(code);
size++;
return closingSequence;
}
if (size < sizeOpen)
return nok2(code);
effects2.exit("codeFencedFenceSequence");
return factorySpace(effects2, closingSequenceEnd, "whitespace")(code);
}
function closingSequenceEnd(code) {
if (code === null || markdownLineEnding(code)) {
effects2.exit("codeFencedFence");
return ok2(code);
}
return nok2(code);
}
}
}
var codeIndented = {
name: "codeIndented",
tokenize: tokenizeCodeIndented
};
var indentedContent = {
tokenize: tokenizeIndentedContent,
partial: true
};
// Tokenize an indented code block. `ok`/`nok` are the success/failure
// continuations supplied by the tokenizer framework.
function tokenizeCodeIndented(effects, ok, nok) {
const self2 = this;
return start;
function start(code) {
effects.enter("codeIndented");
// Consume up to 4 + 1 columns of whitespace as the line prefix.
return factorySpace(effects, afterStartPrefix, "linePrefix", 4 + 1)(code);
}
function afterStartPrefix(code) {
const tail = self2.events[self2.events.length - 1];
// Only a prefix serializing to at least 4 characters qualifies.
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? afterPrefix(code) : nok(code);
}
function afterPrefix(code) {
if (code === null) {
return after(code);
}
if (markdownLineEnding(code)) {
// Look ahead (partial construct) for more indented lines; when the
// attempt fails, the block ends here.
return effects.attempt(indentedContent, afterPrefix, after)(code);
}
effects.enter("codeFlowValue");
return content3(code);
}
function content3(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit("codeFlowValue");
return afterPrefix(code);
}
effects.consume(code);
return content3;
}
function after(code) {
effects.exit("codeIndented");
return ok(code);
}
}
// Partial tokenizer: succeeds when the next non-blank line also carries an
// indent of at least 4 characters, i.e. the indented code block continues.
function tokenizeIndentedContent(effects, ok, nok) {
const self2 = this;
return start;
function start(code) {
// A lazy continuation line cannot belong to the indented block.
if (self2.parser.lazy[self2.now().line]) {
return nok(code);
}
if (markdownLineEnding(code)) {
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return start;
}
return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code);
}
function afterPrefix(code) {
const tail = self2.events[self2.events.length - 1];
// A 4+ character prefix continues the block; a bare line ending means a
// blank line (keep scanning); anything else fails the lookahead.
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? ok(code) : markdownLineEnding(code) ? start(code) : nok(code);
}
}
// Construct for inline code spans (backtick-delimited); `previous` guards
// which character may directly precede the opening sequence.
var codeText = {
name: "codeText",
tokenize: tokenizeCodeText,
resolve: resolveCodeText,
previous
};
// Post-process a code span's events: turn one leading and one trailing
// space/line ending into `codeTextPadding` (only when both are present and
// there is data between them), then merge consecutive non-lineEnding runs
// into single `codeTextData` tokens, splicing out inner enter/exit pairs.
function resolveCodeText(events) {
let tailExitIndex = events.length - 4;
let headEnterIndex = 3;
let index2;
let enter;
// Padding is stripped only if there is real data somewhere between the
// first and last space/line ending.
if ((events[headEnterIndex][1].type === "lineEnding" || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === "lineEnding" || events[tailExitIndex][1].type === "space")) {
index2 = headEnterIndex;
while (++index2 < tailExitIndex) {
if (events[index2][1].type === "codeTextData") {
events[headEnterIndex][1].type = "codeTextPadding";
events[tailExitIndex][1].type = "codeTextPadding";
headEnterIndex += 2;
tailExitIndex -= 2;
break;
}
}
}
// Merge adjacent data tokens: `enter` marks the start of the current run;
// a line ending (or the end of the span) flushes it.
index2 = headEnterIndex - 1;
tailExitIndex++;
while (++index2 <= tailExitIndex) {
if (enter === void 0) {
if (index2 !== tailExitIndex && events[index2][1].type !== "lineEnding") {
enter = index2;
}
} else if (index2 === tailExitIndex || events[index2][1].type === "lineEnding") {
events[enter][1].type = "codeTextData";
if (index2 !== enter + 2) {
// Extend the first token over the whole run and drop the rest.
events[enter][1].end = events[index2 - 1][1].end;
events.splice(enter + 2, index2 - enter - 2);
tailExitIndex -= index2 - enter - 2;
index2 = enter + 2;
}
enter = void 0;
}
}
return events;
}
// Guard on the character before a code span: anything is fine except a
// backtick (code 96), unless that backtick was itself a character escape.
function previous(code) {
  if (code !== 96) {
    return true;
  }
  const lastEvent = this.events[this.events.length - 1];
  return lastEvent[1].type === "characterEscape";
}
// Tokenize an inline code span: an opening backtick sequence, content
// (data, spaces, line endings), and a closing sequence of equal length.
// Codes: 96 = backtick, 32 = space.
function tokenizeCodeText(effects, ok, nok) {
const self2 = this;
let sizeOpen = 0;
let size;
let token;
return start;
function start(code) {
effects.enter("codeText");
effects.enter("codeTextSequence");
return openingSequence(code);
}
function openingSequence(code) {
if (code === 96) {
effects.consume(code);
sizeOpen++;
return openingSequence;
}
effects.exit("codeTextSequence");
return gap(code);
}
function gap(code) {
// EOF inside a span: the whole construct fails.
if (code === null) {
return nok(code);
}
if (code === 96) {
// Might be the closing sequence; remember the token so it can be
// re-typed as data when the length does not match.
token = effects.enter("codeTextSequence");
size = 0;
return closingSequence(code);
}
if (code === 32) {
effects.enter("space");
effects.consume(code);
effects.exit("space");
return gap;
}
if (markdownLineEnding(code)) {
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return gap;
}
effects.enter("codeTextData");
return data(code);
}
function data(code) {
if (code === null || code === 32 || code === 96 || markdownLineEnding(code)) {
effects.exit("codeTextData");
return gap(code);
}
effects.consume(code);
return data;
}
function closingSequence(code) {
if (code === 96) {
effects.consume(code);
size++;
return closingSequence;
}
// Only a sequence of exactly the opening length closes the span.
if (size === sizeOpen) {
effects.exit("codeTextSequence");
effects.exit("codeText");
return ok(code);
}
// Wrong length: downgrade the sequence to plain data and continue.
token.type = "codeTextData";
return data(code);
}
}
// Expand deferred content: walk the event list, replacing events that carry
// a `contentType` with the events produced by their sub-tokenizer (via
// `subcontent`), using `jumps` to skip over already-expanded regions.
// Returns `true` when nothing more needs subtokenizing.
function subtokenize(events) {
const jumps = {};
let index2 = -1;
let event;
let lineIndex;
let otherIndex;
let otherEvent;
let parameters;
let subevents;
let more;
while (++index2 < events.length) {
while (index2 in jumps) {
index2 = jumps[index2];
}
event = events[index2];
// Mark the first `chunkText` tokens of a list item's first content so
// downstream GFM task-list handling can find them.
if (index2 && event[1].type === "chunkFlow" && events[index2 - 1][1].type === "listItemPrefix") {
subevents = event[1]._tokenizer.events;
otherIndex = 0;
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
otherIndex += 2;
}
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
while (++otherIndex < subevents.length) {
if (subevents[otherIndex][1].type === "content") {
break;
}
if (subevents[otherIndex][1].type === "chunkText") {
subevents[otherIndex][1]._isInFirstContentOfListItem = true;
otherIndex++;
}
}
}
}
if (event[0] === "enter") {
if (event[1].contentType) {
// Splice in the sub-tokenized events and record the jump table.
Object.assign(jumps, subcontent(events, index2));
index2 = jumps[index2];
more = true;
}
} else if (event[1]._container) {
// Exiting a container: move trailing (blank) line endings out of it
// so the container ends before them.
otherIndex = index2;
lineIndex = void 0;
while (otherIndex--) {
otherEvent = events[otherIndex];
if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
if (otherEvent[0] === "enter") {
if (lineIndex) {
events[lineIndex][1].type = "lineEndingBlank";
}
otherEvent[1].type = "lineEnding";
lineIndex = otherIndex;
}
} else {
break;
}
}
if (lineIndex) {
// End the container at the first trailing line ending and reorder
// the events so the container exit comes before them.
event[1].end = Object.assign({}, events[lineIndex][1].start);
parameters = events.slice(lineIndex, index2);
parameters.unshift(event);
splice(events, lineIndex, index2 - lineIndex + 1, parameters);
}
}
}
return !more;
}
// Run the sub-tokenizer for the (possibly linked) token chain starting at
// `events[eventIndex]`, splice the resulting child events back into
// `events` in place of each enter/exit pair, and return a map of index
// jumps (old position -> position after the spliced-in events).
function subcontent(events, eventIndex) {
const token = events[eventIndex][1];
const context = events[eventIndex][2];
let startPosition = eventIndex - 1;
const startPositions = [];
// Reuse an existing tokenizer when the chain was partially tokenized.
const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);
const childEvents = tokenizer.events;
const jumps = [];
const gaps = {};
let stream;
let previous2;
let index2 = -1;
let current = token;
let adjust = 0;
let start = 0;
const breaks = [start];
// Feed every not-yet-tokenized link of the chain into the tokenizer.
while (current) {
while (events[++startPosition][1] !== current) {
}
startPositions.push(startPosition);
if (!current._tokenizer) {
stream = context.sliceStream(current);
if (!current.next) {
// Last link: signal EOF to the sub-tokenizer.
stream.push(null);
}
if (previous2) {
tokenizer.defineSkip(current.start);
}
if (current._isInFirstContentOfListItem) {
tokenizer._gfmTasklistFirstContentOfListItem = true;
}
tokenizer.write(stream);
if (current._isInFirstContentOfListItem) {
tokenizer._gfmTasklistFirstContentOfListItem = void 0;
}
}
previous2 = current;
current = current.next;
}
// Find where one link's events end and the next begin: a same-type
// enter/exit pair spanning a line break marks a boundary.
current = token;
while (++index2 < childEvents.length) {
if (childEvents[index2][0] === "exit" && childEvents[index2 - 1][0] === "enter" && childEvents[index2][1].type === childEvents[index2 - 1][1].type && childEvents[index2][1].start.line !== childEvents[index2][1].end.line) {
start = index2 + 1;
breaks.push(start);
current._tokenizer = void 0;
current.previous = void 0;
current = current.next;
}
}
tokenizer.events = [];
if (current) {
current._tokenizer = void 0;
current.previous = void 0;
} else {
breaks.pop();
}
// Splice child event slices back in, from last to first so earlier
// positions stay valid, recording the jump ranges.
index2 = breaks.length;
while (index2--) {
const slice = childEvents.slice(breaks[index2], breaks[index2 + 1]);
const start2 = startPositions.pop();
jumps.unshift([start2, start2 + slice.length - 1]);
splice(events, start2, 2, slice);
}
// Convert the jump ranges into an absolute old-index -> new-index map.
index2 = -1;
while (++index2 < jumps.length) {
gaps[adjust + jumps[index2][0]] = adjust + jumps[index2][1];
adjust += jumps[index2][1] - jumps[index2][0] - 1;
}
return gaps;
}
// Construct for a paragraph-like `content` run of lines.
var content2 = {
tokenize: tokenizeContent,
resolve: resolveContent
};
// Partial construct: checks whether the next line continues the content.
var continuationConstruct = {
tokenize: tokenizeContinuation,
partial: true
};
// Resolve a content run: expand its deferred `chunkContent` tokens in
// place (subtokenize mutates the list) and return the same array.
function resolveContent(events) {
subtokenize(events);
return events;
}
// Tokenize a content run: consume data until EOF or until a line ending
// that does not continue the content. Continuing lines are chained as
// linked `chunkContent` tokens for later sub-tokenization.
function tokenizeContent(effects, ok) {
let previous2;
return start;
function start(code) {
effects.enter("content");
previous2 = effects.enter("chunkContent", {
contentType: "content"
});
return data(code);
}
function data(code) {
if (code === null) {
return contentEnd(code);
}
if (markdownLineEnding(code)) {
// Peek (check, not attempt) whether the next line continues.
return effects.check(continuationConstruct, contentContinue, contentEnd)(code);
}
effects.consume(code);
return data;
}
function contentEnd(code) {
effects.exit("chunkContent");
effects.exit("content");
return ok(code);
}
function contentContinue(code) {
effects.consume(code);
effects.exit("chunkContent");
// Link the next chunk to the previous one so the chain can be
// sub-tokenized as a single stream later.
previous2.next = effects.enter("chunkContent", {
contentType: "content",
previous: previous2
});
previous2 = previous2.next;
return data;
}
}
// Partial tokenizer: decides whether the line after a line ending
// continues the current content run (`ok`) or interrupts it (`nok`).
function tokenizeContinuation(effects, ok, nok) {
const self2 = this;
return startLookahead;
function startLookahead(code) {
effects.exit("chunkContent");
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return factorySpace(effects, prefixed, "linePrefix");
}
function prefixed(code) {
// A blank line always ends the content.
if (code === null || markdownLineEnding(code)) {
return nok(code);
}
const tail = self2.events[self2.events.length - 1];
// A 4+ character indent cannot interrupt content (it would otherwise
// be indented code), unless codeIndented is disabled.
if (!self2.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
return ok(code);
}
// Otherwise: content continues only if no flow construct interrupts.
return effects.interrupt(self2.parser.constructs.flow, nok, ok)(code);
}
}
// Parse a link/definition destination: either `<...>` (literal form) or a
// raw form with balanced parentheses up to `max` deep.
// Codes: 60 `<`, 62 `>`, 40 `(`, 41 `)`, 92 `\`.
function factoryDestination(effects, ok, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
const limit = max || Number.POSITIVE_INFINITY;
let balance = 0;
return start;
function start(code) {
if (code === 60) {
effects.enter(type);
effects.enter(literalType);
effects.enter(literalMarkerType);
effects.consume(code);
effects.exit(literalMarkerType);
return destinationEnclosedBefore;
}
// A raw destination cannot start with `)`, EOF, or an ASCII control.
if (code === null || code === 41 || asciiControl(code)) {
return nok(code);
}
effects.enter(type);
effects.enter(rawType);
effects.enter(stringType);
effects.enter("chunkString", {
contentType: "string"
});
return destinationRaw(code);
}
function destinationEnclosedBefore(code) {
// `<>` is a valid, empty literal destination.
if (code === 62) {
effects.enter(literalMarkerType);
effects.consume(code);
effects.exit(literalMarkerType);
effects.exit(literalType);
effects.exit(type);
return ok;
}
effects.enter(stringType);
effects.enter("chunkString", {
contentType: "string"
});
return destinationEnclosed(code);
}
function destinationEnclosed(code) {
if (code === 62) {
effects.exit("chunkString");
effects.exit(stringType);
return destinationEnclosedBefore(code);
}
// No EOF, `<`, or line endings inside a literal destination.
if (code === null || code === 60 || markdownLineEnding(code)) {
return nok(code);
}
effects.consume(code);
return code === 92 ? destinationEnclosedEscape : destinationEnclosed;
}
function destinationEnclosedEscape(code) {
// Backslash escapes `<`, `>`, and `\` inside the literal form.
if (code === 60 || code === 62 || code === 92) {
effects.consume(code);
return destinationEnclosed;
}
return destinationEnclosed(code);
}
function destinationRaw(code) {
if (code === 40) {
if (++balance > limit)
return nok(code);
effects.consume(code);
return destinationRaw;
}
if (code === 41) {
// An unbalanced `)` ends the destination (not consumed here).
if (!balance--) {
effects.exit("chunkString");
effects.exit(stringType);
effects.exit(rawType);
effects.exit(type);
return ok(code);
}
effects.consume(code);
return destinationRaw;
}
if (code === null || markdownLineEndingOrSpace(code)) {
// Whitespace/EOF ends the raw form, but only when parens balance.
if (balance)
return nok(code);
effects.exit("chunkString");
effects.exit(stringType);
effects.exit(rawType);
effects.exit(type);
return ok(code);
}
if (asciiControl(code))
return nok(code);
effects.consume(code);
return code === 92 ? destinationRawEscape : destinationRaw;
}
function destinationRawEscape(code) {
// Backslash escapes `(`, `)`, and `\` in the raw form.
if (code === 40 || code === 41 || code === 92) {
effects.consume(code);
return destinationRaw;
}
return destinationRaw(code);
}
}
// Parse a link label `[...]`: up to 999 characters, must contain at least
// one non-whitespace character before `]` closes it.
// Codes: 91 `[`, 93 `]`, 94 `^`, 92 `\`.
function factoryLabel(effects, ok, nok, type, markerType, stringType) {
const self2 = this;
let size = 0;
// `data` becomes true once a non-space character has been seen.
let data;
return start;
function start(code) {
effects.enter(type);
effects.enter(markerType);
effects.consume(code);
effects.exit(markerType);
effects.enter(stringType);
return atBreak;
}
function atBreak(code) {
// Fail on EOF, nested `[`, empty label, a footnote caret when hidden
// footnote support is on, or a label longer than 999 characters.
if (code === null || code === 91 || code === 93 && !data || code === 94 && !size && "_hiddenFootnoteSupport" in self2.parser.constructs || size > 999) {
return nok(code);
}
if (code === 93) {
effects.exit(stringType);
effects.enter(markerType);
effects.consume(code);
effects.exit(markerType);
effects.exit(type);
return ok;
}
if (markdownLineEnding(code)) {
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return atBreak;
}
effects.enter("chunkString", {
contentType: "string"
});
return label(code);
}
function label(code) {
if (code === null || code === 91 || code === 93 || markdownLineEnding(code) || size++ > 999) {
effects.exit("chunkString");
return atBreak(code);
}
effects.consume(code);
data = data || !markdownSpace(code);
return code === 92 ? labelEscape : label;
}
function labelEscape(code) {
// Backslash escapes `[`, `\`, and `]` inside the label.
if (code === 91 || code === 92 || code === 93) {
effects.consume(code);
size++;
return label;
}
return label(code);
}
}
// Parse a title delimited by `"..."`, `'...'`, or `(...)`.
// Codes: 34 `"`, 39 `'`, 40 `(`, 41 `)`, 92 `\`.
function factoryTitle(effects, ok, nok, type, markerType, stringType) {
let marker;
return start;
function start(code) {
effects.enter(type);
effects.enter(markerType);
effects.consume(code);
effects.exit(markerType);
// A `(` opener closes with `)`; quotes close with themselves.
marker = code === 40 ? 41 : code;
return atFirstTitleBreak;
}
function atFirstTitleBreak(code) {
if (code === marker) {
effects.enter(markerType);
effects.consume(code);
effects.exit(markerType);
effects.exit(type);
return ok;
}
effects.enter(stringType);
return atTitleBreak(code);
}
function atTitleBreak(code) {
if (code === marker) {
effects.exit(stringType);
return atFirstTitleBreak(marker);
}
// EOF before the closing marker fails the title.
if (code === null) {
return nok(code);
}
// Titles may span lines; the prefix of each new line is consumed.
if (markdownLineEnding(code)) {
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return factorySpace(effects, atTitleBreak, "linePrefix");
}
effects.enter("chunkString", {
contentType: "string"
});
return title(code);
}
function title(code) {
if (code === marker || code === null || markdownLineEnding(code)) {
effects.exit("chunkString");
return atTitleBreak(code);
}
effects.consume(code);
return code === 92 ? titleEscape : title;
}
function titleEscape(code) {
// Backslash escapes the closing marker and `\` itself.
if (code === marker || code === 92) {
effects.consume(code);
return title;
}
return title(code);
}
}
// Consume arbitrary markdown whitespace (spaces, tabs, line endings).
// Spaces before the first line ending are a `lineSuffix`; after it they
// are a `linePrefix`.
function factoryWhitespace(effects, ok) {
let seen;
return start;
function start(code) {
if (markdownLineEnding(code)) {
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
seen = true;
return start;
}
if (markdownSpace(code)) {
return factorySpace(effects, start, seen ? "linePrefix" : "lineSuffix")(code);
}
return ok(code);
}
}
// Normalize a definition/reference identifier for matching: collapse runs
// of whitespace to a single space, strip one leading/trailing space, then
// case-fold via a lower->upper round trip (handles characters such as
// U+00DF whose lower/upper forms are not one-to-one).
function normalizeIdentifier(value2) {
  const collapsed = value2.replace(/[\t\n\r ]+/g, " ");
  const trimmed = collapsed.replace(/^ | $/g, "");
  return trimmed.toLowerCase().toUpperCase();
}
// Construct for link reference definitions: `[label]: destination "title"`.
var definition = {
name: "definition",
tokenize: tokenizeDefinition
};
// Partial construct for the optional title part of a definition.
var titleConstruct = {
tokenize: tokenizeTitle,
partial: true
};
// Tokenize a link reference definition. On success, the normalized label
// is recorded in `parser.defined`. Code 58 is `:`.
function tokenizeDefinition(effects, ok, nok) {
const self2 = this;
let identifier;
return start;
function start(code) {
effects.enter("definition");
return factoryLabel.call(self2, effects, labelAfter, nok, "definitionLabel", "definitionLabelMarker", "definitionLabelString")(code);
}
function labelAfter(code) {
// Slice off the surrounding brackets and normalize for matching.
identifier = normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1));
if (code === 58) {
effects.enter("definitionMarker");
effects.consume(code);
effects.exit("definitionMarker");
// Whitespace, destination, then an optional title (both the success
// and failure paths of the title attempt eat trailing whitespace).
return factoryWhitespace(effects, factoryDestination(effects, effects.attempt(titleConstruct, factorySpace(effects, after, "whitespace"), factorySpace(effects, after, "whitespace")), nok, "definitionDestination", "definitionDestinationLiteral", "definitionDestinationLiteralMarker", "definitionDestinationRaw", "definitionDestinationString"));
}
return nok(code);
}
function after(code) {
// The definition must be followed by EOF or a line ending.
if (code === null || markdownLineEnding(code)) {
effects.exit("definition");
if (!self2.parser.defined.includes(identifier)) {
self2.parser.defined.push(identifier);
}
return ok(code);
}
return nok(code);
}
}
// Partial tokenizer for a definition title: whitespace, then a title
// opened by `"` (34), `'` (39), or `(` (40), then end of line.
function tokenizeTitle(effects, ok, nok) {
return start;
function start(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, before)(code) : nok(code);
}
function before(code) {
if (code === 34 || code === 39 || code === 40) {
return factoryTitle(effects, factorySpace(effects, after, "whitespace"), nok, "definitionTitle", "definitionTitleMarker", "definitionTitleString")(code);
}
return nok(code);
}
function after(code) {
// Only whitespace may follow the title on its last line.
return code === null || markdownLineEnding(code) ? ok(code) : nok(code);
}
}
// Construct for a hard line break written as a backslash at end of line.
var hardBreakEscape = {
name: "hardBreakEscape",
tokenize: tokenizeHardBreakEscape
};
// Tokenize `\` directly followed by a line ending; anything else fails.
function tokenizeHardBreakEscape(effects, ok, nok) {
return start;
function start(code) {
effects.enter("hardBreakEscape");
effects.enter("escapeMarker");
effects.consume(code);
return open;
}
function open(code) {
if (markdownLineEnding(code)) {
effects.exit("escapeMarker");
effects.exit("hardBreakEscape");
return ok(code);
}
return nok(code);
}
}
// Construct for ATX headings (`# heading`).
var headingAtx = {
name: "headingAtx",
tokenize: tokenizeHeadingAtx,
resolve: resolveHeadingAtx
};
// Post-process an ATX heading: trim surrounding whitespace and a trailing
// `#` sequence, then wrap the remaining events in a single
// `atxHeadingText` token containing one `chunkText` for sub-tokenization.
function resolveHeadingAtx(events, context) {
let contentEnd = events.length - 2;
let contentStart = 3;
let content3;
let text3;
// Skip whitespace after the opening sequence.
if (events[contentStart][1].type === "whitespace") {
contentStart += 2;
}
// Drop trailing whitespace before the closing sequence.
if (contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace") {
contentEnd -= 2;
}
// Drop a closing `#` sequence (and the whitespace before it, if any).
if (events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace")) {
contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
}
if (contentEnd > contentStart) {
content3 = {
type: "atxHeadingText",
start: events[contentStart][1].start,
end: events[contentEnd][1].end
};
text3 = {
type: "chunkText",
start: events[contentStart][1].start,
end: events[contentEnd][1].end,
contentType: "text"
};
splice(events, contentStart, contentEnd - contentStart + 1, [
["enter", content3, context],
["enter", text3, context],
["exit", text3, context],
["exit", content3, context]
]);
}
return events;
}
// Tokenize an ATX heading: 1-6 `#` (code 35), then optional content and an
// optional closing `#` sequence.
function tokenizeHeadingAtx(effects, ok, nok) {
const self2 = this;
let size = 0;
return start;
function start(code) {
effects.enter("atxHeading");
effects.enter("atxHeadingSequence");
return fenceOpenInside(code);
}
function fenceOpenInside(code) {
// At most six `#` characters open a heading.
if (code === 35 && size++ < 6) {
effects.consume(code);
return fenceOpenInside;
}
// The sequence must be followed by whitespace, a line ending, or EOF.
if (code === null || markdownLineEndingOrSpace(code)) {
effects.exit("atxHeadingSequence");
// When interrupting another construct, success is enough.
return self2.interrupt ? ok(code) : headingBreak(code);
}
return nok(code);
}
function headingBreak(code) {
if (code === 35) {
effects.enter("atxHeadingSequence");
return sequence(code);
}
if (code === null || markdownLineEnding(code)) {
effects.exit("atxHeading");
return ok(code);
}
if (markdownSpace(code)) {
return factorySpace(effects, headingBreak, "whitespace")(code);
}
effects.enter("atxHeadingText");
return data(code);
}
function sequence(code) {
if (code === 35) {
effects.consume(code);
return sequence;
}
effects.exit("atxHeadingSequence");
return headingBreak(code);
}
function data(code) {
// Text runs until `#`, whitespace, a line ending, or EOF.
if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
effects.exit("atxHeadingText");
return headingBreak(code);
}
effects.consume(code);
return data;
}
}
// Tag names that open a "basic" HTML block (kind 6 in tokenizeHtmlFlow).
var htmlBlockNames = [
"address",
"article",
"aside",
"base",
"basefont",
"blockquote",
"body",
"caption",
"center",
"col",
"colgroup",
"dd",
"details",
"dialog",
"dir",
"div",
"dl",
"dt",
"fieldset",
"figcaption",
"figure",
"footer",
"form",
"frame",
"frameset",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"head",
"header",
"hr",
"html",
"iframe",
"legend",
"li",
"link",
"main",
"menu",
"menuitem",
"nav",
"noframes",
"ol",
"optgroup",
"option",
"p",
"param",
"section",
"source",
"summary",
"table",
"tbody",
"td",
"tfoot",
"th",
"thead",
"title",
"tr",
"track",
"ul"
];
// Tag names whose content is raw text (kind 1): no markdown inside them.
var htmlRawNames = ["pre", "script", "style", "textarea"];
// Construct for HTML at the flow (block) level; `concrete` means its
// content is not parsed as markdown.
var htmlFlow = {
name: "htmlFlow",
tokenize: tokenizeHtmlFlow,
resolveTo: resolveToHtmlFlow,
concrete: true
};
// Partial construct: checks whether the next line is blank.
var nextBlankConstruct = {
tokenize: tokenizeNextBlank,
partial: true
};
// Fold a directly preceding line prefix into the most recent `htmlFlow`
// opening: move the start points back and drop the prefix's events.
function resolveToHtmlFlow(events) {
  // Walk backwards to the latest `htmlFlow` enter event.
  let at = events.length;
  while (at--) {
    const [kind, token] = events[at];
    if (kind === "enter" && token.type === "htmlFlow") {
      break;
    }
  }
  if (at > 1 && events[at - 2][1].type === "linePrefix") {
    const prefixStart = events[at - 2][1].start;
    // Both the flow token and the event right after it absorb the prefix.
    events[at][1].start = prefixStart;
    events[at + 1][1].start = prefixStart;
    events.splice(at - 2, 2);
  }
  return events;
}
// Tokenize an HTML flow block. `kind` tracks which of the seven block
// kinds (as assigned below) was opened, which determines how the block
// ends: 1 raw tag (pre/script/style/textarea), 2 comment `<!--`,
// 3 instruction `<?`, 4 declaration `<!X`, 5 CDATA `<![CDATA[`,
// 6 basic block tag, 7 complete tag on its own line.
// Codes: 33 `!`, 45 `-`, 47 `/`, 60 `<`, 61 `=`, 62 `>`, 63 `?`,
// 91 `[`, 93 `]`, 34 `"`, 39 `'`, 58 `:`, 95 `_`, 96 backtick.
function tokenizeHtmlFlow(effects, ok, nok) {
const self2 = this;
let kind;
// `startTag` is true for an opening tag, falsy for a closing tag.
let startTag;
// `buffer2` collects the tag name (or expected "CDATA[" characters).
let buffer2;
let index2;
let marker;
return start;
function start(code) {
effects.enter("htmlFlow");
effects.enter("htmlFlowData");
effects.consume(code);
return open;
}
function open(code) {
if (code === 33) {
effects.consume(code);
return declarationStart;
}
if (code === 47) {
effects.consume(code);
return tagCloseStart;
}
if (code === 63) {
effects.consume(code);
kind = 3;
// When interrupting, seeing the opener is enough; otherwise scan on.
return self2.interrupt ? ok : continuationDeclarationInside;
}
if (asciiAlpha(code)) {
effects.consume(code);
buffer2 = String.fromCharCode(code);
startTag = true;
return tagName;
}
return nok(code);
}
function declarationStart(code) {
if (code === 45) {
effects.consume(code);
kind = 2;
return commentOpenInside;
}
if (code === 91) {
effects.consume(code);
kind = 5;
// Expect the literal characters "CDATA[" next.
buffer2 = "CDATA[";
index2 = 0;
return cdataOpenInside;
}
if (asciiAlpha(code)) {
effects.consume(code);
kind = 4;
return self2.interrupt ? ok : continuationDeclarationInside;
}
return nok(code);
}
function commentOpenInside(code) {
if (code === 45) {
effects.consume(code);
return self2.interrupt ? ok : continuationDeclarationInside;
}
return nok(code);
}
function cdataOpenInside(code) {
if (code === buffer2.charCodeAt(index2++)) {
effects.consume(code);
return index2 === buffer2.length ? self2.interrupt ? ok : continuation : cdataOpenInside;
}
return nok(code);
}
function tagCloseStart(code) {
if (asciiAlpha(code)) {
effects.consume(code);
buffer2 = String.fromCharCode(code);
return tagName;
}
return nok(code);
}
function tagName(code) {
if (code === null || code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
// A raw tag (kind 1) only counts when it is an opening tag and is
// not self-closing here.
if (code !== 47 && startTag && htmlRawNames.includes(buffer2.toLowerCase())) {
kind = 1;
return self2.interrupt ? ok(code) : continuation(code);
}
if (htmlBlockNames.includes(buffer2.toLowerCase())) {
kind = 6;
if (code === 47) {
effects.consume(code);
return basicSelfClosing;
}
return self2.interrupt ? ok(code) : continuation(code);
}
// Any other name: a "complete" tag (kind 7), which cannot interrupt
// unless the line is lazy.
kind = 7;
return self2.interrupt && !self2.parser.lazy[self2.now().line] ? nok(code) : startTag ? completeAttributeNameBefore(code) : completeClosingTagAfter(code);
}
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code);
buffer2 += String.fromCharCode(code);
return tagName;
}
return nok(code);
}
function basicSelfClosing(code) {
if (code === 62) {
effects.consume(code);
return self2.interrupt ? ok : continuation;
}
return nok(code);
}
function completeClosingTagAfter(code) {
if (markdownSpace(code)) {
effects.consume(code);
return completeClosingTagAfter;
}
return completeEnd(code);
}
// The `complete*` states validate a full kind-7 tag: attributes with
// optional quoted or unquoted values, optional `/`, then `>`.
function completeAttributeNameBefore(code) {
if (code === 47) {
effects.consume(code);
return completeEnd;
}
if (code === 58 || code === 95 || asciiAlpha(code)) {
effects.consume(code);
return completeAttributeName;
}
if (markdownSpace(code)) {
effects.consume(code);
return completeAttributeNameBefore;
}
return completeEnd(code);
}
function completeAttributeName(code) {
if (code === 45 || code === 46 || code === 58 || code === 95 || asciiAlphanumeric(code)) {
effects.consume(code);
return completeAttributeName;
}
return completeAttributeNameAfter(code);
}
function completeAttributeNameAfter(code) {
if (code === 61) {
effects.consume(code);
return completeAttributeValueBefore;
}
if (markdownSpace(code)) {
effects.consume(code);
return completeAttributeNameAfter;
}
return completeAttributeNameBefore(code);
}
function completeAttributeValueBefore(code) {
if (code === null || code === 60 || code === 61 || code === 62 || code === 96) {
return nok(code);
}
if (code === 34 || code === 39) {
effects.consume(code);
marker = code;
return completeAttributeValueQuoted;
}
if (markdownSpace(code)) {
effects.consume(code);
return completeAttributeValueBefore;
}
marker = null;
return completeAttributeValueUnquoted(code);
}
function completeAttributeValueQuoted(code) {
// Quoted values cannot span lines in a flow tag.
if (code === null || markdownLineEnding(code)) {
return nok(code);
}
if (code === marker) {
effects.consume(code);
return completeAttributeValueQuotedAfter;
}
effects.consume(code);
return completeAttributeValueQuoted;
}
function completeAttributeValueUnquoted(code) {
if (code === null || code === 34 || code === 39 || code === 60 || code === 61 || code === 62 || code === 96 || markdownLineEndingOrSpace(code)) {
return completeAttributeNameAfter(code);
}
effects.consume(code);
return completeAttributeValueUnquoted;
}
function completeAttributeValueQuotedAfter(code) {
if (code === 47 || code === 62 || markdownSpace(code)) {
return completeAttributeNameBefore(code);
}
return nok(code);
}
function completeEnd(code) {
if (code === 62) {
effects.consume(code);
return completeAfter;
}
return nok(code);
}
function completeAfter(code) {
// A complete tag must be followed only by spaces until end of line.
if (markdownSpace(code)) {
effects.consume(code);
return completeAfter;
}
return code === null || markdownLineEnding(code) ? continuation(code) : nok(code);
}
// `continuation` scans content, watching for the closing condition of the
// current `kind` (`-->`, `</raw>`, `>`, `?>`, `]]>`, or a blank line).
function continuation(code) {
if (code === 45 && kind === 2) {
effects.consume(code);
return continuationCommentInside;
}
if (code === 60 && kind === 1) {
effects.consume(code);
return continuationRawTagOpen;
}
if (code === 62 && kind === 4) {
effects.consume(code);
return continuationClose;
}
if (code === 63 && kind === 3) {
effects.consume(code);
return continuationDeclarationInside;
}
if (code === 93 && kind === 5) {
effects.consume(code);
return continuationCharacterDataInside;
}
// Kinds 6 and 7 end at a blank line.
if (markdownLineEnding(code) && (kind === 6 || kind === 7)) {
return effects.check(nextBlankConstruct, continuationClose, continuationAtLineEnding)(code);
}
if (code === null || markdownLineEnding(code)) {
return continuationAtLineEnding(code);
}
effects.consume(code);
return continuation;
}
function continuationAtLineEnding(code) {
effects.exit("htmlFlowData");
return htmlContinueStart(code);
}
function htmlContinueStart(code) {
if (code === null) {
return done(code);
}
if (markdownLineEnding(code)) {
// Continue onto the next line unless it is a lazy continuation.
return effects.attempt({
tokenize: htmlLineEnd,
partial: true
}, htmlContinueStart, done)(code);
}
effects.enter("htmlFlowData");
return continuation(code);
}
function htmlLineEnd(effects2, ok2, nok2) {
return start2;
function start2(code) {
effects2.enter("lineEnding");
effects2.consume(code);
effects2.exit("lineEnding");
return lineStart;
}
function lineStart(code) {
return self2.parser.lazy[self2.now().line] ? nok2(code) : ok2(code);
}
}
function continuationCommentInside(code) {
if (code === 45) {
effects.consume(code);
return continuationDeclarationInside;
}
return continuation(code);
}
function continuationRawTagOpen(code) {
if (code === 47) {
effects.consume(code);
buffer2 = "";
return continuationRawEndTag;
}
return continuation(code);
}
function continuationRawEndTag(code) {
// `</name>` closes a raw block when `name` is a raw tag name.
if (code === 62 && htmlRawNames.includes(buffer2.toLowerCase())) {
effects.consume(code);
return continuationClose;
}
// Raw names are at most 8 characters ("textarea").
if (asciiAlpha(code) && buffer2.length < 8) {
effects.consume(code);
buffer2 += String.fromCharCode(code);
return continuationRawEndTag;
}
return continuation(code);
}
function continuationCharacterDataInside(code) {
if (code === 93) {
effects.consume(code);
return continuationDeclarationInside;
}
return continuation(code);
}
function continuationDeclarationInside(code) {
if (code === 62) {
effects.consume(code);
return continuationClose;
}
return continuation(code);
}
function continuationClose(code) {
// After the closing condition, consume the rest of the line.
if (code === null || markdownLineEnding(code)) {
effects.exit("htmlFlowData");
return done(code);
}
effects.consume(code);
return continuationClose;
}
function done(code) {
effects.exit("htmlFlow");
return ok(code);
}
}
// Partial tokenizer: consume the current line ending as blank, then check
// (via `blankLine`) whether the following line is blank too.
function tokenizeNextBlank(effects, ok, nok) {
return start;
function start(code) {
effects.exit("htmlFlowData");
effects.enter("lineEndingBlank");
effects.consume(code);
effects.exit("lineEndingBlank");
return effects.attempt(blankLine, ok, nok);
}
}
// Construct for inline (text-level) HTML.
var htmlText = {
name: "htmlText",
tokenize: tokenizeHtmlText
};
function tokenizeHtmlText(effects, ok, nok) {
const self2 = this;
let marker;
let buffer2;
let index2;
let returnState;
return start;
function start(code) {
effects.enter("htmlText");
effects.enter("htmlTextData");
effects.consume(code);
return open;
}
function open(code) {
if (code === 33) {
effects.consume(code);
return declarationOpen;
}
if (code === 47) {
effects.consume(code);
return tagCloseStart;
}
if (code === 63) {
effects.consume(code);
return instruction;
}
if (asciiAlpha(code)) {
effects.consume(code);
return tagOpen;
}
return nok(code);
}
function declarationOpen(code) {
if (code === 45) {
effects.consume(code);
return commentOpen;
}
if (code === 91) {
effects.consume(code);
buffer2 = "CDATA[";
index2 = 0;
return cdataOpen;
}
if (asciiAlpha(code)) {
effects.consume(code);
return declaration;
}
return nok(code);
}
function commentOpen(code) {
if (code === 45) {
effects.consume(code);
return commentStart;
}
return nok(code);
}
function commentStart(code) {
if (code === null || code === 62) {
return nok(code);
}
if (code === 45) {
effects.consume(code);
return commentStartDash;
}
return comment(code);
}
function commentStartDash(code) {
if (code === null || code === 62) {
return nok(code);
}
return comment(code);
}
function comment(code) {
if (code === null) {
return nok(code);
}
if (code === 45) {
effects.consume(code);
return commentClose;
}
if (markdownLineEnding(code)) {
returnState = comment;
return atLineEnding(code);
}
effects.consume(code);
return comment;
}
function commentClose(code) {
if (code === 45) {
effects.consume(code);
return end;
}
return comment(code);
}
function cdataOpen(code) {
if (code === buffer2.charCodeAt(index2++)) {
effects.consume(code);
return index2 === buffer2.length ? cdata : cdataOpen;
}
return nok(code);
}
function cdata(code) {
if (code === null) {
return nok(code);
}
if (code === 93) {
effects.consume(code);
return cdataClose;
}
if (markdownLineEnding(code)) {
returnState = cdata;
return atLineEnding(code);
}
effects.consume(code);
return cdata;
}
function cdataClose(code) {
if (code === 93) {
effects.consume(code);
return cdataEnd;
}
return cdata(code);
}
function cdataEnd(code) {
if (code === 62) {
return end(code);
}
if (code === 93) {
effects.consume(code);
return cdataEnd;
}
return cdata(code);
}
function declaration(code) {
if (code === null || code === 62) {
return end(code);
}
if (markdownLineEnding(code)) {
returnState = declaration;
return atLineEnding(code);
}
effects.consume(code);
return declaration;
}
function instruction(code) {
if (code === null) {
return nok(code);
}
if (code === 63) {
effects.consume(code);
return instructionClose;
}
if (markdownLineEnding(code)) {
returnState = instruction;
return atLineEnding(code);
}
effects.consume(code);
return instruction;
}
function instructionClose(code) {
return code === 62 ? end(code) : instruction(code);
}
function tagCloseStart(code) {
if (asciiAlpha(code)) {
effects.consume(code);
return tagClose;
}
return nok(code);
}
function tagClose(code) {
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code);
return tagClose;
}
return tagCloseBetween(code);
}
function tagCloseBetween(code) {
if (markdownLineEnding(code)) {
returnState = tagCloseBetween;
return atLineEnding(code);
}
if (markdownSpace(code)) {
effects.consume(code);
return tagCloseBetween;
}
return end(code);
}
function tagOpen(code) {
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code);
return tagOpen;
}
if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code);
}
return nok(code);
}
function tagOpenBetween(code) {
if (code === 47) {
effects.consume(code);
return end;
}
if (code === 58 || code === 95 || asciiAlpha(code)) {
effects.consume(code);
return tagOpenAttributeName;
}
if (markdownLineEnding(code)) {
returnState = tagOpenBetween;
return atLineEnding(code);
}
if (markdownSpace(code)) {
effects.consume(code);
return tagOpenBetween;
}
return end(code);
}
function tagOpenAttributeName(code) {
if (code === 45 || code === 46 || code === 58 || code === 95 || asciiAlphanumeric(code)) {
effects.consume(code);
return tagOpenAttributeName;
}
return tagOpenAttributeNameAfter(code);
}
function tagOpenAttributeNameAfter(code) {
if (code === 61) {
effects.consume(code);
return tagOpenAttributeValueBefore;
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeNameAfter;
return atLineEnding(code);
}
if (markdownSpace(code)) {
effects.consume(code);
return tagOpenAttributeNameAfter;
}
return tagOpenBetween(code);
}
function tagOpenAttributeValueBefore(code) {
if (code === null || code === 60 || code === 61 || code === 62 || code === 96) {
return nok(code);
}
if (code === 34 || code === 39) {
effects.consume(code);
marker = code;
return tagOpenAttributeValueQuoted;
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeValueBefore;
return atLineEnding(code);
}
if (markdownSpace(code)) {
effects.consume(code);
return tagOpenAttributeValueBefore;
}
effects.consume(code);
marker = void 0;
return tagOpenAttributeValueUnquoted;
}
function tagOpenAttributeValueQuoted(code) {
if (code === marker) {
effects.consume(code);
return tagOpenAttributeValueQuotedAfter;
}
if (code === null) {
return nok(code);
}
if (markdownLineEnding(code)) {
returnState = tagOpenAttributeValueQuoted;
return atLineEnding(code);
}
effects.consume(code);
return tagOpenAttributeValueQuoted;
}
function tagOpenAttributeValueQuotedAfter(code) {
if (code === 62 || code === 47 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code);
}
return nok(code);
}
function tagOpenAttributeValueUnquoted(code) {
if (code === null || code === 34 || code === 39 || code === 60 || code === 61 || code === 96) {
return nok(code);
}
if (code === 62 || markdownLineEndingOrSpace(code)) {
return tagOpenBetween(code);
}
effects.consume(code);
return tagOpenAttributeValueUnquoted;
}
function atLineEnding(code) {
effects.exit("htmlTextData");
effects.enter("lineEnding");
effects.consume(code);
effects.exit("lineEnding");
return factorySpace(effects, afterPrefix, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
}
function afterPrefix(code) {
effects.enter("htmlTextData");
return returnState(code);
}
function end(code) {
if (code === 62) {
effects.consume(code);
effects.exit("htmlTextData");
effects.exit("htmlText");
return ok;
}
return nok(code);
}
}
// Construct for the closing `]` of a link/image label, including any
// resource `(...)` or reference `[...]` that follows it.
var labelEnd = {
  name: "labelEnd",
  tokenize: tokenizeLabelEnd,
  resolveTo: resolveToLabelEnd,
  resolveAll: resolveAllLabelEnd
};
// Inline resource directly after the label: `(destination "title")`.
var resourceConstruct = {
  tokenize: tokenizeResource
};
// Full reference after the label: an explicit `[identifier]`.
var fullReferenceConstruct = {
  tokenize: tokenizeFullReference
};
// Collapsed reference after the label: a bare `[]`.
var collapsedReferenceConstruct = {
  tokenize: tokenizeCollapsedReference
};
// Downgrade every label construct that never became part of a full link
// or image: its marker sub-events are dropped and the token itself is
// turned into plain `data`.
function resolveAllLabelEnd(events) {
  for (let position = 0; position < events.length; position++) {
    const token = events[position][1];
    if (
      token.type === "labelImage" ||
      token.type === "labelLink" ||
      token.type === "labelEnd"
    ) {
      // `labelImage` wraps four marker events (`!` + `[`); the other two
      // label kinds wrap two.
      events.splice(position + 1, token.type === "labelImage" ? 4 : 2);
      token.type = "data";
      // Skip the matching exit event for this (now data) token.
      position++;
    }
  }
  return events;
}
// Rewrite the events around a just-closed label end into a `link` or
// `image` group containing a `label` token and inner `labelText`.
function resolveToLabelEnd(events, context) {
  let index2 = events.length;
  let offset = 0;
  let token;
  let open;
  let close2;
  let media;
  // Walk backwards: first find the `labelEnd`, then the nearest
  // unbalanced `labelImage`/`labelLink` opening that matches it.
  while (index2--) {
    token = events[index2][1];
    if (open) {
      // After the opening is found, deactivate label-link openings seen
      // further back inside an enclosing link.
      if (token.type === "link" || token.type === "labelLink" && token._inactive) {
        break;
      }
      if (events[index2][0] === "enter" && token.type === "labelLink") {
        token._inactive = true;
      }
    } else if (close2) {
      if (events[index2][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
        open = index2;
        if (token.type !== "labelLink") {
          // `labelImage` has an extra `!` marker pair, shifting the
          // inner event offsets by two.
          offset = 2;
          break;
        }
      }
    } else if (token.type === "labelEnd") {
      close2 = index2;
    }
  }
  // The whole media construct, spanning opening marker to last event.
  const group = {
    type: events[open][1].type === "labelLink" ? "link" : "image",
    start: Object.assign({}, events[open][1].start),
    end: Object.assign({}, events[events.length - 1][1].end)
  };
  // The `[...]` label part (excludes any resource/reference).
  const label = {
    type: "label",
    start: Object.assign({}, events[open][1].start),
    end: Object.assign({}, events[close2][1].end)
  };
  // The text between the label markers.
  const text3 = {
    type: "labelText",
    start: Object.assign({}, events[open + offset + 2][1].end),
    end: Object.assign({}, events[close2 - 2][1].start)
  };
  media = [
    ["enter", group, context],
    ["enter", label, context]
  ];
  // Keep the opening marker events as-is.
  media = push(media, events.slice(open + 1, open + offset + 3));
  media = push(media, [["enter", text3, context]]);
  // Re-resolve the content between the markers as span content.
  media = push(media, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close2 - 3), context));
  media = push(media, [
    ["exit", text3, context],
    events[close2 - 2],
    events[close2 - 1],
    ["exit", label, context]
  ]);
  // Trailing resource/reference events, then close the group.
  media = push(media, events.slice(close2 + 1));
  media = push(media, [["exit", group, context]]);
  splice(events, open, events.length, media);
  return events;
}
// Tokenize the `]` closing a label, then any resource or reference.
function tokenizeLabelEnd(effects, ok, nok) {
  const self2 = this;
  let index2 = self2.events.length;
  let labelStart;
  let defined;
  // Find the most recent unbalanced label opening (`![` or `[`).
  while (index2--) {
    if ((self2.events[index2][1].type === "labelImage" || self2.events[index2][1].type === "labelLink") && !self2.events[index2][1]._balanced) {
      labelStart = self2.events[index2][1];
      break;
    }
  }
  return start;
  // At `]`: only valid when an open, active label start exists.
  function start(code) {
    if (!labelStart) {
      return nok(code);
    }
    if (labelStart._inactive)
      return balanced(code);
    // Does the text between the markers match a defined identifier?
    defined = self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize({
      start: labelStart.end,
      end: self2.now()
    })));
    effects.enter("labelEnd");
    effects.enter("labelMarker");
    effects.consume(code);
    effects.exit("labelMarker");
    effects.exit("labelEnd");
    return afterLabelEnd;
  }
  // After `]`: `(` (40) tries a resource, `[` (91) tries a full then a
  // collapsed reference; otherwise accept as a shortcut reference only
  // when the label identifier is defined.
  function afterLabelEnd(code) {
    if (code === 40) {
      return effects.attempt(resourceConstruct, ok, defined ? ok : balanced)(code);
    }
    if (code === 91) {
      return effects.attempt(fullReferenceConstruct, ok, defined ? effects.attempt(collapsedReferenceConstruct, ok, balanced) : balanced)(code);
    }
    return defined ? ok(code) : balanced(code);
  }
  // Mark the opening as balanced so it is not reused, then fail.
  function balanced(code) {
    labelStart._balanced = true;
    return nok(code);
  }
}
// Tokenize an inline resource: `(` destination [whitespace title] `)`.
function tokenizeResource(effects, ok, nok) {
  return start;
  // At `(`: open the resource.
  function start(code) {
    effects.enter("resource");
    effects.enter("resourceMarker");
    effects.consume(code);
    effects.exit("resourceMarker");
    return factoryWhitespace(effects, open);
  }
  // Either an immediately-closed empty resource `()` or a destination.
  function open(code) {
    if (code === 41) {
      return end(code);
    }
    return factoryDestination(effects, destinationAfter, nok, "resourceDestination", "resourceDestinationLiteral", "resourceDestinationLiteralMarker", "resourceDestinationRaw", "resourceDestinationString", 3)(code);
  }
  // Whitespace after the destination may introduce a title.
  function destinationAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, between)(code) : end(code);
  }
  // `"` (34), `'` (39), or `(` (40) starts a resource title.
  function between(code) {
    if (code === 34 || code === 39 || code === 40) {
      return factoryTitle(effects, factoryWhitespace(effects, end), nok, "resourceTitle", "resourceTitleMarker", "resourceTitleString")(code);
    }
    return end(code);
  }
  // Only `)` (41) closes the resource.
  function end(code) {
    if (code === 41) {
      effects.enter("resourceMarker");
      effects.consume(code);
      effects.exit("resourceMarker");
      effects.exit("resource");
      return ok;
    }
    return nok(code);
  }
}
// Tokenize a full reference (`[identifier]`) after a label.
function tokenizeFullReference(effects, ok, nok) {
  const self2 = this;
  return start;
  // Parse the bracketed label via the shared label factory.
  function start(code) {
    return factoryLabel.call(self2, effects, afterLabel, nok, "reference", "referenceMarker", "referenceString")(code);
  }
  // Succeed only when the serialized label (brackets stripped via
  // `.slice(1, -1)`) normalizes to a known definition identifier.
  function afterLabel(code) {
    return self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1))) ? ok(code) : nok(code);
  }
}
// Tokenize a collapsed reference: exactly `[]` right after a label.
function tokenizeCollapsedReference(effects, ok, nok) {
  return openingBracket;
  // At `[`: record the opening reference marker.
  function openingBracket(code) {
    effects.enter("reference");
    effects.enter("referenceMarker");
    effects.consume(code);
    effects.exit("referenceMarker");
    return closingBracket;
  }
  // Only an immediate `]` (93) completes the collapsed reference;
  // anything else fails the construct.
  function closingBracket(code) {
    if (code !== 93) {
      return nok(code);
    }
    effects.enter("referenceMarker");
    effects.consume(code);
    effects.exit("referenceMarker");
    effects.exit("reference");
    return ok;
  }
}
// Construct for `![`, which may start an image.
var labelStartImage = {
  name: "labelStartImage",
  tokenize: tokenizeLabelStartImage,
  resolveAll: labelEnd.resolveAll
};
function tokenizeLabelStartImage(effects, ok, nok) {
  const self2 = this;
  return start;
  // At `!`: record the image marker.
  function start(code) {
    effects.enter("labelImage");
    effects.enter("labelImageMarker");
    effects.consume(code);
    effects.exit("labelImageMarker");
    return open;
  }
  // `[` (91) must immediately follow the `!`.
  function open(code) {
    if (code === 91) {
      effects.enter("labelMarker");
      effects.consume(code);
      effects.exit("labelMarker");
      effects.exit("labelImage");
      return after;
    }
    return nok(code);
  }
  // `^` (94) defers to a footnote extension when one is installed.
  function after(code) {
    return code === 94 && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code) : ok(code);
  }
}
// Construct for `[`, which may start a link.
var labelStartLink = {
  name: "labelStartLink",
  tokenize: tokenizeLabelStartLink,
  resolveAll: labelEnd.resolveAll
};
// Tokenize the `[` that may start a link label.
function tokenizeLabelStartLink(effects, ok, nok) {
  const context = this;
  return begin;
  // At `[`: record the label-link opening and its marker.
  function begin(code) {
    effects.enter("labelLink");
    effects.enter("labelMarker");
    effects.consume(code);
    effects.exit("labelMarker");
    effects.exit("labelLink");
    return afterMarker;
  }
  // `^` (94) defers to a footnote extension when one is installed.
  function afterMarker(code) {
    if (code === 94 && "_hiddenFootnoteSupport" in context.parser.constructs) {
      return nok(code);
    }
    return ok(code);
  }
}
// Construct for a plain line ending inside text.
var lineEnding = {
  name: "lineEnding",
  tokenize: tokenizeLineEnding
};
// Consume one line ending, then any indentation of the next line as a
// `linePrefix`.
function tokenizeLineEnding(effects, ok) {
  return start;
  function start(code) {
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return factorySpace(effects, ok, "linePrefix");
  }
}
// Construct for a thematic break: three or more matching `*` (42),
// `-` (45), or `_` (95) markers, optionally space-separated, alone on
// a line.
var thematicBreak = {
  name: "thematicBreak",
  tokenize: tokenizeThematicBreak
};
function tokenizeThematicBreak(effects, ok, nok) {
  let size = 0;
  let marker;
  return start;
  function start(code) {
    effects.enter("thematicBreak");
    // Remember the first marker; every later marker must match it.
    marker = code;
    return atBreak(code);
  }
  function atBreak(code) {
    if (code === marker) {
      effects.enter("thematicBreakSequence");
      return sequence(code);
    }
    // Interior spaces between marker runs are allowed.
    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, "whitespace")(code);
    }
    // Done: require at least three markers and EOL/EOF right after.
    if (size < 3 || code !== null && !markdownLineEnding(code)) {
      return nok(code);
    }
    effects.exit("thematicBreak");
    return ok(code);
  }
  function sequence(code) {
    if (code === marker) {
      effects.consume(code);
      size++;
      return sequence;
    }
    effects.exit("thematicBreakSequence");
    return atBreak(code);
  }
}
// List container construct with its continuation and exit hooks.
var list = {
  name: "list",
  tokenize: tokenizeListStart,
  continuation: {
    tokenize: tokenizeListContinuation
  },
  exit: tokenizeListEnd
};
// Partial: whitespace right after a list item marker.
var listItemPrefixWhitespaceConstruct = {
  tokenize: tokenizeListItemPrefixWhitespace,
  partial: true
};
// Partial: indentation that keeps later lines inside the current item.
var indentConstruct = {
  tokenize: tokenizeIndent,
  partial: true
};
// Tokenize the start of a list item: `*`/`+`/`-` (42/43/45) for
// unordered lists, or digits plus `)`/`.` (41/46) for ordered lists.
function tokenizeListStart(effects, ok, nok) {
  const self2 = this;
  const tail = self2.events[self2.events.length - 1];
  // Width of any line prefix already consumed before the marker.
  let initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
  // Number of digits in an ordered list value.
  let size = 0;
  return start;
  function start(code) {
    // Reuse the open container's kind, or infer from the first code.
    const kind = self2.containerState.type || (code === 42 || code === 43 || code === 45 ? "listUnordered" : "listOrdered");
    if (kind === "listUnordered" ? !self2.containerState.marker || code === self2.containerState.marker : asciiDigit(code)) {
      if (!self2.containerState.type) {
        self2.containerState.type = kind;
        effects.enter(kind, {
          _container: true
        });
      }
      if (kind === "listUnordered") {
        effects.enter("listItemPrefix");
        // `*` and `-` could also be a thematic break; check that first.
        return code === 42 || code === 45 ? effects.check(thematicBreak, nok, atMarker)(code) : atMarker(code);
      }
      // An ordered list interrupting content must start at `1` (49).
      if (!self2.interrupt || code === 49) {
        effects.enter("listItemPrefix");
        effects.enter("listItemValue");
        return inside(code);
      }
    }
    return nok(code);
  }
  // Consume up to 10 digits of an ordered list value.
  function inside(code) {
    if (asciiDigit(code) && ++size < 10) {
      effects.consume(code);
      return inside;
    }
    if ((!self2.interrupt || size < 2) && (self2.containerState.marker ? code === self2.containerState.marker : code === 41 || code === 46)) {
      effects.exit("listItemValue");
      return atMarker(code);
    }
    return nok(code);
  }
  function atMarker(code) {
    effects.enter("listItemMarker");
    effects.consume(code);
    effects.exit("listItemMarker");
    // First item fixes the marker for the whole list.
    self2.containerState.marker = self2.containerState.marker || code;
    return effects.check(blankLine, self2.interrupt ? nok : onBlank, effects.attempt(listItemPrefixWhitespaceConstruct, endOfPrefix, otherPrefix));
  }
  // Marker followed by a blank line: item starts empty.
  function onBlank(code) {
    self2.containerState.initialBlankLine = true;
    initialSize++;
    return endOfPrefix(code);
  }
  // Marker followed by a single space when the whitespace partial failed.
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter("listItemPrefixWhitespace");
      effects.consume(code);
      effects.exit("listItemPrefixWhitespace");
      return endOfPrefix;
    }
    return nok(code);
  }
  // Record the full prefix width; continuation lines must match it.
  function endOfPrefix(code) {
    self2.containerState.size = initialSize + self2.sliceSerialize(effects.exit("listItemPrefix"), true).length;
    return ok(code);
  }
}
// Decide whether a new line continues the current list item.
function tokenizeListContinuation(effects, ok, nok) {
  const self2 = this;
  self2.containerState._closeFlow = void 0;
  return effects.check(blankLine, onBlank, notBlank);
  // Blank line: stays in the item, but remember it — a second blank
  // line cannot be followed by lazily-indented content.
  function onBlank(code) {
    self2.containerState.furtherBlankLines = self2.containerState.furtherBlankLines || self2.containerState.initialBlankLine;
    return factorySpace(effects, ok, "listItemIndent", self2.containerState.size + 1)(code);
  }
  function notBlank(code) {
    if (self2.containerState.furtherBlankLines || !markdownSpace(code)) {
      self2.containerState.furtherBlankLines = void 0;
      self2.containerState.initialBlankLine = void 0;
      return notInCurrentItem(code);
    }
    self2.containerState.furtherBlankLines = void 0;
    self2.containerState.initialBlankLine = void 0;
    // Enough indentation keeps the line in the current item.
    return effects.attempt(indentConstruct, ok, notInCurrentItem)(code);
  }
  // Not a continuation line: close the item's flow and see whether a
  // new sibling item starts here instead.
  function notInCurrentItem(code) {
    self2.containerState._closeFlow = true;
    self2.interrupt = void 0;
    return factorySpace(effects, effects.attempt(list, ok, nok), "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code);
  }
}
// Partial: succeed only when the line is indented exactly as far as the
// current list item's prefix (`containerState.size`).
function tokenizeIndent(effects, ok, nok) {
  const self2 = this;
  return factorySpace(effects, afterPrefix, "listItemIndent", self2.containerState.size + 1);
  function afterPrefix(code) {
    const tail = self2.events[self2.events.length - 1];
    return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], true).length === self2.containerState.size ? ok(code) : nok(code);
  }
}
// Close the open list container (`listOrdered` or `listUnordered`).
function tokenizeListEnd(effects) {
  effects.exit(this.containerState.type);
}
// Partial: whitespace after a list item marker — at most 4+1 columns
// (less when indented code is disabled), and not followed by more
// whitespace.
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
  const self2 = this;
  return factorySpace(effects, afterPrefix, "listItemPrefixWhitespace", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4 + 1);
  function afterPrefix(code) {
    const tail = self2.events[self2.events.length - 1];
    return !markdownSpace(code) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok(code) : nok(code);
  }
}
// Construct for a setext heading underline (`===` or `---`).
var setextUnderline = {
  name: "setextUnderline",
  tokenize: tokenizeSetextUnderline,
  resolveTo: resolveToSetextUnderline
};
// Turn the preceding paragraph (inside its `content` wrapper) into a
// `setextHeading` now that an underline was found.
function resolveToSetextUnderline(events, context) {
  let index2 = events.length;
  let content3;
  let text3;
  let definition2;
  // Walk backwards to locate the enclosing `content` enter, the
  // `paragraph` enter, and any trailing `definition` exit.
  while (index2--) {
    if (events[index2][0] === "enter") {
      if (events[index2][1].type === "content") {
        content3 = index2;
        break;
      }
      if (events[index2][1].type === "paragraph") {
        text3 = index2;
      }
    } else {
      if (events[index2][1].type === "content") {
        // Drop the content exit; the heading replaces it.
        events.splice(index2, 1);
      }
      if (!definition2 && events[index2][1].type === "definition") {
        definition2 = index2;
      }
    }
  }
  const heading = {
    type: "setextHeading",
    start: Object.assign({}, events[text3][1].start),
    end: Object.assign({}, events[events.length - 1][1].end)
  };
  // The paragraph becomes the heading's text.
  events[text3][1].type = "setextHeadingText";
  if (definition2) {
    // Definitions before the paragraph stay outside the heading: open
    // the heading after them and re-close the content there.
    events.splice(text3, 0, ["enter", heading, context]);
    events.splice(definition2 + 1, 0, ["exit", events[content3][1], context]);
    events[content3][1].end = Object.assign({}, events[definition2][1].end);
  } else {
    // No definitions: the content token itself becomes the heading.
    events[content3][1] = heading;
  }
  events.push(["exit", heading, context]);
  return events;
}
// Tokenize a setext heading underline: a run of one marker (`=` or
// `-`), optionally followed by trailing whitespace, ending the line.
function tokenizeSetextUnderline(effects, ok, nok) {
  const self2 = this;
  let index2 = self2.events.length;
  let marker;
  let paragraph;
  // Look past line endings/prefixes/content wrappers: the underline is
  // only valid directly after a paragraph.
  while (index2--) {
    if (self2.events[index2][1].type !== "lineEnding" && self2.events[index2][1].type !== "linePrefix" && self2.events[index2][1].type !== "content") {
      paragraph = self2.events[index2][1].type === "paragraph";
      break;
    }
  }
  return start;
  function start(code) {
    // Lazy lines (inside containers) cannot hold an underline.
    if (!self2.parser.lazy[self2.now().line] && (self2.interrupt || paragraph)) {
      effects.enter("setextHeadingLine");
      effects.enter("setextHeadingLineSequence");
      marker = code;
      return closingSequence(code);
    }
    return nok(code);
  }
  function closingSequence(code) {
    if (code === marker) {
      effects.consume(code);
      return closingSequence;
    }
    effects.exit("setextHeadingLineSequence");
    return factorySpace(effects, closingSequenceEnd, "lineSuffix")(code);
  }
  // Only EOF or a line ending may follow the sequence/suffix.
  function closingSequenceEnd(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("setextHeadingLine");
      return ok(code);
    }
    return nok(code);
  }
}
// Initial content type for flow (block-level) parsing.
var flow = {
  tokenize: initializeFlow
};
// Per line: try a blank line, then the flow-initial constructs, then
// (after a line prefix) the regular flow constructs, falling back to
// generic `content`.
function initializeFlow(effects) {
  const self2 = this;
  const initial = effects.attempt(blankLine, atBlankEnding, effects.attempt(this.parser.constructs.flowInitial, afterConstruct, factorySpace(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt(content2, afterConstruct)), "linePrefix")));
  return initial;
  // After a blank line: emit `lineEndingBlank` and restart (or stop at
  // EOF, signalled by `null`).
  function atBlankEnding(code) {
    if (code === null) {
      effects.consume(code);
      return;
    }
    effects.enter("lineEndingBlank");
    effects.consume(code);
    effects.exit("lineEndingBlank");
    self2.currentConstruct = void 0;
    return initial;
  }
  // After a completed flow construct: emit the line ending and restart.
  function afterConstruct(code) {
    if (code === null) {
      effects.consume(code);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    self2.currentConstruct = void 0;
    return initial;
  }
}
// Bare resolver that only merges adjacent data events.
var resolver = {
  resolveAll: createResolver()
};
// The two span-level content types.
var string = initializeFactory("string");
var text = initializeFactory("text");
// Build a span-level content tokenizer ("string" or "text"): try the
// registered constructs at each character; collect everything else as
// plain `data`. Only "text" also resolves trailing line suffixes.
function initializeFactory(field) {
  return {
    tokenize: initializeText,
    resolveAll: createResolver(field === "text" ? resolveAllLineSuffixes : void 0)
  };
  function initializeText(effects) {
    const self2 = this;
    const constructs2 = this.parser.constructs[field];
    const text3 = effects.attempt(constructs2, start, notText);
    return start;
    function start(code) {
      return atBreak(code) ? text3(code) : notText(code);
    }
    // Not at a construct: accumulate a `data` token (or stop at EOF).
    function notText(code) {
      if (code === null) {
        effects.consume(code);
        return;
      }
      effects.enter("data");
      effects.consume(code);
      return data;
    }
    function data(code) {
      if (atBreak(code)) {
        effects.exit("data");
        return text3(code);
      }
      effects.consume(code);
      return data;
    }
    // Could a construct start at this character? EOF (`null`) always
    // breaks; otherwise a candidate must pass its `previous` guard.
    function atBreak(code) {
      if (code === null) {
        return true;
      }
      const list2 = constructs2[code];
      let index2 = -1;
      if (list2) {
        while (++index2 < list2.length) {
          const item = list2[index2];
          if (!item.previous || item.previous.call(self2, self2.previous)) {
            return true;
          }
        }
      }
      return false;
    }
  }
}
// Make a `resolveAll` that merges each run of adjacent `data` events
// into a single enter/exit pair, then applies an optional extra
// resolver.
function createResolver(extraResolver) {
  return resolveAllText;
  function resolveAllText(events, context) {
    let index2 = -1;
    let enter;
    // `<=` on purpose: one extra pass past the end flushes a run of
    // data events that reaches the end of the list.
    while (++index2 <= events.length) {
      if (enter === void 0) {
        if (events[index2] && events[index2][1].type === "data") {
          enter = index2;
          index2++;
        }
      } else if (!events[index2] || events[index2][1].type !== "data") {
        // More than one enter/exit pair in the run: stretch the first
        // token to the run's end and drop the redundant events.
        if (index2 !== enter + 2) {
          events[enter][1].end = events[index2 - 1][1].end;
          events.splice(enter + 2, index2 - enter - 2);
          index2 = enter + 2;
        }
        enter = void 0;
      }
    }
    return extraResolver ? extraResolver(events, context) : events;
  }
}
// Split trailing whitespace off data tokens before line endings (and at
// the very end), emitting `lineSuffix` — or `hardBreakTrailing` for two
// or more spaces (no tabs) before a line ending.
function resolveAllLineSuffixes(events, context) {
  let eventIndex = -1;
  while (++eventIndex <= events.length) {
    if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
      const data = events[eventIndex - 1][1];
      const chunks = context.sliceStream(data);
      let index2 = chunks.length;
      let bufferIndex = -1;
      let size = 0;
      let tabs;
      // Walk the chunks backwards, counting trailing spaces (32) and
      // virtual tab chunks (-2/-1).
      while (index2--) {
        const chunk = chunks[index2];
        if (typeof chunk === "string") {
          bufferIndex = chunk.length;
          while (chunk.charCodeAt(bufferIndex - 1) === 32) {
            size++;
            bufferIndex--;
          }
          // Non-space content found inside this chunk: stop.
          if (bufferIndex)
            break;
          bufferIndex = -1;
        } else if (chunk === -2) {
          tabs = true;
          size++;
        } else if (chunk === -1) {
          // Virtual space after a tab: skip, already counted.
        } else {
          // A non-whitespace character chunk: step back in front of it.
          index2++;
          break;
        }
      }
      if (size) {
        const token = {
          type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
          start: {
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size,
            _index: data.start._index + index2,
            _bufferIndex: index2 ? bufferIndex : data.start._bufferIndex + bufferIndex
          },
          end: Object.assign({}, data.end)
        };
        // Shrink the data token so it ends where the suffix begins.
        data.end = Object.assign({}, token.start);
        if (data.start.offset === data.end.offset) {
          // Data was all whitespace: replace it with the suffix token.
          Object.assign(data, token);
        } else {
          events.splice(eventIndex, 0, ["enter", token, context], ["exit", token, context]);
          eventIndex += 2;
        }
      }
      eventIndex++;
    }
  }
  return events;
}
// Create a tokenizer for one content type: a state machine fed chunks
// via `write`, producing `(enter|exit, token, context)` events.
function createTokenizer(parser, initialize, from) {
  // Current place in the chunk stream: line/column/offset plus the
  // internal `_index` (chunk) / `_bufferIndex` (within-string) cursor.
  let point2 = Object.assign(from ? Object.assign({}, from) : {
    line: 1,
    column: 1,
    offset: 0
  }, {
    _index: 0,
    _bufferIndex: -1
  });
  // Per-line columns to jump to, registered via `defineSkip`.
  const columnStart = {};
  // Constructs whose `resolveAll` must run once at the end.
  const resolveAllConstructs = [];
  let chunks = [];
  // Open tokens, entered but not yet exited.
  let stack = [];
  let consumed = true;
  // The side-effect interface handed to every construct's `tokenize`.
  const effects = {
    consume,
    enter,
    exit: exit2,
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    interrupt: constructFactory(onsuccessfulcheck, {
      interrupt: true
    })
  };
  // The tokenize context, also the public handle returned to callers.
  const context = {
    previous: null,
    code: null,
    containerState: {},
    events: [],
    parser,
    sliceStream,
    sliceSerialize,
    now,
    defineSkip,
    write
  };
  // Current state function; called once per code in `go`.
  let state = initialize.tokenize.call(context, effects);
  let expectedCode;
  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize);
  }
  return context;
  // Feed more chunks; events are only returned once the stream is done
  // (the last chunk is `null`).
  function write(slice) {
    chunks = push(chunks, slice);
    main();
    if (chunks[chunks.length - 1] !== null) {
      return [];
    }
    addResult(initialize, 0);
    context.events = resolveAll(resolveAllConstructs, context.events, context);
    return context.events;
  }
  // Get the source text of a token (expanding virtual tab chunks when
  // `expandTabs` is set).
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs);
  }
  // Get the raw chunks covered by a token.
  function sliceStream(token) {
    return sliceChunks(chunks, token);
  }
  // Snapshot of the current point.
  function now() {
    return Object.assign({}, point2);
  }
  // Register a column to skip to when a line restarts at column 1.
  function defineSkip(value2) {
    columnStart[value2.line] = value2.column;
    accountForPotentialSkip();
  }
  // Run the state machine over every not-yet-consumed code.
  function main() {
    let chunkIndex;
    while (point2._index < chunks.length) {
      const chunk = chunks[point2._index];
      if (typeof chunk === "string") {
        chunkIndex = point2._index;
        if (point2._bufferIndex < 0) {
          point2._bufferIndex = 0;
        }
        // A state may restore() to another chunk; re-check `_index`.
        while (point2._index === chunkIndex && point2._bufferIndex < chunk.length) {
          go(chunk.charCodeAt(point2._bufferIndex));
        }
      } else {
        go(chunk);
      }
    }
  }
  // Feed one code to the current state.
  function go(code) {
    consumed = void 0;
    expectedCode = code;
    state = state(code);
  }
  // Actually move past a code, updating the point.
  function consume(code) {
    if (markdownLineEnding(code)) {
      point2.line++;
      point2.column = 1;
      // -3 is CRLF and spans two characters.
      point2.offset += code === -3 ? 2 : 1;
      accountForPotentialSkip();
    } else if (code !== -1) {
      // -1 (virtual tab padding) occupies no source position.
      point2.column++;
      point2.offset++;
    }
    // Advance the chunk cursor.
    if (point2._bufferIndex < 0) {
      point2._index++;
    } else {
      point2._bufferIndex++;
      if (point2._bufferIndex === chunks[point2._index].length) {
        point2._bufferIndex = -1;
        point2._index++;
      }
    }
    context.previous = code;
    consumed = true;
  }
  // Open a token of `type` at the current point.
  function enter(type, fields) {
    const token = fields || {};
    token.type = type;
    token.start = now();
    context.events.push(["enter", token, context]);
    stack.push(token);
    return token;
  }
  // Close the most recently opened token.
  function exit2(type) {
    const token = stack.pop();
    token.end = now();
    context.events.push(["exit", token, context]);
    return token;
  }
  // `attempt` keeps the events produced by a successful construct.
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from);
  }
  // `check`/`interrupt` discard them: only the verdict matters.
  function onsuccessfulcheck(_, info) {
    info.restore();
  }
  // Build `attempt`/`check`/`interrupt`: try constructs one by one,
  // restoring the tokenizer state after each failure.
  function constructFactory(onreturn, fields) {
    return hook;
    function hook(constructs2, returnState, bogusState) {
      let listOfConstructs;
      let constructIndex;
      let currentConstruct;
      let info;
      return Array.isArray(constructs2) ? handleListOfConstructs(constructs2) : "tokenize" in constructs2 ? handleListOfConstructs([constructs2]) : handleMapOfConstructs(constructs2);
      // Map keyed by code: merge code-specific and `null` (any-code)
      // candidates for the current code.
      function handleMapOfConstructs(map) {
        return start;
        function start(code) {
          const def = code !== null && map[code];
          const all2 = code !== null && map.null;
          const list2 = [
            ...Array.isArray(def) ? def : def ? [def] : [],
            ...Array.isArray(all2) ? all2 : all2 ? [all2] : []
          ];
          return handleListOfConstructs(list2)(code);
        }
      }
      function handleListOfConstructs(list2) {
        listOfConstructs = list2;
        constructIndex = 0;
        if (list2.length === 0) {
          return bogusState;
        }
        return handleConstruct(list2[constructIndex]);
      }
      function handleConstruct(construct) {
        return start;
        function start(code) {
          // Snapshot so a failing construct can be rolled back.
          info = store();
          currentConstruct = construct;
          if (!construct.partial) {
            context.currentConstruct = construct;
          }
          // Honour the `disable` extension list by name.
          if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
            return nok(code);
          }
          return construct.tokenize.call(fields ? Object.assign(Object.create(context), fields) : context, effects, ok, nok)(code);
        }
      }
      function ok(code) {
        consumed = true;
        onreturn(currentConstruct, info);
        return returnState;
      }
      // Failure: roll back and try the next candidate, if any.
      function nok(code) {
        consumed = true;
        info.restore();
        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex]);
        }
        return bogusState;
      }
    }
  }
  // Merge a successful construct's events into the stream, running its
  // `resolve`/`resolveTo` and queueing its `resolveAll`.
  function addResult(construct, from2) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct);
    }
    if (construct.resolve) {
      splice(context.events, from2, context.events.length - from2, construct.resolve(context.events.slice(from2), context));
    }
    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context);
    }
  }
  // Snapshot point/previous/construct/events/stack, with a `restore`
  // that rewinds all of them.
  function store() {
    const startPoint = now();
    const startPrevious = context.previous;
    const startCurrentConstruct = context.currentConstruct;
    const startEventsIndex = context.events.length;
    const startStack = Array.from(stack);
    return {
      restore,
      from: startEventsIndex
    };
    function restore() {
      point2 = startPoint;
      context.previous = startPrevious;
      context.currentConstruct = startCurrentConstruct;
      context.events.length = startEventsIndex;
      stack = startStack;
      accountForPotentialSkip();
    }
  }
  // Jump to a registered column at the start of a skipped line.
  function accountForPotentialSkip() {
    if (point2.line in columnStart && point2.column < 2) {
      point2.column = columnStart[point2.line];
      point2.offset += columnStart[point2.line] - 1;
    }
  }
}
// Extract the chunks covered by `token`, honouring the sub-chunk
// offsets (`_bufferIndex`) stored on its start and end points.
function sliceChunks(chunks, token) {
  const { _index: startIndex, _bufferIndex: startBufferIndex } = token.start;
  const { _index: endIndex, _bufferIndex: endBufferIndex } = token.end;
  // Start and end fall inside the same string chunk: one partial slice.
  if (startIndex === endIndex) {
    return [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
  }
  const view = chunks.slice(startIndex, endIndex);
  // Trim the head chunk when the token starts partway into it.
  if (startBufferIndex > -1) {
    view[0] = view[0].slice(startBufferIndex);
  }
  // Append the partial tail chunk when the token ends inside one.
  if (endBufferIndex > 0) {
    view.push(chunks[endIndex].slice(0, endBufferIndex));
  }
  return view;
}
// Turn chunks back into a string. Negative codes are virtual
// characters: -5 CR, -4 LF, -3 CRLF, -2 tab, -1 tab padding.
function serializeChunks(chunks, expandTabs) {
  let index2 = -1;
  const result = [];
  // Whether the previous chunk was a tab (-2); its -1 padding chunks
  // are then skipped unless tabs are being expanded to spaces.
  let atTab;
  while (++index2 < chunks.length) {
    const chunk = chunks[index2];
    let value2;
    if (typeof chunk === "string") {
      value2 = chunk;
    } else
      switch (chunk) {
        case -5: {
          value2 = "\r";
          break;
        }
        case -4: {
          value2 = "\n";
          break;
        }
        case -3: {
          value2 = "\r\n";
          break;
        }
        case -2: {
          // NOTE(review): upstream micromark emits "\t" in the
          // non-expanding branch here; this bundled literal may have
          // been mangled to a space — verify against micromark's
          // create-tokenizer source.
          value2 = expandTabs ? " " : " ";
          break;
        }
        case -1: {
          if (!expandTabs && atTab)
            continue;
          value2 = " ";
          break;
        }
        default: {
          value2 = String.fromCharCode(chunk);
        }
      }
    atTab = chunk === -2;
    result.push(value2);
  }
  return result.join("");
}
// Default CommonMark constructs, keyed by the character code that can
// start them. Negative keys are virtual codes (-5/-4/-3 line endings,
// -2/-1 tab and tab padding); a `null` key applies at any character.
var constructs_exports = {};
__export(constructs_exports, {
  attentionMarkers: () => attentionMarkers,
  contentInitial: () => contentInitial,
  disable: () => disable,
  document: () => document3,
  flow: () => flow2,
  flowInitial: () => flowInitial,
  insideSpan: () => insideSpan,
  string: () => string2,
  text: () => text2
});
// Containers: lists (`*` `+` `-` and digits) and block quotes (`>`).
var document3 = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote
};
// Before content: definitions start with `[`.
var contentInitial = {
  [91]: definition
};
// At a line start, before flow: indented code (tab or space).
var flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented
};
// Flow (block) constructs: `#` headings, `*`/`-`/`_` breaks, `-`/`=`
// setext underlines, `<` HTML, and ` ``` `/`~~~` fenced code.
var flow2 = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced
};
// "string" content (e.g. titles): only `&` references and `\` escapes.
var string2 = {
  [38]: characterReference,
  [92]: characterEscape
};
// "text" content: everything span-level.
var text2 = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText
};
// Resolvers re-run on the content inside labels.
var insideSpan = {
  null: [attention, resolver]
};
// Characters that can open/close attention runs (`*`, `_`).
var attentionMarkers = {
  null: [42, 95]
};
// Names of constructs to turn off (filled by extensions).
var disable = {
  null: []
};
// Create a micromark parser: default constructs merged with any
// `options.extensions`, plus one tokenizer factory per content type.
function parse2(options = {}) {
  const constructs2 = combineExtensions([constructs_exports].concat(options.extensions || []));
  const parser = {
    // Identifiers of definitions seen so far (filled during parsing).
    defined: [],
    // Lines that were parsed lazily inside containers.
    lazy: {},
    constructs: constructs2,
    content: create2(content),
    document: create2(document2),
    flow: create2(flow),
    string: create2(string),
    text: create2(text)
  };
  return parser;
  // Each factory creates a fresh tokenizer, optionally resuming from a
  // given point.
  function create2(initial) {
    return creator;
    function creator(from) {
      return createTokenizer(parser, initial, from);
    }
  }
}
// Characters that need special handling in the input stream.
var search = /[\0\t\n\r]/g;
// Create a streaming preprocessor: splits incoming text into chunks,
// replacing NUL with U+FFFD, tabs with virtual codes (-2 plus -1
// padding up to the next 4-column tab stop), and line endings with
// -5 (CR), -4 (LF), or -3 (CRLF). A final `null` chunk marks EOF.
function preprocess() {
  let column = 1;
  // Carry-over text not yet terminated by a special character.
  let buffer2 = "";
  let start = true;
  // A CR was seen but we do not yet know if LF follows (CRLF).
  let atCarriageReturn;
  return preprocessor;
  function preprocessor(value2, encoding, end) {
    const chunks = [];
    let match;
    let next;
    let startPosition;
    let endPosition;
    let code;
    value2 = buffer2 + value2.toString(encoding);
    startPosition = 0;
    buffer2 = "";
    if (start) {
      // Skip a leading byte order mark (U+FEFF).
      if (value2.charCodeAt(0) === 65279) {
        startPosition++;
      }
      start = void 0;
    }
    while (startPosition < value2.length) {
      search.lastIndex = startPosition;
      match = search.exec(value2);
      endPosition = match && match.index !== void 0 ? match.index : value2.length;
      code = value2.charCodeAt(endPosition);
      if (!match) {
        // No special character left: hold the rest for the next call.
        buffer2 = value2.slice(startPosition);
        break;
      }
      if (code === 10 && startPosition === endPosition && atCarriageReturn) {
        // LF directly after a held CR: emit a single CRLF chunk.
        chunks.push(-3);
        atCarriageReturn = void 0;
      } else {
        if (atCarriageReturn) {
          // The held CR was a lone CR.
          chunks.push(-5);
          atCarriageReturn = void 0;
        }
        if (startPosition < endPosition) {
          chunks.push(value2.slice(startPosition, endPosition));
          column += endPosition - startPosition;
        }
        switch (code) {
          case 0: {
            // NUL: replaced with U+FFFD per CommonMark.
            chunks.push(65533);
            column++;
            break;
          }
          case 9: {
            // Tab: -2 plus -1 padding up to the next 4-column stop.
            next = Math.ceil(column / 4) * 4;
            chunks.push(-2);
            while (column++ < next)
              chunks.push(-1);
            break;
          }
          case 10: {
            chunks.push(-4);
            column = 1;
            break;
          }
          default: {
            // CR: hold it to see whether an LF follows.
            atCarriageReturn = true;
            column = 1;
          }
        }
      }
      startPosition = endPosition + 1;
    }
    if (end) {
      // Flush: a held CR, any buffered text, then the EOF marker.
      if (atCarriageReturn)
        chunks.push(-5);
      if (buffer2)
        chunks.push(buffer2);
      chunks.push(null);
    }
    return chunks;
  }
}
// Repeatedly expand subcontent until `subtokenize` reports it is done.
function postprocess(events) {
  while (!subtokenize(events)) {
  }
  return events;
}
function decodeNumericCharacterReference(value2, base2) {
const code = Number.parseInt(value2, base2);
if (code < 9 || code === 11 || code > 13 && code < 32 || code > 126 && code < 160 || code > 55295 && code < 57344 || code > 64975 && code < 65008 || (code & 65535) === 65535 || (code & 65535) === 65534 || code > 1114111) {
return "\uFFFD";
}
return String.fromCharCode(code);
}
// Matches either a character escape (backslash + ASCII punctuation) or
// a character reference (`&#...;`, `&#x...;`, or a named `&...;`).
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
// Replace all character escapes/references in `value2` with the
// characters they represent.
function decodeString(value2) {
  return value2.replace(characterEscapeOrReference, decode);
}
// Replacer: `$1` is an escaped punctuation character, `$2` the body of
// a reference (without `&`/`;`).
function decode($0, $1, $2) {
  if ($1) {
    return $1;
  }
  const head = $2.charCodeAt(0);
  // `#` (35): numeric reference; hex when `x`/`X` (120/88) follows.
  if (head === 35) {
    const head2 = $2.charCodeAt(1);
    const hex = head2 === 120 || head2 === 88;
    return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10);
  }
  // Named reference; keep the original text when unknown.
  return decodeEntity($2) || $0;
}
var own2 = {}.hasOwnProperty;
// Serialize a unist node, position, or point as human-readable text:
// `line:column` for a point, `start-end` for a position (for example
// `1:2-3:4`). Anything unusable serializes to the empty string.
function stringifyPosition(value2) {
  if (!value2 || typeof value2 !== "object") {
    return "";
  }
  const isNode = own2.call(value2, "position") || own2.call(value2, "type");
  if (isNode) {
    return position(value2.position);
  }
  const isPosition = own2.call(value2, "start") || own2.call(value2, "end");
  if (isPosition) {
    return position(value2);
  }
  const isPoint = own2.call(value2, "line") || own2.call(value2, "column");
  if (isPoint) {
    return point(value2);
  }
  return "";
}
// Format a point as `line:column`, defaulting missing coordinates to 1.
function point(point2) {
  const line = index(point2 && point2.line);
  const column = index(point2 && point2.column);
  return line + ":" + column;
}
// Format a position as `start-end`, each side formatted as a point.
function position(pos) {
  const start = point(pos && pos.start);
  const end = point(pos && pos.end);
  return start + "-" + end;
}
// Keep a coordinate only when it is a truthy number; otherwise 1.
function index(value2) {
  if (value2 && typeof value2 === "number") {
    return value2;
  }
  return 1;
}
// Own-property check used by the compiler's handler lookup and by
// `extension` merging below.
var own3 = {}.hasOwnProperty;
// Parse markdown (`value2`, optionally a buffer decoded with `encoding`)
// into an mdast tree. `encoding` may be omitted, with `options` passed in
// its place.
var fromMarkdown = function(value2, encoding, options) {
  if (typeof encoding !== "string") {
    options = encoding;
    encoding = void 0;
  }
  const chunks = preprocess()(value2, encoding, true);
  const events = postprocess(parse2(options).document().write(chunks));
  return compiler(options)(events);
};
// Create a compiler: a function that turns a flat list of micromark
// events into an mdast syntax tree (`mdast-util-from-markdown` core).
function compiler(options = {}) {
  // Default handler configuration, merged with any `mdastExtensions`.
  // `enter` handlers open nodes, `exit` handlers close/populate them.
  const config = configure({
    transforms: [],
    // Node types whose content is allowed to contain line endings
    // (see `onexitlineending`).
    canContainEols: [
      "emphasis",
      "fragment",
      "heading",
      "paragraph",
      "strong"
    ],
    enter: {
      autolink: opener2(link),
      autolinkProtocol: onenterdata,
      autolinkEmail: onenterdata,
      atxHeading: opener2(heading),
      blockQuote: opener2(blockQuote2),
      characterEscape: onenterdata,
      characterReference: onenterdata,
      codeFenced: opener2(codeFlow),
      codeFencedFenceInfo: buffer2,
      codeFencedFenceMeta: buffer2,
      codeIndented: opener2(codeFlow, buffer2),
      codeText: opener2(codeText2, buffer2),
      codeTextData: onenterdata,
      data: onenterdata,
      codeFlowValue: onenterdata,
      definition: opener2(definition2),
      definitionDestinationString: buffer2,
      definitionLabelString: buffer2,
      definitionTitleString: buffer2,
      emphasis: opener2(emphasis),
      hardBreakEscape: opener2(hardBreak),
      hardBreakTrailing: opener2(hardBreak),
      htmlFlow: opener2(html, buffer2),
      htmlFlowData: onenterdata,
      htmlText: opener2(html, buffer2),
      htmlTextData: onenterdata,
      image: opener2(image),
      label: buffer2,
      link: opener2(link),
      listItem: opener2(listItem),
      listItemValue: onenterlistitemvalue,
      listOrdered: opener2(list2, onenterlistordered),
      listUnordered: opener2(list2),
      paragraph: opener2(paragraph),
      reference: onenterreference,
      referenceString: buffer2,
      resourceDestinationString: buffer2,
      resourceTitleString: buffer2,
      setextHeading: opener2(heading),
      strong: opener2(strong),
      thematicBreak: opener2(thematicBreak2)
    },
    exit: {
      atxHeading: closer(),
      atxHeadingSequence: onexitatxheadingsequence,
      autolink: closer(),
      autolinkEmail: onexitautolinkemail,
      autolinkProtocol: onexitautolinkprotocol,
      blockQuote: closer(),
      characterEscapeValue: onexitdata,
      characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
      characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
      characterReferenceValue: onexitcharacterreferencevalue,
      codeFenced: closer(onexitcodefenced),
      codeFencedFence: onexitcodefencedfence,
      codeFencedFenceInfo: onexitcodefencedfenceinfo,
      codeFencedFenceMeta: onexitcodefencedfencemeta,
      codeFlowValue: onexitdata,
      codeIndented: closer(onexitcodeindented),
      codeText: closer(onexitcodetext),
      codeTextData: onexitdata,
      data: onexitdata,
      definition: closer(),
      definitionDestinationString: onexitdefinitiondestinationstring,
      definitionLabelString: onexitdefinitionlabelstring,
      definitionTitleString: onexitdefinitiontitlestring,
      emphasis: closer(),
      hardBreakEscape: closer(onexithardbreak),
      hardBreakTrailing: closer(onexithardbreak),
      htmlFlow: closer(onexithtmlflow),
      htmlFlowData: onexitdata,
      htmlText: closer(onexithtmltext),
      htmlTextData: onexitdata,
      image: closer(onexitimage),
      label: onexitlabel,
      labelText: onexitlabeltext,
      lineEnding: onexitlineending,
      link: closer(onexitlink),
      listItem: closer(),
      listOrdered: closer(),
      listUnordered: closer(),
      paragraph: closer(),
      referenceString: onexitreferencestring,
      resourceDestinationString: onexitresourcedestinationstring,
      resourceTitleString: onexitresourcetitlestring,
      resource: onexitresource,
      setextHeading: closer(onexitsetextheading),
      setextHeadingLineSequence: onexitsetextheadinglinesequence,
      setextHeadingText: onexitsetextheadingtext,
      strong: closer(),
      thematicBreak: closer()
    }
  }, options.mdastExtensions || []);
  // Scratch flags shared by the handlers via setData/getData; lives for
  // the lifetime of the compiler, not per `compile` call.
  const data = {};
  return compile;
  // Compile one finished list of events into a `root` node.
  function compile(events) {
    let tree = {
      type: "root",
      children: []
    };
    const stack = [tree];
    const tokenStack = [];
    const listStack = [];
    // `context` is what handlers see as `this` (plus a per-event
    // `sliceSerialize`, added below).
    const context = {
      stack,
      tokenStack,
      config,
      enter,
      exit: exit2,
      buffer: buffer2,
      resume,
      setData,
      getData
    };
    let index2 = -1;
    // First pass: inject synthetic `listItem` enter/exit events, since
    // micromark only emits list-container events.
    while (++index2 < events.length) {
      if (events[index2][1].type === "listOrdered" || events[index2][1].type === "listUnordered") {
        if (events[index2][0] === "enter") {
          listStack.push(index2);
        } else {
          const tail = listStack.pop();
          index2 = prepareList(events, tail, index2);
        }
      }
    }
    index2 = -1;
    // Second pass: run the configured enter/exit handler for each event.
    while (++index2 < events.length) {
      const handler2 = config[events[index2][0]];
      if (own3.call(handler2, events[index2][1].type)) {
        handler2[events[index2][1].type].call(Object.assign({
          // Expose this token's serializer alongside the shared context.
          sliceSerialize: events[index2][2].sliceSerialize
        }, context), events[index2][1]);
      }
    }
    // Every entered token must have been exited by now.
    if (tokenStack.length > 0) {
      throw new Error("Cannot close document, a token (`" + tokenStack[tokenStack.length - 1].type + "`, " + stringifyPosition({
        start: tokenStack[tokenStack.length - 1].start,
        end: tokenStack[tokenStack.length - 1].end
      }) + ") is still open");
    }
    // Root position: first event's start to the second-to-last event's
    // end (defaults to 1:1 for an empty document).
    tree.position = {
      start: point2(events.length > 0 ? events[0][1].start : {
        line: 1,
        column: 1,
        offset: 0
      }),
      end: point2(events.length > 0 ? events[events.length - 2][1].end : {
        line: 1,
        column: 1,
        offset: 0
      })
    };
    // Apply tree-level transforms contributed by extensions.
    index2 = -1;
    while (++index2 < config.transforms.length) {
      tree = config.transforms[index2](tree) || tree;
    }
    return tree;
  }
  // Splice `listItem` enter/exit events into one list's event range
  // (`start`..`length`) and compute blank-line/spread info. Returns the
  // (possibly grown) end index.
  function prepareList(events, start, length) {
    let index2 = start - 1;
    // Nesting depth of containers relative to this list.
    let containerBalance = -1;
    let listSpread = false;
    let listItem2;
    let lineIndex;
    let firstBlankLineIndex;
    let atMarker;
    while (++index2 <= length) {
      const event = events[index2];
      if (event[1].type === "listUnordered" || event[1].type === "listOrdered" || event[1].type === "blockQuote") {
        if (event[0] === "enter") {
          containerBalance++;
        } else {
          containerBalance--;
        }
        atMarker = void 0;
      } else if (event[1].type === "lineEndingBlank") {
        if (event[0] === "enter") {
          // Remember the first blank line directly inside this item.
          if (listItem2 && !atMarker && !containerBalance && !firstBlankLineIndex) {
            firstBlankLineIndex = index2;
          }
          atMarker = void 0;
        }
      } else if (event[1].type === "linePrefix" || event[1].type === "listItemValue" || event[1].type === "listItemMarker" || event[1].type === "listItemPrefix" || event[1].type === "listItemPrefixWhitespace") {
        // Marker-related events: keep `atMarker` as-is.
      } else {
        atMarker = void 0;
      }
      if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
        // A new item starts (or the list ends): close the previous item.
        if (listItem2) {
          let tailIndex = index2;
          lineIndex = void 0;
          // Walk back over trailing line endings/prefixes to find where
          // the previous item really ends.
          while (tailIndex--) {
            const tailEvent = events[tailIndex];
            if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
              if (tailEvent[0] === "exit")
                continue;
              if (lineIndex) {
                // More than one trailing line ending: the later-seen one
                // is blank, which makes the list spread (loose).
                events[lineIndex][1].type = "lineEndingBlank";
                listSpread = true;
              }
              tailEvent[1].type = "lineEnding";
              lineIndex = tailIndex;
            } else if (tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent") {
              // Skippable whitespace/prefix events.
            } else {
              break;
            }
          }
          // A blank line inside the item (before its trailing line
          // endings) makes the item itself spread.
          if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) {
            listItem2._spread = true;
          }
          listItem2.end = Object.assign({}, lineIndex ? events[lineIndex][1].start : event[1].end);
          events.splice(lineIndex || index2, 0, ["exit", listItem2, event[2]]);
          index2++;
          length++;
        }
        // Open a new synthetic list item at its prefix.
        if (event[1].type === "listItemPrefix") {
          listItem2 = {
            type: "listItem",
            _spread: false,
            start: Object.assign({}, event[1].start)
          };
          events.splice(index2, 0, ["enter", listItem2, event[2]]);
          index2++;
          length++;
          firstBlankLineIndex = void 0;
          atMarker = true;
        }
      }
    }
    // Record whether the list as a whole is spread on its enter token.
    events[start][1]._spread = listSpread;
    return length;
  }
  // Store a scratch flag; calling without `value2` clears it (undefined).
  function setData(key, value2) {
    data[key] = value2;
  }
  // Read a scratch flag.
  function getData(key) {
    return data[key];
  }
  // Copy a micromark point into a plain {line, column, offset} object.
  function point2(d) {
    return {
      line: d.line,
      column: d.column,
      offset: d.offset
    };
  }
  // Build an enter handler: creates a node via `create2`, enters it, and
  // optionally runs a secondary handler `and`.
  function opener2(create2, and) {
    return open;
    function open(token) {
      enter.call(this, create2(token), token);
      if (and)
        and.call(this, token);
    }
  }
  // Push a fragment node to collect children/text; paired with `resume`.
  function buffer2() {
    this.stack.push({
      type: "fragment",
      children: []
    });
  }
  // Append `node` to the current parent, make it the new top of stack,
  // and start its position at the token's start.
  function enter(node, token) {
    const parent = this.stack[this.stack.length - 1];
    parent.children.push(node);
    this.stack.push(node);
    this.tokenStack.push(token);
    node.position = {
      start: point2(token.start)
    };
    return node;
  }
  // Build an exit handler: optionally runs `and` first, then closes.
  function closer(and) {
    return close2;
    function close2(token) {
      if (and)
        and.call(this, token);
      exit2.call(this, token);
    }
  }
  // Pop the current node, verifying the exit matches the open token.
  function exit2(token) {
    const node = this.stack.pop();
    const open = this.tokenStack.pop();
    if (!open) {
      throw new Error("Cannot close `" + token.type + "` (" + stringifyPosition({
        start: token.start,
        end: token.end
      }) + "): it\u2019s not open");
    } else if (open.type !== token.type) {
      throw new Error("Cannot close `" + token.type + "` (" + stringifyPosition({
        start: token.start,
        end: token.end
      }) + "): a different token (`" + open.type + "`, " + stringifyPosition({
        start: open.start,
        end: open.end
      }) + ") is open");
    }
    node.position.end = point2(token.end);
    return node;
  }
  // Pop the current fragment and return its serialized text content.
  function resume() {
    return toString(this.stack.pop());
  }
  function onenterlistordered() {
    setData("expectingFirstListItemValue", true);
  }
  function onenterlistitemvalue(token) {
    if (getData("expectingFirstListItemValue")) {
      // The first item's number becomes the list's `start`.
      const ancestor = this.stack[this.stack.length - 2];
      ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);
      setData("expectingFirstListItemValue");
    }
  }
  function onexitcodefencedfenceinfo() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.lang = data2;
  }
  function onexitcodefencedfencemeta() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.meta = data2;
  }
  function onexitcodefencedfence() {
    // Only the opening fence starts the value buffer; the closing fence
    // (seen while `flowCodeInside` is set) is ignored.
    if (getData("flowCodeInside"))
      return;
    this.buffer();
    setData("flowCodeInside", true);
  }
  function onexitcodefenced() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    // Strip one leading and one trailing line ending around the value.
    node.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
    setData("flowCodeInside");
  }
  function onexitcodeindented() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.value = data2.replace(/(\r?\n|\r)$/g, "");
  }
  function onexitdefinitionlabelstring(token) {
    const label = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.label = label;
    node.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
  }
  function onexitdefinitiontitlestring() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.title = data2;
  }
  function onexitdefinitiondestinationstring() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.url = data2;
  }
  function onexitatxheadingsequence(token) {
    const node = this.stack[this.stack.length - 1];
    // Only the opening sequence (`###`) sets the depth; the optional
    // closing sequence must not overwrite it.
    if (!node.depth) {
      const depth = this.sliceSerialize(token).length;
      node.depth = depth;
    }
  }
  function onexitsetextheadingtext() {
    setData("setextHeadingSlurpLineEnding", true);
  }
  function onexitsetextheadinglinesequence(token) {
    const node = this.stack[this.stack.length - 1];
    // 61 is `=` (depth 1); `-` underlines yield depth 2.
    node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 1 : 2;
  }
  function onexitsetextheading() {
    setData("setextHeadingSlurpLineEnding");
  }
  // Start (or continue) a text node to accumulate character data.
  function onenterdata(token) {
    const parent = this.stack[this.stack.length - 1];
    let tail = parent.children[parent.children.length - 1];
    if (!tail || tail.type !== "text") {
      tail = text3();
      tail.position = {
        start: point2(token.start)
      };
      parent.children.push(tail);
    }
    this.stack.push(tail);
  }
  // Append the token's text to the current text node and close it.
  function onexitdata(token) {
    const tail = this.stack.pop();
    tail.value += this.sliceSerialize(token);
    tail.position.end = point2(token.end);
  }
  function onexitlineending(token) {
    const context = this.stack[this.stack.length - 1];
    // After a hard break, the line ending only extends the break's end.
    if (getData("atHardBreak")) {
      const tail = context.children[context.children.length - 1];
      tail.position.end = point2(token.end);
      setData("atHardBreak");
      return;
    }
    // Otherwise keep the newline as text inside nodes that allow EOLs
    // (unless a setext underline is about to consume it).
    if (!getData("setextHeadingSlurpLineEnding") && config.canContainEols.includes(context.type)) {
      onenterdata.call(this, token);
      onexitdata.call(this, token);
    }
  }
  function onexithardbreak() {
    setData("atHardBreak", true);
  }
  function onexithtmlflow() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.value = data2;
  }
  function onexithtmltext() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.value = data2;
  }
  function onexitcodetext() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.value = data2;
  }
  function onexitlink() {
    const context = this.stack[this.stack.length - 1];
    // Reference-style links become `linkReference`; inline links keep
    // `url`/`title` and drop the reference fields.
    if (getData("inReference")) {
      context.type += "Reference";
      context.referenceType = getData("referenceType") || "shortcut";
      delete context.url;
      delete context.title;
    } else {
      delete context.identifier;
      delete context.label;
    }
    setData("referenceType");
  }
  function onexitimage() {
    const context = this.stack[this.stack.length - 1];
    // Same reference/resource split as `onexitlink`.
    if (getData("inReference")) {
      context.type += "Reference";
      context.referenceType = getData("referenceType") || "shortcut";
      delete context.url;
      delete context.title;
    } else {
      delete context.identifier;
      delete context.label;
    }
    setData("referenceType");
  }
  function onexitlabeltext(token) {
    const ancestor = this.stack[this.stack.length - 2];
    const string3 = this.sliceSerialize(token);
    ancestor.label = decodeString(string3);
    ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
  }
  function onexitlabel() {
    const fragment = this.stack[this.stack.length - 1];
    const value2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    // Assume a reference until a resource (`(url)`) proves otherwise.
    setData("inReference", true);
    if (node.type === "link") {
      // Links keep the label's children; images keep its text as `alt`.
      node.children = fragment.children;
    } else {
      node.alt = value2;
    }
  }
  function onexitresourcedestinationstring() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.url = data2;
  }
  function onexitresourcetitlestring() {
    const data2 = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.title = data2;
  }
  function onexitresource() {
    setData("inReference");
  }
  function onenterreference() {
    setData("referenceType", "collapsed");
  }
  function onexitreferencestring(token) {
    const label = this.resume();
    const node = this.stack[this.stack.length - 1];
    node.label = label;
    node.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
    setData("referenceType", "full");
  }
  function onexitcharacterreferencemarker(token) {
    setData("characterReferenceType", token.type);
  }
  function onexitcharacterreferencevalue(token) {
    const data2 = this.sliceSerialize(token);
    const type = getData("characterReferenceType");
    let value2;
    if (type) {
      // Numeric reference: marker told us decimal vs hexadecimal.
      value2 = decodeNumericCharacterReference(data2, type === "characterReferenceMarkerNumeric" ? 10 : 16);
      setData("characterReferenceType");
    } else {
      // Named reference.
      value2 = decodeEntity(data2);
    }
    const tail = this.stack.pop();
    tail.value += value2;
    tail.position.end = point2(token.end);
  }
  function onexitautolinkprotocol(token) {
    onexitdata.call(this, token);
    const node = this.stack[this.stack.length - 1];
    node.url = this.sliceSerialize(token);
  }
  function onexitautolinkemail(token) {
    onexitdata.call(this, token);
    const node = this.stack[this.stack.length - 1];
    node.url = "mailto:" + this.sliceSerialize(token);
  }
  // mdast node factories used by the `enter` handlers above.
  function blockQuote2() {
    return {
      type: "blockquote",
      children: []
    };
  }
  function codeFlow() {
    return {
      type: "code",
      lang: null,
      meta: null,
      value: ""
    };
  }
  function codeText2() {
    return {
      type: "inlineCode",
      value: ""
    };
  }
  function definition2() {
    return {
      type: "definition",
      identifier: "",
      label: null,
      title: null,
      url: ""
    };
  }
  function emphasis() {
    return {
      type: "emphasis",
      children: []
    };
  }
  function heading() {
    return {
      type: "heading",
      depth: void 0,
      children: []
    };
  }
  function hardBreak() {
    return {
      type: "break"
    };
  }
  function html() {
    return {
      type: "html",
      value: ""
    };
  }
  function image() {
    return {
      type: "image",
      title: null,
      url: "",
      alt: null
    };
  }
  function link() {
    return {
      type: "link",
      title: null,
      url: "",
      children: []
    };
  }
  function list2(token) {
    return {
      type: "list",
      ordered: token.type === "listOrdered",
      start: null,
      spread: token._spread,
      children: []
    };
  }
  function listItem(token) {
    return {
      type: "listItem",
      spread: token._spread,
      checked: null,
      children: []
    };
  }
  function paragraph() {
    return {
      type: "paragraph",
      children: []
    };
  }
  function strong() {
    return {
      type: "strong",
      children: []
    };
  }
  function text3() {
    return {
      type: "text",
      value: ""
    };
  }
  function thematicBreak2() {
    return {
      type: "thematicBreak"
    };
  }
}
// Merge a (possibly nested) list of mdast extensions into `combined`
// and return it.
function configure(combined, extensions) {
  for (const item of extensions) {
    if (Array.isArray(item)) {
      // Nested list of extensions: recurse.
      configure(combined, item);
    } else {
      extension(combined, item);
    }
  }
  return combined;
}
// Merge one mdast extension into `combined`: list-valued keys
// (`canContainEols`, `transforms`) concatenate; handler maps merge.
function extension(combined, extension2) {
  for (const key in extension2) {
    if (!own3.call(extension2, key)) {
      continue;
    }
    const isList = key === "canContainEols" || key === "transforms";
    const existing = own3.call(combined, key) ? combined[key] : void 0;
    // Create the target container on demand.
    const left = existing || (combined[key] = isList ? [] : {});
    const right = extension2[key];
    if (!right) {
      continue;
    }
    if (isList) {
      combined[key] = [...left, ...right];
    } else {
      Object.assign(left, right);
    }
  }
}
// unified plugin: installs a markdown parser on the processor (`this`).
function remarkParse(options) {
  const parser = (doc) => {
    // Processor-level settings are read lazily, at parse time, and are
    // overridden by the plugin `options`.
    const settings = this.data("settings");
    const merged = Object.assign({}, settings, options, {
      extensions: this.data("micromarkExtensions") || [],
      mdastExtensions: this.data("fromMarkdownExtensions") || []
    });
    return fromMarkdown(doc, merged);
  };
  Object.assign(this, { Parser: parser });
}
var remark_parse_default = remarkParse;
// Throw the given value if it is truthy; otherwise do nothing.
function bail(error) {
  if (!error) {
    return;
  }
  throw error;
}
var import_is_buffer2 = __toModule(require_is_buffer());
var import_extend = __toModule(require_extend());
// `true` for plain objects (object literals and `Object.create(null)`);
// `false` for arrays, class instances, and other exotic objects.
function isPlainObject(value2) {
  const tag = Object.prototype.toString.call(value2);
  if (tag !== "[object Object]") {
    return false;
  }
  const proto = Object.getPrototypeOf(value2);
  return proto === null || proto === Object.prototype;
}
// Create a middleware pipeline ("trough"): functions added with `use`
// run in order via `run`, each receiving the previous step's output.
function trough() {
  const fns = [];
  const pipeline = { run, use };
  return pipeline;
  // Run the pipeline over `values`; the last argument must be a
  // done-callback `(error, ...output)`.
  function run(...values) {
    let middlewareIndex = -1;
    const callback = values.pop();
    if (typeof callback !== "function") {
      throw new TypeError("Expected function as last argument, not " + callback);
    }
    next(null, ...values);
    // Invoke the next middleware, carrying forward any inputs the
    // previous one did not replace.
    function next(error, ...output) {
      const fn = fns[++middlewareIndex];
      let index2 = -1;
      // Any error short-circuits the rest of the pipeline.
      if (error) {
        callback(error);
        return;
      }
      // Fill holes: keep the previous value where the middleware
      // produced `null`/`undefined`.
      while (++index2 < values.length) {
        if (output[index2] === null || output[index2] === void 0) {
          output[index2] = values[index2];
        }
      }
      values = output;
      if (fn) {
        // `wrap` adapts sync returns, promises, and callbacks to `next`.
        wrap(fn, next)(...output);
      } else {
        callback(null, ...output);
      }
    }
  }
  // Add a middleware function; returns the pipeline for chaining.
  function use(middelware) {
    if (typeof middelware !== "function") {
      throw new TypeError("Expected `middelware` to be a function, not " + middelware);
    }
    fns.push(middelware);
    return pipeline;
  }
}
// Wrap `middleware` so that sync returns, thrown errors, returned
// promises, and a node-style `done` callback all funnel into `callback`,
// which fires at most once.
function wrap(middleware, callback) {
  let called;
  return wrapped;
  function wrapped(...parameters) {
    // If the middleware declares more parameters than were passed, the
    // extra one is treated as a callback.
    const fnExpectsCallback = middleware.length > parameters.length;
    let result;
    if (fnExpectsCallback) {
      parameters.push(done);
    }
    try {
      result = middleware(...parameters);
    } catch (error) {
      const exception = error;
      // The callback already fired; rethrow rather than double-report.
      if (fnExpectsCallback && called) {
        throw exception;
      }
      return done(exception);
    }
    // Callback-style middleware reports via `done` itself; otherwise
    // interpret the return value.
    if (!fnExpectsCallback) {
      if (result instanceof Promise) {
        result.then(then, done);
      } else if (result instanceof Error) {
        done(result);
      } else {
        then(result);
      }
    }
  }
  // Deliver the outcome to `callback` at most once.
  function done(error, ...output) {
    if (!called) {
      called = true;
      callback(error, ...output);
    }
  }
  // Success path for sync and promise results.
  function then(value2) {
    done(null, value2);
  }
}
var import_is_buffer = __toModule(require_is_buffer());
// A message (warning or error) associated with a place in a file.
// `reason` may be a string or an Error; `place` a node, position, or
// point; `origin` a "source:ruleId" string.
var VFileMessage = class extends Error {
  constructor(reason, place, origin) {
    var parts = [null, null];
    var position2 = {
      start: { line: null, column: null },
      end: { line: null, column: null }
    };
    var index2;
    super();
    // Allow omitting `place`: `(reason, origin)`.
    if (typeof place === "string") {
      origin = place;
      place = null;
    }
    // Split `origin` into `source:ruleId` on the first colon; a plain
    // string is just the ruleId.
    if (typeof origin === "string") {
      index2 = origin.indexOf(":");
      if (index2 === -1) {
        parts[1] = origin;
      } else {
        parts[0] = origin.slice(0, index2);
        parts[1] = origin.slice(index2 + 1);
      }
    }
    // Normalize `place` (node / position / point) into a position.
    if (place) {
      if ("type" in place || "position" in place) {
        if (place.position) {
          position2 = place.position;
        }
      } else if ("start" in place || "end" in place) {
        position2 = place;
      } else if ("line" in place || "column" in place) {
        position2.start = place;
      }
    }
    this.name = stringifyPosition(place) || "1:1";
    this.message = typeof reason === "object" ? reason.message : reason;
    // Reuse the original stack when an Error was passed in.
    this.stack = typeof reason === "object" ? reason.stack : "";
    this.reason = this.message;
    this.line = position2.start.line;
    this.column = position2.start.column;
    this.source = parts[0];
    this.ruleId = parts[1];
    this.position = position2;
    // Declared for shape only; set later by `VFile#message` and callers.
    this.file;
    this.fatal;
    this.url;
    this.note;
  }
};
// Prototype-level defaults; instances overwrite these in the constructor.
VFileMessage.prototype.file = "";
VFileMessage.prototype.name = "";
VFileMessage.prototype.reason = "";
VFileMessage.prototype.message = "";
VFileMessage.prototype.stack = "";
VFileMessage.prototype.fatal = null;
VFileMessage.prototype.column = null;
VFileMessage.prototype.line = null;
VFileMessage.prototype.source = null;
VFileMessage.prototype.ruleId = null;
VFileMessage.prototype.position = null;
// Minimal POSIX-only `path` implementation used by `VFile` below.
var path = { basename, dirname, extname, join, sep: "/" };
// POSIX `path.basename`: the last path segment, optionally with a
// trailing `ext` removed. 47 is the char code of `/`.
function basename(path2, ext) {
  if (ext !== void 0 && typeof ext !== "string") {
    throw new TypeError('"ext" argument must be a string');
  }
  assertPath(path2);
  let start = 0;
  let end = -1;
  let index2 = path2.length;
  let seenNonSlash;
  // Fast path: no usable extension — return the last segment, ignoring
  // trailing slashes.
  if (ext === void 0 || ext.length === 0 || ext.length > path2.length) {
    while (index2--) {
      if (path2.charCodeAt(index2) === 47) {
        // A slash only terminates the scan after a non-slash was seen,
        // so trailing slashes are skipped.
        if (seenNonSlash) {
          start = index2 + 1;
          break;
        }
      } else if (end < 0) {
        seenNonSlash = true;
        end = index2 + 1;
      }
    }
    return end < 0 ? "" : path2.slice(start, end);
  }
  if (ext === path2) {
    return "";
  }
  let firstNonSlashEnd = -1;
  let extIndex = ext.length - 1;
  // Scan backwards, matching `ext` from its end while locating the last
  // segment's boundaries.
  while (index2--) {
    if (path2.charCodeAt(index2) === 47) {
      if (seenNonSlash) {
        start = index2 + 1;
        break;
      }
    } else {
      if (firstNonSlashEnd < 0) {
        seenNonSlash = true;
        firstNonSlashEnd = index2 + 1;
      }
      if (extIndex > -1) {
        if (path2.charCodeAt(index2) === ext.charCodeAt(extIndex--)) {
          if (extIndex < 0) {
            // The whole extension matched: cut it off here.
            end = index2;
          }
        } else {
          // Mismatch: stop matching and keep the full segment.
          extIndex = -1;
          end = firstNonSlashEnd;
        }
      }
    }
  }
  // `start === end` means the segment consists only of the extension.
  if (start === end) {
    end = firstNonSlashEnd;
  } else if (end < 0) {
    end = path2.length;
  }
  return path2.slice(start, end);
}
// POSIX `path.dirname`: everything before the last segment (47 is `/`).
function dirname(path2) {
  assertPath(path2);
  if (path2.length === 0) {
    return ".";
  }
  let end = -1;
  let index2 = path2.length;
  let unmatchedSlash;
  // Walk backwards to the slash that precedes the last segment, skipping
  // trailing slashes. `--index2` deliberately never inspects index 0;
  // a leading slash is handled by the return expression instead.
  while (--index2) {
    if (path2.charCodeAt(index2) === 47) {
      if (unmatchedSlash) {
        end = index2;
        break;
      }
    } else if (!unmatchedSlash) {
      // A non-slash was seen; the next slash ends the dirname.
      unmatchedSlash = true;
    }
  }
  // No slash found: "/" for absolute input, "." otherwise; `end === 1`
  // with a leading slash preserves a `//` prefix.
  return end < 0 ? path2.charCodeAt(0) === 47 ? "/" : "." : end === 1 && path2.charCodeAt(0) === 47 ? "//" : path2.slice(0, end);
}
// POSIX `path.extname`: the final `.`-suffix of the last segment
// (46 is `.`, 47 is `/`).
function extname(path2) {
  assertPath(path2);
  let index2 = path2.length;
  let end = -1;
  let startPart = 0;
  let startDot = -1;
  // Dot bookkeeping for the backward scan: 0 = no dot issue seen,
  // 1 = more than one dot candidate, -1 = a non-dot character appeared
  // after the last dot, so it is a real extension separator.
  let preDotState = 0;
  let unmatchedSlash;
  while (index2--) {
    const code = path2.charCodeAt(index2);
    if (code === 47) {
      // Slash ends the scan once real characters were seen.
      if (unmatchedSlash) {
        startPart = index2 + 1;
        break;
      }
      continue;
    }
    if (end < 0) {
      unmatchedSlash = true;
      end = index2 + 1;
    }
    if (code === 46) {
      if (startDot < 0) {
        startDot = index2;
      } else if (preDotState !== 1) {
        preDotState = 1;
      }
    } else if (startDot > -1) {
      preDotState = -1;
    }
  }
  // No extension when: no dot or no name was found, the dot is the
  // segment's first character (dotfiles), or the segment is `..`-like.
  if (startDot < 0 || end < 0 || preDotState === 0 || preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {
    return "";
  }
  return path2.slice(startDot, end);
}
// POSIX `path.join`: concatenate segments with `/` (skipping empty
// ones), then normalize; "." when nothing remains.
function join(...segments) {
  let joined;
  for (const segment of segments) {
    // Every segment is type-checked, even empty ones.
    assertPath(segment);
    if (segment) {
      joined = joined === void 0 ? segment : joined + "/" + segment;
    }
  }
  return joined === void 0 ? "." : normalize(joined);
}
// POSIX `path.normalize`: collapse `.`/`..`/repeated slashes while
// preserving absoluteness and a trailing slash.
function normalize(path2) {
  assertPath(path2);
  const absolute = path2.charCodeAt(0) === 47;
  let value2 = normalizeString(path2, !absolute);
  if (value2.length === 0 && !absolute) {
    value2 = ".";
  }
  const hadTrailingSlash = path2.charCodeAt(path2.length - 1) === 47;
  if (value2.length > 0 && hadTrailingSlash) {
    value2 += "/";
  }
  return absolute ? "/" + value2 : value2;
}
// Core of `normalize`: resolve `.`/`..` segments and collapse repeated
// slashes. `allowAboveRoot` keeps leading `..` segments (relative
// paths). Operates on and returns slash-free-at-the-edges strings.
function normalizeString(path2, allowAboveRoot) {
  let result = "";
  let lastSegmentLength = 0;
  let lastSlash = -1;
  // Consecutive dots in the current segment; -1 once a non-dot is seen.
  let dots = 0;
  let index2 = -1;
  let code;
  let lastSlashIndex;
  while (++index2 <= path2.length) {
    if (index2 < path2.length) {
      code = path2.charCodeAt(index2);
    } else if (code === 47) {
      // The string ended right after a slash: last segment already done.
      break;
    } else {
      // Treat end-of-string as a virtual trailing slash.
      code = 47;
    }
    if (code === 47) {
      if (lastSlash === index2 - 1 || dots === 1) {
        // Empty segment (`//`) or `.`: drop it.
      } else if (lastSlash !== index2 - 1 && dots === 2) {
        // `..`: try to pop the previous segment off `result`; the guard
        // refuses to pop a segment that is itself `..`.
        if (result.length < 2 || lastSegmentLength !== 2 || result.charCodeAt(result.length - 1) !== 46 || result.charCodeAt(result.length - 2) !== 46) {
          if (result.length > 2) {
            lastSlashIndex = result.lastIndexOf("/");
            if (lastSlashIndex !== result.length - 1) {
              if (lastSlashIndex < 0) {
                result = "";
                lastSegmentLength = 0;
              } else {
                result = result.slice(0, lastSlashIndex);
                lastSegmentLength = result.length - 1 - result.lastIndexOf("/");
              }
              lastSlash = index2;
              dots = 0;
              continue;
            }
          } else if (result.length > 0) {
            result = "";
            lastSegmentLength = 0;
            lastSlash = index2;
            dots = 0;
            continue;
          }
        }
        // Nothing to pop: keep `..` only when allowed above root.
        if (allowAboveRoot) {
          result = result.length > 0 ? result + "/.." : "..";
          lastSegmentLength = 2;
        }
      } else {
        // Ordinary segment: append it.
        if (result.length > 0) {
          result += "/" + path2.slice(lastSlash + 1, index2);
        } else {
          result = path2.slice(lastSlash + 1, index2);
        }
        lastSegmentLength = index2 - lastSlash - 1;
      }
      lastSlash = index2;
      dots = 0;
    } else if (code === 46 && dots > -1) {
      // 46 is `.`: count dots in the current segment.
      dots++;
    } else {
      dots = -1;
    }
  }
  return result;
}
// Guard: the path helpers above only accept strings.
function assertPath(path2) {
  if (typeof path2 === "string") {
    return;
  }
  throw new TypeError("Path must be a string. Received " + JSON.stringify(path2));
}
// The working directory is always the root in this stand-in.
function cwd() {
  return "/";
}
// Minimal `process` stand-in for non-Node environments.
var proc = { cwd };
// Loose check for WHATWG `URL` instances: a non-null object exposing
// `href` and `origin`. Note the result is truthy/falsy, not a strict
// boolean (it may be the `origin` string itself).
function isUrl(fileURLOrPath) {
  if (fileURLOrPath === null || typeof fileURLOrPath !== "object") {
    return false;
  }
  return fileURLOrPath.href && fileURLOrPath.origin;
}
// Convert a `file:` URL (or a string holding one) to a POSIX path.
function urlToPath(path2) {
  // Build a TypeError carrying a Node-style `code` property.
  const fail = (message, code) => {
    const error = new TypeError(message);
    error.code = code;
    return error;
  };
  if (typeof path2 === "string") {
    path2 = new URL(path2);
  } else if (!isUrl(path2)) {
    throw fail('The "path" argument must be of type string or an instance of URL. Received `' + path2 + "`", "ERR_INVALID_ARG_TYPE");
  }
  if (path2.protocol !== "file:") {
    throw fail("The URL must be of scheme file", "ERR_INVALID_URL_SCHEME");
  }
  return getPathFromURLPosix(path2);
}
// Turn a host-less `file:` URL into a POSIX path, rejecting URLs with a
// host or with encoded slashes (`%2F`/`%2f`) in the path.
function getPathFromURLPosix(url) {
  if (url.hostname !== "") {
    const error = new TypeError('File URL host must be "localhost" or empty on darwin');
    error.code = "ERR_INVALID_FILE_URL_HOST";
    throw error;
  }
  const pathname = url.pathname;
  // An encoded `/` would change the path structure after decoding.
  if (/%2f/i.test(pathname)) {
    const error = new TypeError("File URL path must not include encoded / characters");
    error.code = "ERR_INVALID_FILE_URL_PATH";
    throw error;
  }
  return decodeURIComponent(pathname);
}
// Order in which `VFile` applies path-related options; later entries
// depend on earlier ones (e.g. `stem` needs `path`/`dirname` to exist).
var order = ["history", "path", "basename", "stem", "extname", "dirname"];
// A virtual file: an in-memory file with contents, path info, metadata,
// and associated messages.
var VFile = class {
  // `value2` may be empty, a string/Buffer (file contents), a URL
  // (the file's path), or an options object.
  constructor(value2) {
    let options;
    if (!value2) {
      options = {};
    } else if (typeof value2 === "string" || (0, import_is_buffer.default)(value2)) {
      options = { value: value2 };
    } else if (isUrl(value2)) {
      options = { path: value2 };
    } else {
      options = value2;
    }
    this.data = {};
    this.messages = [];
    this.history = [];
    this.cwd = proc.cwd();
    // Declared for shape only; filled in by consumers/compilers.
    this.value;
    this.stored;
    this.result;
    this.map;
    // Apply path-ish options in dependency order (see `order`);
    // `history` is copied defensively.
    let index2 = -1;
    while (++index2 < order.length) {
      const prop2 = order[index2];
      if (prop2 in options && options[prop2] !== void 0) {
        this[prop2] = prop2 === "history" ? [...options[prop2]] : options[prop2];
      }
    }
    // Copy any remaining custom options verbatim.
    let prop;
    for (prop in options) {
      if (!order.includes(prop))
        this[prop] = options[prop];
    }
  }
  // Current path: the most recent entry in `history`.
  get path() {
    return this.history[this.history.length - 1];
  }
  set path(path2) {
    if (isUrl(path2)) {
      path2 = urlToPath(path2);
    }
    assertNonEmpty(path2, "path");
    // Only record actual changes (renames).
    if (this.path !== path2) {
      this.history.push(path2);
    }
  }
  // Directory part of the path, or undefined when no path is set.
  get dirname() {
    return typeof this.path === "string" ? path.dirname(this.path) : void 0;
  }
  set dirname(dirname2) {
    assertPath2(this.basename, "dirname");
    this.path = path.join(dirname2 || "", this.basename);
  }
  // File name including extension, or undefined when no path is set.
  get basename() {
    return typeof this.path === "string" ? path.basename(this.path) : void 0;
  }
  set basename(basename2) {
    assertNonEmpty(basename2, "basename");
    assertPart(basename2, "basename");
    this.path = path.join(this.dirname || "", basename2);
  }
  // Extension (with dot), or undefined when no path is set.
  get extname() {
    return typeof this.path === "string" ? path.extname(this.path) : void 0;
  }
  set extname(extname2) {
    assertPart(extname2, "extname");
    assertPath2(this.dirname, "extname");
    if (extname2) {
      if (extname2.charCodeAt(0) !== 46) {
        throw new Error("`extname` must start with `.`");
      }
      if (extname2.includes(".", 1)) {
        throw new Error("`extname` cannot contain multiple dots");
      }
    }
    this.path = path.join(this.dirname, this.stem + (extname2 || ""));
  }
  // File name without extension, or undefined when no path is set.
  get stem() {
    return typeof this.path === "string" ? path.basename(this.path, this.extname) : void 0;
  }
  set stem(stem) {
    assertNonEmpty(stem, "stem");
    assertPart(stem, "stem");
    this.path = path.join(this.dirname || "", stem + (this.extname || ""));
  }
  // Serialize the file contents (empty string when there is no value).
  toString(encoding) {
    return (this.value || "").toString(encoding);
  }
  // Attach a non-fatal message (warning) to the file and return it.
  message(reason, place, origin) {
    const message = new VFileMessage(reason, place, origin);
    if (this.path) {
      message.name = this.path + ":" + message.name;
      message.file = this.path;
    }
    message.fatal = false;
    this.messages.push(message);
    return message;
  }
  // Like `message`, but informational (`fatal: null`).
  info(reason, place, origin) {
    const message = this.message(reason, place, origin);
    message.fatal = null;
    return message;
  }
  // Like `message`, but fatal: the message is thrown.
  fail(reason, place, origin) {
    const message = this.message(reason, place, origin);
    message.fatal = true;
    throw message;
  }
};
// Guard: a basename/stem/extname part must not contain the separator.
function assertPart(part, name) {
  if (!part) {
    return;
  }
  if (part.includes(path.sep)) {
    throw new Error("`" + name + "` cannot be a path: did not expect `" + path.sep + "`");
  }
}
// Guard: a path part must be a non-empty (truthy) value.
function assertNonEmpty(part, name) {
  if (part) {
    return;
  }
  throw new Error("`" + name + "` cannot be empty");
}
// Guard: setting a derived path field requires the base path to exist.
function assertPath2(path2, name) {
  if (path2) {
    return;
  }
  throw new Error("Setting `" + name + "` requires `path` to be set too");
}
// Shared frozen processor; using it (or calling it) creates forks.
var unified = base().freeze();
// Own-property check for the processor's `data` namespace.
var own4 = {}.hasOwnProperty;
function base() {
const transformers = trough();
const attachers = [];
let namespace = {};
let frozen;
let freezeIndex = -1;
processor.data = data;
processor.Parser = void 0;
processor.Compiler = void 0;
processor.freeze = freeze;
processor.attachers = attachers;
processor.use = use;
processor.parse = parse3;
processor.stringify = stringify;
processor.run = run;
processor.runSync = runSync;
processor.process = process;
processor.processSync = processSync;
return processor;
function processor() {
const destination = base();
let index2 = -1;
while (++index2 < attachers.length) {
destination.use(...attachers[index2]);
}
destination.data((0, import_extend.default)(true, {}, namespace));
return destination;
}
function data(key, value2) {
if (typeof key === "string") {
if (arguments.length === 2) {
assertUnfrozen("data", frozen);
namespace[key] = value2;
return processor;
}
return own4.call(namespace, key) && namespace[key] || null;
}
if (key) {
assertUnfrozen("data", frozen);
namespace = key;
return processor;
}
return namespace;
}
function freeze() {
if (frozen) {
return processor;
}
while (++freezeIndex < attachers.length) {
const [attacher, ...options] = attachers[freezeIndex];
if (options[0] === false) {
continue;
}
if (options[0] === true) {
options[1] = void 0;
}
const transformer = attacher.call(processor, ...options);
if (typeof transformer === "function") {
transformers.use(transformer);
}
}
frozen = true;
freezeIndex = Number.POSITIVE_INFINITY;
return processor;
}
function use(value2, ...options) {
let settings;
assertUnfrozen("use", frozen);
if (value2 === null || value2 === void 0) {
} else if (typeof value2 === "function") {
addPlugin(value2, ...options);
} else if (typeof value2 === "object") {
if (Array.isArray(value2)) {
addList(value2);
} else {
addPreset(value2);
}
} else {
throw new TypeError("Expected usable value, not `" + value2 + "`");
}
if (settings) {
namespace.settings = Object.assign(namespace.settings || {}, settings);
}
return processor;
function add(value3) {
if (typeof value3 === "function") {
addPlugin(value3);
} else if (typeof value3 === "object") {
if (Array.isArray(value3)) {
const [plugin, ...options2] = value3;
addPlugin(plugin, ...options2);
} else {
addPreset(value3);
}
} else {
throw new TypeError("Expected usable value, not `" + value3 + "`");
}
}
function addPreset(result) {
addList(result.plugins);
if (result.settings) {
settings = Object.assign(settings || {}, result.settings);
}
}
function addList(plugins) {
let index2 = -1;
if (plugins === null || plugins === void 0) {
} else if (Array.isArray(plugins)) {
while (++index2 < plugins.length) {
const thing = plugins[index2];
add(thing);
}
} else {
throw new TypeError("Expected a list of plugins, not `" + plugins + "`");
}
}
function addPlugin(plugin, value3) {
let index2 = -1;
let entry;
while (++index2 < attachers.length) {
if (attachers[index2][0] === plugin) {
entry = attachers[index2];
break;
}
}
if (entry) {
if (isPlainObject(entry[1]) && isPlainObject(value3)) {
value3 = (0, import_extend.default)(true, entry[1], value3);
}
entry[1] = value3;
} else {
attachers.push([...arguments]);
}
}
}
function parse3(doc) {
processor.freeze();
const file = vfile(doc);
const Parser = processor.Parser;
assertParser("parse", Parser);
if (newable(Parser, "parse")) {
return new Parser(String(file), file).parse();
}
return Parser(String(file), file);
}
function stringify(node, doc) {
processor.freeze();
const file = vfile(doc);
const Compiler = processor.Compiler;
assertCompiler("stringify", Compiler);
assertNode(node);
if (newable(Compiler, "compile")) {
return new Compiler(node, file).compile();
}
return Compiler(node, file);
}
function run(node, doc, callback) {
assertNode(node);
processor.freeze();
if (!callback && typeof doc === "function") {
callback = doc;
doc = void 0;
}
if (!callback) {
return new Promise(executor);
}
executor(null, callback);
function executor(resolve, reject) {
transformers.run(node, vfile(doc), done);
function done(error, tree, file) {
tree = tree || node;
if (error) {
reject(error);
} else if (resolve) {
resolve(tree);
} else {
callback(null, tree, file);
}
}
}
}
function runSync(node, file) {
let result;
let complete;
processor.run(node, file, done);
assertDone("runSync", "run", complete);
return result;
function done(error, tree) {
bail(error);
result = tree;
complete = true;
}
}
function process(doc, callback) {
processor.freeze();
assertParser("process", processor.Parser);
assertCompiler("process", processor.Compiler);
if (!callback) {
return new Promise(executor);
}
executor(null, callback);
function executor(resolve, reject) {
const file = vfile(doc);
processor.run(processor.parse(file), file, (error, tree, file2) => {
if (error || !tree || !file2) {
done(error);
} else {
const result = processor.stringify(tree, file2);
if (result === void 0 || result === null) {
} else if (looksLikeAVFileValue(result)) {
file2.value = result;
} else {
file2.result = result;
}
done(error, file2);
}
});
function done(error, file2) {
if (error || !file2) {
reject(error);
} else if (resolve) {
resolve(file2);
} else {
callback(null, file2);
}
}
}
}
function processSync(doc) {
let complete;
processor.freeze();
assertParser("processSync", processor.Parser);
assertCompiler("processSync", processor.Compiler);
const file = vfile(doc);
processor.process(file, done);
assertDone("processSync", "process", complete);
return file;
function done(error) {
complete = true;
bail(error);
}
}
}
// Whether `value2` can be used as a constructor: a function whose prototype
// either has own enumerable keys or contains a method called `name`.
// NOTE: returns the raw short-circuit value, not a strict boolean — callers
// only use it in boolean context.
function newable(value2, name) {
  return typeof value2 === "function" && value2.prototype && (keys(value2.prototype) || name in value2.prototype);
}
// True when `value2` has at least one own enumerable property.
function keys(value2) {
  for (const key in value2) {
    if (own4.call(value2, key)) {
      return true;
    }
  }
  return false;
}
// Asserts that a parser is attached; a parser must be callable.
function assertParser(name, value2) {
  if (typeof value2 === "function") {
    return;
  }
  throw new TypeError("Cannot `" + name + "` without `Parser`");
}
// Asserts that a compiler is attached; a compiler must be callable.
function assertCompiler(name, value2) {
  if (typeof value2 === "function") {
    return;
  }
  throw new TypeError("Cannot `" + name + "` without `Compiler`");
}
// Mutating a frozen processor is a usage error; direct the user to create a
// fresh processor instead.
function assertUnfrozen(name, frozen) {
  if (!frozen) {
    return;
  }
  throw new Error("Cannot call `" + name + "` on a frozen processor.\nCreate a new processor first, by calling it: use `processor()` instead of `processor`.");
}
// A unist node is a plain object carrying a string `type`.
function assertNode(node) {
  const valid = isPlainObject(node) && typeof node.type === "string";
  if (!valid) {
    throw new TypeError("Expected node, got `" + node + "`");
  }
}
// Sync entry points require the underlying work to have finished before they
// return; otherwise point the user to the async variant.
function assertDone(name, asyncName, complete) {
  if (complete) {
    return;
  }
  throw new Error("`" + name + "` finished async. Use `" + asyncName + "` instead");
}
// Reuses an existing vfile; otherwise wraps the raw value in a new one.
function vfile(value2) {
  if (looksLikeAVFile(value2)) {
    return value2;
  }
  return new VFile(value2);
}
// Duck-type check: vfiles are objects exposing both `message` and `messages`.
function looksLikeAVFile(value2) {
  if (!value2 || typeof value2 !== "object") {
    return false;
  }
  return "message" in value2 && "messages" in value2;
}
// A vfile "value" is a string or a Buffer (checked via the bundled is-buffer).
function looksLikeAVFileValue(value2) {
  return typeof value2 === "string" || (0, import_is_buffer2.default)(value2);
}
// Options for the annotatedtext markdown builder: delegate node handling to
// the library defaults, but replace interpreted markup with an equivalent
// number of newlines so character offsets stay aligned with the source text.
var defaults2 = {
  children(node) {
    return defaults.children(node);
  },
  annotatetextnode(node, text3) {
    return defaults.annotatetextnode(node, text3);
  },
  interpretmarkup(text3 = "") {
    // Preserve line count: emit one "\n" per newline found in the markup.
    return "\n".repeat((text3.match(/\n/g) || []).length);
  },
  remarkoptions: {}
};
// Builds annotated text from markdown using remark, with YAML/TOML
// frontmatter support so metadata blocks are not spell-checked.
function build2(text3, options = defaults2) {
  const processor = unified().use(remark_parse_default, options.remarkoptions).use(remarkFrontmatter, ["yaml", "toml"]);
  return build(text3, processor.parse, options);
}
// Serialized annotated text, sent to the API as the `data` field (see
// checkViaAPI below).
var prepareMarkdown = (text3) => JSON.stringify(build2(text3));
var prepareMarkdown_default = prepareMarkdown;
}
});
// src/requests/checkViaAPI.js
var require_checkViaAPI = __commonJS({
  "src/requests/checkViaAPI.js"(exports, module) {
    var queryString = require_query_string();
    var initialConfig = require_initialConfig();
    var prepareMarkdown = require_prepareMarkdown().default;
    // Attaches the offending word to each match, extracted from the match
    // context. NOTE(review): `substr` is deprecated; `substring`/`slice`
    // would be the modern equivalent (keep in sync with src/requests).
    var addWordFields = (matches) => {
      return matches.map((match) => {
        const word = match.context.text.substr(match.context.offset, match.context.length);
        return { ...match, word };
      });
    };
    // Drops matches from disabled rule categories and misspellings of words
    // the user added to the dictionary.
    var removeFalsePositives = (matches, dictionary, disabledRules) => {
      return matches.filter((match) => !disabledRules.includes(match.rule.category.id) && !(match.rule.issueType === "misspelling" && dictionary.includes(match.word)));
    };
    // Cap suggestions per mistake to keep interactive menus manageable.
    var MAX_REPLACEMENTS = 30;
    // Sends `text` (or its markdown-annotated form) to a LanguageTool-style
    // HTTP API and returns the parsed result with `word` fields added.
    var checkViaAPI = async (text, options = {}) => {
      const cfg = { ...initialConfig, ...options };
      // Rules explicitly set to false become uppercase category ids.
      const disabledRules = Object.entries(cfg.rules).filter(([rule, value]) => value === false).map(([rule]) => rule.toUpperCase());
      // grammarbot does not accept disabledCategories, so only send them to
      // other endpoints; grammarbot results are filtered client-side below.
      const disabledRulesEntry = disabledRules.length === 0 || cfg.api_url.includes("grammarbot") ? {} : { disabledCategories: disabledRules.join(",") };
      const input = options.markdown ? { data: prepareMarkdown(text) } : { text };
      const postData = queryString.stringify({
        api_key: cfg.api_key,
        language: cfg.language,
        ...input,
        ...disabledRulesEntry
      });
      const response = await fetch(cfg.api_url, {
        headers: {
          "Content-Type": "application/x-www-form-urlencoded"
        },
        body: postData,
        method: "POST"
      });
      const body = await response.text();
      let result;
      try {
        result = JSON.parse(body);
      } catch (e) {
        // Non-JSON body means the endpoint rejected the request (e.g. an
        // unsupported language at grammarbot.io).
        if (cfg.api_url.includes("grammarbot")) {
          throw new Error("Language not available at grammarbot.io.\nPlease consider installing a local LanguageTool server:\nhttps://github.com/caderek/gramma#installing-local-server");
        } else {
          throw new Error(body);
        }
      }
      const resultWithWords = {
        ...result,
        // Client-side rule filtering only applies when using the default
        // api_url (which could not receive disabledCategories — see above).
        matches: removeFalsePositives(addWordFields(result.matches), cfg.dictionary, cfg.api_url === initialConfig.api_url ? disabledRules : [])
      };
      // Truncate oversized suggestion lists in place.
      resultWithWords.matches.forEach((match) => {
        if (match.replacements.length > MAX_REPLACEMENTS) {
          match.replacements.length = MAX_REPLACEMENTS;
        }
      });
      return resultWithWords;
    };
    module.exports = checkViaAPI;
  }
});
// src/text-manipulation/replace.js
var require_replace = __commonJS({
  "src/text-manipulation/replace.js"(exports, module) {
    // Splices `change` into `text` over the span [offset, offset + length).
    // `change` may be a plain string or a function receiving the replaced
    // phrase and returning its substitute.
    var replace = (text, change, offset, length) => {
      const head = text.slice(0, offset);
      const tail = text.slice(offset + length);
      const phrase = text.slice(offset, offset + length);
      const substitute = typeof change === "function" ? change(phrase) : change;
      return `${head}${substitute}${tail}`;
    };
    module.exports = replace;
  }
});
// src/text-manipulation/replaceAll.js
var require_replaceAll = __commonJS({
  "src/text-manipulation/replaceAll.js"(exports, module) {
    var replace = require_replace();
    // Applies a list of { change, offset, length } transformations to `text`.
    // Transformations are applied back-to-front so earlier offsets remain
    // valid after each splice.
    var replaceAll = (text, transformations) => {
      // Fix: sort a copy — Array#sort mutates in place, and the input array
      // belongs to the caller (e.g. the interactive session's accumulator).
      return [...transformations].sort((a, b) => b.offset - a.offset).reduce((previousText, { change, offset, length }) => {
        return replace(previousText, change, offset, length);
      }, text);
    };
    module.exports = replaceAll;
  }
});
// src/index.js
var require_src = __commonJS({
  "src/index.js"(exports, module) {
    // Loaded for its side effect: installs a fetch polyfill.
    require_fetch_npm_browserify();
    var check = require_checkViaAPI();
    var replaceAll = require_replaceAll();
    // Public API of the bundle: grammar checking + batch text replacement.
    module.exports = {
      check,
      replaceAll
    };
  }
});
export default require_src();
/*!
* Determine if an object is a Buffer
*
* @author Feross Aboukhadijeh
* @license MIT
*/
================================================
FILE: data/languages.json
================================================
[
{
"name": "Arabic",
"code": "ar",
"longCode": "ar",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Asturian",
"code": "ast",
"longCode": "ast-ES",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Belarusian",
"code": "be",
"longCode": "be-BY",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Breton",
"code": "br",
"longCode": "br-FR",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Catalan",
"code": "ca",
"longCode": "ca-ES",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Catalan (Valencian)",
"code": "ca",
"longCode": "ca-ES-valencia",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Chinese",
"code": "zh",
"longCode": "zh-CN",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Danish",
"code": "da",
"longCode": "da-DK",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Dutch",
"code": "nl",
"longCode": "nl",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Dutch (Belgium)",
"code": "nl",
"longCode": "nl-BE",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "English",
"code": "en",
"longCode": "en",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (Australian)",
"code": "en",
"longCode": "en-AU",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (Canadian)",
"code": "en",
"longCode": "en-CA",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (GB)",
"code": "en",
"longCode": "en-GB",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (New Zealand)",
"code": "en",
"longCode": "en-NZ",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (South African)",
"code": "en",
"longCode": "en-ZA",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "English (US)",
"code": "en",
"longCode": "en-US",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Esperanto",
"code": "eo",
"longCode": "eo",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "French",
"code": "fr",
"longCode": "fr",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Galician",
"code": "gl",
"longCode": "gl-ES",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "German",
"code": "de",
"longCode": "de",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "German (Austria)",
"code": "de",
"longCode": "de-AT",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "German (Germany)",
"code": "de",
"longCode": "de-DE",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "German (Swiss)",
"code": "de",
"longCode": "de-CH",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Greek",
"code": "el",
"longCode": "el-GR",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Irish",
"code": "ga",
"longCode": "ga-IE",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Italian",
"code": "it",
"longCode": "it",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Japanese",
"code": "ja",
"longCode": "ja-JP",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Khmer",
"code": "km",
"longCode": "km-KH",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Persian",
"code": "fa",
"longCode": "fa",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Polish",
"code": "pl",
"longCode": "pl-PL",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Portuguese",
"code": "pt",
"longCode": "pt",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Portuguese (Angola preAO)",
"code": "pt",
"longCode": "pt-AO",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Portuguese (Brazil)",
"code": "pt",
"longCode": "pt-BR",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Portuguese (Moçambique preAO)",
"code": "pt",
"longCode": "pt-MZ",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Portuguese (Portugal)",
"code": "pt",
"longCode": "pt-PT",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Romanian",
"code": "ro",
"longCode": "ro-RO",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Russian",
"code": "ru",
"longCode": "ru-RU",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Simple German",
"code": "de-DE-x-simple-language",
"longCode": "de-DE-x-simple-language",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Slovak",
"code": "sk",
"longCode": "sk-SK",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Slovenian",
"code": "sl",
"longCode": "sl-SI",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Spanish",
"code": "es",
"longCode": "es",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Spanish (voseo)",
"code": "es",
"longCode": "es-AR",
"grammarbotIo": false,
"languagetoolOrg": true
},
{
"name": "Swedish",
"code": "sv",
"longCode": "sv",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Tagalog",
"code": "tl",
"longCode": "tl-PH",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Tamil",
"code": "ta",
"longCode": "ta-IN",
"grammarbotIo": true,
"languagetoolOrg": true
},
{
"name": "Ukrainian",
"code": "uk",
"longCode": "uk-UA",
"grammarbotIo": true,
"languagetoolOrg": true
}
]
================================================
FILE: data/rules.json
================================================
[
{
"id": "CASING",
"description": "Detecting uppercase words where lowercase is required and vice versa."
},
{
"id": "COLLOQUIALISMS",
"description": "Colloquial style."
},
{
"id": "COMPOUNDING",
"description": "Rules about spelling terms as one word or as as separate words."
},
{
"id": "CONFUSED_WORDS",
"description": "Words that are easily confused, like 'there' and 'their' in English."
},
{
"id": "FALSE_FRIENDS",
"description": "Words easily confused by language learners because a similar word exists in their native language."
},
{
"id": "GENDER_NEUTRALITY",
"description": ""
},
{
"id": "GRAMMAR",
"description": ""
},
{
"id": "MISC",
"description": "Miscellaneous rules that don't fit elsewhere."
},
{
"id": "PUNCTUATION",
"description": ""
},
{
"id": "REDUNDANCY",
"description": ""
},
{
"id": "REGIONALISMS",
"description": "Words used only in another language variant or used with different meanings."
},
{
"id": "REPETITIONS",
"description": ""
},
{
"id": "SEMANTICS",
"description": "Logic, content, and consistency problems."
},
{
"id": "STYLE",
"description": "General style issues not covered by other categories, like overly verbose wording."
},
{
"id": "TYPOGRAPHY",
"description": "Problems like incorrectly used dash or quote characters."
},
{
"id": "TYPOS",
"description": "Spelling issues."
}
]
================================================
FILE: examples/api-markdown.js
================================================
const { check } = require("../src")
const main = async () => {
const { language, matches } = await check(`Helo worlt!`, {
markdown: true,
})
console.log({ lang: language.name, mistakes: matches.length })
}
main()
================================================
FILE: examples/api-plain.js
================================================
const { check } = require("../src")
const main = async () => {
const response = await check(`Helo worlt!`, {
markdown: true,
})
console.dir(response, { depth: null })
}
main()
================================================
FILE: examples/api-simple.js
================================================
const { check } = require("../src")
const main = async () => {
const { language, matches } = await check("Some wrongg text to check.")
console.log({ lang: language.name, mistakes: matches.length })
}
main()
================================================
FILE: hello.md
================================================
Hello world!
================================================
FILE: lib/findUpSync.mjs
================================================
// esbuild findUpSync.mjs --bundle --outfile=src/utils/findUpSync.js --format=cjs --platform=node
// Thin re-export of find-up's synchronous finder, bundled to CJS with the
// command above (presumably because find-up ships as ESM-only — confirm).
import { findUpSync } from "find-up"
export default findUpSync
================================================
FILE: lib/package.json
================================================
{
"name": "lib",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"annotatedtext-remark": "^1.0.1",
"find-up": "^6.2.0"
}
}
================================================
FILE: lib/prepareMarkdown.mjs
================================================
import * as builder from "annotatedtext-remark"
// Converts markdown into annotatedtext JSON, which the check API sends via
// its `data` field so markup is not flagged as mistakes.
const prepareMarkdown = (text) => JSON.stringify(builder.build(text))
export default prepareMarkdown
================================================
FILE: package.json
================================================
{
"name": "gramma",
"version": "1.6.0",
"license": "ISC",
"repository": "https://github.com/caderek/gramma",
"homepage": "https://caderek.github.io/gramma/",
"description": "Command line grammar checker",
"main": "src/index.js",
"bin": "src/cli.js",
"exports": {
".": "./src/index.js",
"./esm": "./bundle/gramma.esm.js",
"./esm-min": "./bundle/gramma.esm.min.js",
"./iife": "./bundle/gramma.min.js"
},
"types": "src/index.d.ts",
"scripts": {
"build": "rm -rf bin; yarn run build:win64; yarn run build:macos; yarn run build:linux64; yarn run build:zip; yarn run build:bundles",
"build:win64": "pkg -c package.json -t node16-win-x64 --out-path bin/windows64 src/cli.js",
"build:macos": "pkg -c package.json -t node16-macos-x64 --out-path bin/macos src/cli.js",
"build:linux64": "pkg -c package.json -t node16-linux-x64 --out-path bin/linux64 src/cli.js",
"build:bundles": "yarn run build:esm; yarn run build:esm-min; yarn run build:iife",
"build:esm": "esbuild src/index.js --bundle --outfile=bundle/gramma.esm.js --format=esm",
"build:esm-min": "esbuild src/index.js --bundle --outfile=bundle/gramma.esm.min.js --format=esm --minify",
"build:iife": "esbuild src/index.js --bundle --outfile=bundle/gramma.min.js --format=iife --minify --global-name=gramma",
"build:zip": "node scripts/zipBinaries.js",
"format": "prettier --write \"src/**/*.js\"",
"lint": "eslint src/**",
"test": "jest",
"test:ci": "jest --coverage && cat ./coverage/lcov.info | codacy-coverage",
"check:langs": "node scripts/checkLanguagesSupport.js",
"prepare": "husky install",
"definitions": "tsc"
},
"keywords": [
"grammar",
"command-line",
"checker"
],
"author": "Maciej Cąderek | maciej.caderek@gmail.com",
"dependencies": {
"cli-progress": "^3.9.1",
"decompress": "^4.2.1",
"decompress-unzip": "^4.0.1",
"dotenv": "^10.0.0",
"intercept-stdout": "^0.1.2",
"isomorphic-fetch": "^3.0.0",
"kleur": "^4.1.4",
"portfinder": "^1.0.28",
"progress-stream": "^2.0.0",
"prompts": "^2.4.1",
"query-string": "^7.0.1",
"rimraf": "^3.0.2",
"tcp-port-used": "^1.0.2",
"yargs": "^17.2.1"
},
"devDependencies": {
"@types/jest": "^27.0.2",
"codacy-coverage": "^3.4.0",
"esbuild": "^0.13.4",
"eslint": "^7.32.0",
"eslint-config-airbnb": "^18.2.1",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-import": "^2.24.2",
"eslint-plugin-jsx-a11y": "^6.4.1",
"eslint-plugin-react": "^7.26.1",
"gramma": "^1.6.0",
"husky": "^7.0.0",
"jest": "^27.2.4",
"pkg": "^5.3.3",
"prettier": "^2.4.1",
"shelljs": "^0.8.4",
"typescript": "^4.4.3"
},
"jest": {
"verbose": true,
"testMatch": [
"**/?(*.)(spec|test).?(m)js"
]
},
"engines": {
"node": ">=12.0.0"
}
}
================================================
FILE: scripts/checkLanguagesSupport.js
================================================
const fs = require("fs")
const querystring = require("querystring")
const fetch = require("node-fetch")
const LOCAL_API_URL = "http://localhost:8082/v2/languages"
// Probes an endpoint with a minimal check request; the language counts as
// supported when the endpoint answers with parsable JSON.
const checkSupport = async (language, api) => {
  const payload = querystring.stringify({
    api_key: "",
    language,
    text: "abc",
  })
  const response = await fetch(api, {
    credentials: "include",
    headers: {
      "Content-Type": "application/x-www-form-urlencoded",
    },
    body: payload,
    method: "POST",
  })
  console.log(`Checking ${language} at ${api}`)
  try {
    await response.json()
    return true
  } catch (e) {
    return false
  }
}
// languagetool.org rate limit; requests are spaced evenly to stay under it.
const LANGUAGETOOL_ORG_LIMIT = 20 // req/min
const interval = (60 * 1000) / LANGUAGETOOL_ORG_LIMIT
const delay = () => new Promise((resolve) => setTimeout(resolve, interval))
// Queries a local LanguageTool server for its language list, probes each
// language against grammarbot.io and languagetool.org, writes the results
// to data/languages.json, and regenerates the README support table.
const main = async () => {
  const res = await fetch(LOCAL_API_URL)
  const languages = await res.json()
  for (const language of languages) {
    // Sequential awaits are intentional: the delay() below rate-limits us.
    language.grammarbotIo = await checkSupport(
      language.longCode,
      "http://api.grammarbot.io/v2/check",
    )
    language.languagetoolOrg = await checkSupport(
      language.longCode,
      "https://api.languagetool.org/v2/check",
    )
    await delay()
  }
  fs.writeFileSync("data/languages.json", JSON.stringify(languages, null, 2))
  // NOTE(review): the template below and the regex used on README.md look
  // like they lost their HTML/table markup and comment anchors during
  // extraction — as written, /(.|\n)+/ replaces the ENTIRE README with the
  // generated entries. Verify against the repository before relying on this.
  const entries = languages
    .map(
      ({ name, longCode, grammarbotIo, languagetoolOrg }) =>
        `
${longCode}
${name}
${languagetoolOrg ? "✔" : "-"}
${grammarbotIo ? "✔" : "-"}
✔
`,
    )
    .join("")
  // Strip blank lines from the generated block.
  const docs = `\n${entries}\n `.replace(
    /^\s*[\r\n]/gm,
    "",
  )
  const readme = fs
    .readFileSync("README.md")
    .toString()
    .replace(/(.|\n)+/, docs)
  fs.writeFileSync("README.md", readme)
  console.log("README entries updated!")
}
main()
================================================
FILE: scripts/zipBinaries.js
================================================
const fs = require("fs")
const { execSync } = require("child_process")
const { version } = require("../package.json")
// Builds the zip command for one platform folder under bin/.
// Windows binaries carry an .exe suffix; the others do not.
const cmd = (name) => {
  const ext = name.includes("windows") ? ".exe" : ""
  return `zip -9 -j bin/gramma-${name}-v${version}.zip bin/${name}/gramma${ext}`
}
// Zips every platform build in bin/ and refreshes the version strings in the
// README and the website layout.
const main = () => {
  const folders = fs.readdirSync("bin").filter((name) => !name.includes(".zip"))
  folders.forEach((folder) => {
    console.log(`Creating zip file for ${folder}...`)
    execSync(cmd(folder))
    console.log(`Zip file for ${folder} created!`)
  })
  // Fix: allow multi-digit version components (e.g. v1.10.2). The previous
  // pattern /v\d\.\d\.\d/g only matched single digits, which would leave
  // stale download links behind once any component reached 10.
  const versionRegex = /v\d+\.\d+\.\d+/g
  const readme = fs
    .readFileSync("README.md")
    .toString()
    .replace(versionRegex, `v${version}`)
  fs.writeFileSync("README.md", readme)
  console.log("README links updated!")
  const website = fs
    .readFileSync("_layouts/default.html")
    .toString()
    .replace(versionRegex, `v${version}`)
  fs.writeFileSync("_layouts/default.html", website)
  console.log("Website links updated!")
}
main()
================================================
FILE: src/actions/checkInteractively.js
================================================
const kleur = require("kleur")
const checkWithFallback = require("../requests/checkWithFallback")
const Mistake = require("../components/Mistake")
const handleMistake = require("../prompts/handleMistake")
const replaceAll = require("../text-manipulation/replaceAll")
const equal = require("../utils/equal")
const configure = require("./configure")
const { displayUpdates } = require("../requests/updates")
/**
 * Runs an interactive fixing session over `text`.
 * Presents each mistake one by one and lets the user pick a suggested
 * replacement, type a custom one ("0"), ignore it ("i"), add the word to the
 * local/global dictionary ("l"/"g"), or postpone it ("n").
 * @param {string} text - text to check
 * @param {object} cfg - full config object (session settings + paths)
 * @returns {Promise<{changed: boolean, text?: string}>} corrected text when changed
 */
const checkInteractively = async (text, cfg) => {
  if (!text || text.trim().length === 0) {
    console.log(kleur.yellow("Nothing to check!"))
    return { changed: false }
  }
  const result = await checkWithFallback(text, cfg)
  if (result.matches.length === 0) {
    console.log(kleur.green("No mistakes found!"))
    await displayUpdates(cfg.paths.globalConfigDir)
    return { changed: false, text }
  }
  console.log(
    `Found ${result.matches.length} potential mistake${
      result.matches.length === 1 ? "" : "s"
    }`,
  )
  let { matches } = result
  const total = matches.length
  const transformations = []
  while (matches.length > 0) {
    console.clear()
    console.log(`Language: ${result.language.name}`)
    console.log(
      `Resolved: ${total - matches.length} | Pending: ${matches.length}`,
    )
    const currentMatch = matches.shift()
    console.log(Mistake(currentMatch))
    // eslint-disable-next-line no-await-in-loop
    const { option, replacement } = await handleMistake(
      currentMatch.replacements,
      currentMatch.rule.issueType,
    )
    // "l"/"g": remember the word in the local/global dictionary.
    if (option === "l") {
      configure("dictionary", currentMatch.word, cfg, false)
    } else if (option === "g") {
      configure("dictionary", currentMatch.word, cfg, true)
    }
    if (["i", "l", "g"].includes(option)) {
      // Ignored or added to dictionary: drop every identical pending mistake.
      matches = matches.filter((match) => {
        return !equal(
          [
            match.message,
            match.shortMessage,
            match.replacements,
            match.type,
            match.rule,
            match.word,
          ],
          [
            currentMatch.message,
            currentMatch.shortMessage,
            currentMatch.replacements,
            currentMatch.type,
            currentMatch.rule,
            currentMatch.word,
          ],
        )
      })
    } else if (option === "n") {
      // "Not now": requeue the mistake at the end.
      matches.push(currentMatch)
    } else if (option === "0") {
      // Custom replacement typed by the user.
      transformations.push({
        change: replacement,
        offset: currentMatch.offset,
        length: currentMatch.length,
      })
    } else {
      // Numbered option: pick one of the suggested replacements.
      const chosen = currentMatch.replacements[Number(option) - 1]
      if (chosen === undefined) {
        // Happens when the user aborts the prompt (e.g. with Ctrl-C).
        // Fix: the previous code detected this by matching the exact
        // TypeError message ("Cannot read property 'value' of undefined"),
        // which silently broke on newer V8/Node versions where the message
        // changed to "Cannot read properties of undefined (...)". Guarding
        // the lookup makes the abort path version-independent.
        console.clear()
        process.exit(0)
      }
      transformations.push({
        change: chosen.value,
        offset: currentMatch.offset,
        length: currentMatch.length,
      })
    }
  }
  return { changed: true, text: replaceAll(text, transformations) }
}
module.exports = checkInteractively
================================================
FILE: src/actions/checkNonInteractively.js
================================================
const kleur = require("kleur")
const checkWithFallback = require("../requests/checkWithFallback")
const Mistake = require("../components/Mistake")
const { displayUpdates } = require("../requests/updates")
// Renders either a success message or one formatted entry per mistake.
const print = (result, styles) => {
  const count = result.matches.length
  if (count === 0) {
    console.log(kleur.green("No mistakes found!"))
    return
  }
  console.log(`Found ${count} potential mistake${count === 1 ? "" : "s"}`)
  console.log()
  const rendered = result.matches.map((match) => Mistake(match, styles))
  console.log(rendered.join("\n"))
}
// One-shot check: prints the language and all mistakes, then returns the
// process exit code (0 = clean, 1 = mistakes found).
const checkNonInteractively = async (text, cfg, styles = true) => {
  const isEmpty = !text || text.trim().length === 0
  if (isEmpty) {
    console.log(kleur.yellow("Nothing to check!"))
    return 0
  }
  const result = await checkWithFallback(text, cfg)
  console.log(`Language: ${result.language.name}`)
  print(result, styles)
  await displayUpdates(cfg.paths.globalConfigDir)
  const hasMistakes = result.matches.length > 0
  return hasMistakes ? 1 : 0
}
module.exports = checkNonInteractively
================================================
FILE: src/actions/configure.js
================================================
const fs = require("fs")
const kleur = require("kleur")
const { isRule, ruleOptions } = require("../validators/rules")
const { isLanguage, languageOptions } = require("../validators/languages")
// Options users may set via the config command; anything else is rejected
// unless configure() is called with internal = true.
const availableOptions = [
  "api_key",
  "api_url",
  "dictionary",
  "server_once",
  "language",
  "enable",
  "disable",
]
// Returns a sorted copy of the dictionary with `word` added; returns the
// original array untouched when the word is already present. A missing or
// malformed dictionary behaves like an empty one.
const addToDictionary = (dictionary, word) => {
  const dict = Array.isArray(dictionary) ? dictionary : []
  return dict.includes(word) ? dict : [...dict, word].sort()
}
// Returns a copy of `rules` with one rule toggled; does not mutate the input.
const changeRule = (rules, ruleName, isEnabled) => {
  const updated = { ...rules }
  updated[ruleName] = isEnabled
  return updated
}
/**
 * Maps a (key, value) pair from the CLI into a partial config object.
 * Exits the process with code 1 on unknown rule or language values.
 */
const prepareEntry = (key, value, cfg) => {
  if (key === "dictionary") {
    // Dictionary entries accumulate instead of replacing the whole list.
    return { dictionary: addToDictionary(cfg.dictionary, value) }
  }
  if (key === "enable" || key === "disable") {
    if (!isRule(value)) {
      console.log(kleur.red("There is no such rule"))
      console.log(`Available options: ${ruleOptions.join(", ")}`)
      process.exit(1)
    }
    return { rules: changeRule(cfg.rules, value, key === "enable") }
  }
  if (key === "language" && !isLanguage(value)) {
    console.log(kleur.red("There is no such language option"))
    console.log(`Available options: ${languageOptions.join(", ")}`)
    process.exit(1)
  }
  // All remaining keys are plain scalar overrides.
  return { [key]: value }
}
/**
 * Persists a single configuration change to the local or global config file,
 * and updates the in-memory `cfg` object to match.
 * @param {string} key - option name (must be in availableOptions unless internal)
 * @param {*} value - new value (a word for "dictionary", a rule id for enable/disable)
 * @param {object} cfg - full config object ({ global, local, paths, ... }); mutated
 * @param {boolean} isGlobal - write to the global config instead of the local one
 * @param {boolean} internal - bypass the availableOptions check (programmatic use)
 */
const configure = (key, value, cfg, isGlobal = false, internal = false) => {
  if (!availableOptions.includes(key) && !internal) {
    console.log(kleur.red(`There is no '${key}' option!`))
    console.log("Available options:")
    console.log(availableOptions.join("\n"))
    process.exit(1)
  }
  // server_once controls the shared local server, so it only makes sense
  // in the global config.
  if (key === "server_once" && !isGlobal) {
    console.log(
      kleur.red("This setting can be used only with -g (--global) flag"),
    )
    process.exit(1)
  }
  const currentConfig = isGlobal ? cfg.global : cfg.local
  const configFilePath = isGlobal
    ? cfg.paths.globalConfigFile
    : cfg.paths.localConfigFile
  const entry = prepareEntry(key, value, currentConfig)
  const updatedConfig = { ...currentConfig, ...entry }
  // Keep the in-memory config in sync with what is written to disk.
  if (isGlobal) {
    // eslint-disable-next-line no-param-reassign
    cfg.global = updatedConfig
  } else {
    // eslint-disable-next-line no-param-reassign
    cfg.local = updatedConfig
  }
  fs.writeFileSync(configFilePath, JSON.stringify(updatedConfig, null, 2))
}
module.exports = configure
================================================
FILE: src/actions/save.js
================================================
const path = require("path")
const fs = require("fs")
const kleur = require("kleur")
const { homedir } = require("os")
const handleSave = require("../prompts/handleSave")
/**
 * Final step of an interactive session: asks the user how to persist the
 * corrected text and performs the chosen action (replace the original file,
 * save under a new name, or print to stdout).
 * @param {string} text - fully corrected text
 * @param {string} mode - save mode forwarded to the handleSave prompt
 * @param {string|null} filePath - original file path (null when input did not come from a file)
 */
const save = async (text, mode, filePath = null) => {
  const originalFile = filePath ? path.basename(filePath) : null
  console.clear()
  console.log("All mistakes fixed!")
  const { saveOption, fileName } = await handleSave(mode, originalFile)
  if (saveOption === "replace") {
    // Overwrite the original file in place.
    fs.writeFileSync(filePath, text)
    console.clear()
    console.log(kleur.green("Saved!"))
  } else if (saveOption === "save-as") {
    // Support "~" as the user's home directory in the entered file name.
    const resolvedFileName = fileName.replace("~", homedir())
    const newPath = path.resolve(process.cwd(), resolvedFileName)
    fs.writeFileSync(newPath, text)
    console.clear()
    console.log(kleur.green(`Saved as ${newPath}`))
  } else {
    // No file target: print the corrected text to stdout.
    console.clear()
    console.log(
      `---------------------------------\n\n${text}\n\n---------------------------------\n${kleur.green(
        "Done!",
      )}`,
    )
  }
}
module.exports = save
================================================
FILE: src/actions/saveNow.js
================================================
const fs = require("fs")
// Writes the current text to disk immediately (incremental save during an
// interactive session) and clears the screen for the next prompt.
const saveNow = async (text, filePath) => {
  fs.writeFileSync(filePath, text)
  console.clear()
}
module.exports = saveNow
================================================
FILE: src/boot/load.js
================================================
const prepareConfig = require("./prepareConfig")
// Wraps a CLI action: builds the layered config from argv before invoking it.
// Files ending in .md are always checked in markdown mode.
const load = (action) => (argv) => {
  const isMarkdownFile = Boolean(argv.file && argv.file.endsWith(".md"))
  if (isMarkdownFile) {
    argv.markdown = true // eslint-disable-line
  }
  action(argv, prepareConfig(argv))
}
module.exports = load
================================================
FILE: src/boot/prepareConfig.js
================================================
const fs = require("fs")
const path = require("path")
const { platform, homedir } = require("os")
const findUpSync = require("../utils/findUpSync").default
const initialConfig = require("../initialConfig")
// Per-platform base directory for user-level configuration files.
const configBasePath = {
  linux: ".config",
  darwin: "Library/Preferences",
  win32: "AppData/Roaming",
}
const home = homedir()
const globalConfigDir = path.join(home, configBasePath[platform()], "gramma")
const globalConfigFile = path.join(globalConfigDir, "gramma.json")
// Nearest .gramma.json found walking up from the current directory
// (per-project config); may be undefined when none exists.
const localConfigFile = findUpSync(".gramma.json")
// First-run setup: create the global config dir and seed it with defaults.
if (!fs.existsSync(globalConfigDir)) {
  fs.mkdirSync(globalConfigDir, { recursive: true })
}
if (!fs.existsSync(globalConfigFile)) {
  fs.writeFileSync(globalConfigFile, JSON.stringify(initialConfig, null, 2))
}
/**
 * Substitutes ${VAR_NAME} placeholders in raw config text with values from
 * process.env. Placeholders whose variable is unset are left in place so the
 * user can spot them.
 * @param {string} configText - raw config file contents
 * @returns {string} config text with known placeholders resolved
 */
const loadEnvironmentVariables = (configText) => {
  const items = configText.match(/\${[a-z0-9\-_.]*}/gi)
  if (!items) {
    return configText
  }
  let text = configText
  items.forEach((item) => {
    const envVarName = item.slice(2, -1)
    const envVar = process.env[envVarName]
    if (envVar) {
      // Fix: use a replacer function — passing envVar as a plain string makes
      // String#replace interpret special patterns ($&, $', $1...) inside the
      // value, corrupting env values that contain a dollar sign.
      text = text.replace(item, () => envVar)
    }
  })
  return text
}
/**
 * Reads and parses a JSON config file, expanding ${ENV_VAR} placeholders.
 *
 * @param {string|null} filePath - config file location, possibly absent
 * @returns {Object|null} parsed config, or null when the file is missing
 */
const prepareFileConfig = (filePath) => {
  if (!filePath || !fs.existsSync(filePath)) {
    return null
  }
  const rawText = fs.readFileSync(filePath).toString()
  return JSON.parse(loadEnvironmentVariables(rawText))
}
/**
 * Derives a partial config from CLI arguments. Disabled rules are
 * applied before enabled ones, so a rule listed in both --disable and
 * --enable ends up enabled.
 *
 * @param {Object} argv - parsed CLI arguments
 * @returns {Object} config fragment (language, rules, markdown, modifiers)
 */
const prepareArgvConfig = ({ language, disable, enable, global, markdown }) => {
  const asArray = (value) => (Array.isArray(value) ? value : [value])
  const rules = {}
  for (const rule of asArray(disable)) {
    rules[rule] = false
  }
  for (const rule of asArray(enable)) {
    rules[rule] = true
  }
  return { language, rules, markdown, modifiers: { global } }
}
/**
 * Builds the full configuration object for one CLI session.
 *
 * Precedence: built-in defaults < file config < CLI arguments.
 * File configs replace one another (local over global), so user-level
 * and project-level settings never mix. CLI arguments only alter
 * nested values (single rules), so file-defined rules survive.
 *
 * @param {Object} paths - resolved config/file locations
 * @returns {Function} (argv) => { initial, global, local, session, paths }
 */
const prepareConfig = (paths) => (argv) => {
  const globalConfig = prepareFileConfig(paths.globalConfigFile)
  const localConfig = prepareFileConfig(paths.localConfigFile)

  // Legacy value: "localhost" in a local config means "inherit".
  if (localConfig && localConfig.api_url === "localhost") {
    localConfig.api_url = "inherit"
  }

  const argvConfig = prepareArgvConfig(argv)
  const cfg = { ...initialConfig, ...(localConfig || globalConfig || {}) }

  // "inherit" delegates the API url to the global config (or the
  // built-in default), which allows a dynamic local-server url.
  // eslint-disable-next-line camelcase
  const api_url =
    cfg.api_url === "inherit"
      ? (globalConfig || {}).api_url || initialConfig.api_url
      : cfg.api_url

  const sessionConfig = {
    ...cfg,
    language:
      argvConfig.language === "config" ? cfg.language : argvConfig.language,
    rules: { ...cfg.rules, ...argvConfig.rules },
    modifiers: argvConfig.modifiers,
    api_url,
    markdown: argvConfig.markdown,
  }

  return {
    initial: initialConfig,
    global: globalConfig || {},
    local: localConfig || {},
    session: sessionConfig,
    paths,
  }
}
// Export prepareConfig pre-bound to the resolved filesystem paths;
// callers only pass argv.
module.exports = prepareConfig({
globalConfigDir,
globalConfigFile,
localConfigFile,
home,
serverDownload: "https://languagetool.org/download/LanguageTool-stable.zip",
})
================================================
FILE: src/cli.js
================================================
#!/usr/bin/env node
require("dotenv").config()
require("isomorphic-fetch")
const yargs = require("yargs")
const { version } = require("../package.json")
const load = require("./boot/load")
const check = require("./commands/check")
const listen = require("./commands/listen")
const commit = require("./commands/commit")
const init = require("./commands/init")
const config = require("./commands/config")
const paths = require("./commands/paths")
const server = require("./commands/server")
const { hook } = require("./commands/hook")
const { languageOptions } = require("./validators/languages")
const { ruleOptions } = require("./validators/rules")
// Command definitions. The positional tokens (<file>, <text>, ...) are part
// of the command strings so yargs actually populates argv.file / argv.text /
// argv.key / argv.value / argv.action for the handlers (check.js reads
// argv.file, listen/commit read argv.text, etc.).
// eslint-disable-next-line no-unused-expressions
yargs
  .command(
    "check <file>",
    "check file for writing mistakes",
    (yargsCtx) => {
      yargsCtx.positional("file", {
        describe: "file to check",
      })
    },
    load(check),
  )
  .command(
    "listen <text>",
    "check text for writing mistakes",
    (yargsCtx) => {
      yargsCtx.positional("text", {
        describe: "text to check",
      })
    },
    load(listen),
  )
  .command(
    "commit <text>",
    "git commit -m with grammar check",
    (yargsCtx) => {
      yargsCtx.positional("text", {
        describe: "commit message to check",
      })
    },
    load(commit),
  )
  .command(
    // Optional: Git's commit-msg hook passes the message file path.
    "hook [file]",
    "toggles Git hook",
    (yargsCtx) => {
      yargsCtx.positional("file", {
        describe: "commit message file",
      })
    },
    load(hook),
  )
  .command(
    "init",
    "create local config with default settings",
    () => {},
    load(init),
  )
  .command(
    "config <key> <value>",
    "set config entry",
    (yargsCtx) => {
      yargsCtx
        .positional("key", {
          describe: "name of the config entry",
        })
        .positional("value", {
          describe: "value of the config entry",
        })
    },
    load(config),
  )
  .command("paths", "show paths used by Gramma", () => {}, load(paths))
  .command(
    "server <action>",
    "manage local API server",
    (yargsCtx) => {
      yargsCtx.positional("action", {
        describe: "action to take (install / start / stop / pid / gui)",
      })
    },
    load(server),
  )
  .alias("help", "h")
  .version(`v${version}`)
  .alias("version", "v")
  .hide("paths")
  .option("print", {
    alias: "p",
    type: "boolean",
    default: false,
    describe: "Print mistakes non-interactively",
  })
  .option("no-colors", {
    alias: "n",
    type: "boolean",
    default: false,
    describe: "Disable output colors",
  })
  .option("language", {
    alias: "l",
    type: "string",
    default: "config",
    describe: "Set the language of the text",
    choices: languageOptions,
  })
  .option("disable", {
    alias: "d",
    type: "string",
    describe: "Disable specific rule",
    default: [],
    choices: ruleOptions,
  })
  .option("enable", {
    alias: "e",
    type: "string",
    describe: "Enable specific rule",
    default: [],
    choices: ruleOptions,
  })
  .option("all", {
    alias: "a",
    type: "boolean",
    default: false,
    describe: "Add -a flag to git commit command",
  })
  .option("global", {
    alias: "g",
    type: "boolean",
    default: false,
    describe: "Use global configuration file with 'config' command",
  })
  .option("markdown", {
    alias: "m",
    type: "boolean",
    default: false,
    describe: "Treat the text as markdown",
  })
  .option("port", {
    type: "number",
    describe: "Set the port number of local API server",
  })
  .demandCommand().argv
================================================
FILE: src/cli.test.js
================================================
const shell = require("shelljs")
const fs = require("fs")
// Creates a throwaway fixture file under ./test-temp for the CLI tests.
const prepareData = (text) => {
  fs.mkdirSync("test-temp", { recursive: true })
  fs.writeFileSync("test-temp/example.txt", text)
}
// Removes the temporary fixture directory created by prepareData.
const removeData = () => shell.rm("-rf", "test-temp")
// End-to-end tests: spawn the real CLI via shelljs and assert on the exit
// code and captured output. These hit the configured LanguageTool API, so
// network access is required. Exit code 1 means mistakes were found.
describe("'listen' command", () => {
it("prints potential mistakes with '--print' option", () => {
const result = shell.exec(
"node src/cli.js listen --print 'There is a mistkae'",
)
expect(result.code).toEqual(1)
expect(result.stderr).toEqual("")
// grep() returns the matching output lines; non-empty means it printed.
expect(result.grep("Context")).not.toEqual("")
expect(result.grep("Suggested fix")).not.toEqual("")
})
it("prints potential mistakes with '-p' option", () => {
const result = shell.exec("node src/cli.js listen -p 'There is a mistkae'")
expect(result.code).toEqual(1)
expect(result.stderr).toEqual("")
expect(result.grep("Context")).not.toEqual("")
expect(result.grep("Suggested fix")).not.toEqual("")
})
it("prints no mistakes with '--print' option", () => {
const result = shell.exec(
"node src/cli.js listen --print 'There are no mistakes'",
)
expect(result.code).toEqual(0)
expect(result.stderr).toEqual("")
expect(result.grep("No mistakes found!")).not.toEqual("")
})
it("prints no mistakes with '-p' option", () => {
const result = shell.exec(
"node src/cli.js listen -p 'There are no mistakes'",
)
expect(result.code).toEqual(0)
expect(result.stderr).toEqual("")
expect(result.grep("No mistakes found!")).not.toEqual("")
})
})
// Same end-to-end approach as the 'listen' suite, but checking a file on
// disk; each test writes its own fixture and cleans it up afterwards.
describe("'check' command", () => {
it("prints potential mistakes with '--print' option", () => {
prepareData("There is a mistkae")
const result = shell.exec(
"node src/cli.js check --print test-temp/example.txt",
)
expect(result.code).toEqual(1)
expect(result.stderr).toEqual("")
expect(result.grep("Context")).not.toEqual("")
expect(result.grep("Suggested fix")).not.toEqual("")
removeData()
})
it("prints potential mistakes with '-p' option", () => {
prepareData("There is a mistkae")
const result = shell.exec("node src/cli.js check -p test-temp/example.txt")
expect(result.code).toEqual(1)
expect(result.stderr).toEqual("")
expect(result.grep("Context")).not.toEqual("")
expect(result.grep("Suggested fix")).not.toEqual("")
removeData()
})
it("prints no mistakes with '--print' option", () => {
prepareData("There are no mistakes")
const result = shell.exec(
"node src/cli.js check --print test-temp/example.txt",
)
expect(result.code).toEqual(0)
expect(result.stderr).toEqual("")
expect(result.grep("No mistakes found!")).not.toEqual("")
removeData()
})
it("prints no mistakes with '-p' option", () => {
prepareData("There are no mistakes")
const result = shell.exec("node src/cli.js check -p test-temp/example.txt")
expect(result.code).toEqual(0)
expect(result.stderr).toEqual("")
expect(result.grep("No mistakes found!")).not.toEqual("")
removeData()
})
})
================================================
FILE: src/commands/check.js
================================================
const intercept = require("intercept-stdout")
const kleur = require("kleur")
const fs = require("fs")
const checkNonInteractively = require("../actions/checkNonInteractively")
const checkInteractively = require("../actions/checkInteractively")
const save = require("../actions/save")
const stripStyles = require("../utils/stripStyles")
/**
 * `gramma check` command: validates the file argument, then checks its
 * contents either non-interactively (--print) or interactively, saving
 * the corrected text back when the user changed anything.
 *
 * Exits 1 on bad arguments or (in --print mode) when mistakes are found.
 */
const check = async (argv, cfg) => {
  if (!argv.file) {
    console.log(kleur.red("Please provide a file path."))
    process.exit(1)
  }
  const missing =
    !fs.existsSync(argv.file) || argv.file === "." || argv.file === ".."
  if (missing) {
    console.log(kleur.red("There is no such file!"))
    process.exit(1)
  }
  const initialText = fs.readFileSync(argv.file).toString()
  if (!argv.print) {
    const { changed, text } = await checkInteractively(initialText, cfg)
    if (changed) {
      await save(text, "FILE", argv.file)
    }
    process.exit()
  }
  const noColors = argv["no-colors"]
  if (noColors) {
    intercept(stripStyles)
  }
  const status = await checkNonInteractively(initialText, cfg, !noColors)
  process.exit(status)
}
module.exports = check
================================================
FILE: src/commands/commit.js
================================================
const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")
const checkInteractively = require("../actions/checkInteractively")
/**
 * `gramma commit` command: checks the commit message interactively,
 * stages .gramma.json when present (so dictionary updates travel with
 * the commit), then runs `git commit` with the corrected message.
 *
 * The message is passed via stdin (`git commit -F -`) instead of being
 * interpolated into a shell string, so quotes, backticks and `$` in the
 * message can no longer break the command or inject shell code.
 */
const commit = async (argv, cfg) => {
  const { text } = await checkInteractively(argv.text, cfg)
  try {
    if (fs.existsSync(path.join(process.cwd(), ".gramma.json"))) {
      execSync("git add .gramma.json")
    }
    const gitCommand = argv.all ? "git commit -a -F -" : "git commit -F -"
    const output = execSync(gitCommand, { input: text })
    process.stdout.write(output)
  } catch (error) {
    // execSync errors carry the child's stdout; fall back to the error
    // message for non-exec failures so we never write undefined to stderr.
    process.stderr.write(error.stdout || `${error.message}\n`)
  }
  process.exit()
}
module.exports = commit
================================================
FILE: src/commands/config.js
================================================
const kleur = require("kleur")
const configure = require("../actions/configure")
const confirmConfig = require("../prompts/confirmConfig")
/**
 * `gramma config` command: writes a key/value pair to the local config,
 * or (with --global, or after the user confirms the fallback) to the
 * global config.
 */
const config = async (argv, cfg) => {
  const hasLocalConfig = Boolean(cfg.paths.localConfigFile)
  if (!argv.global && !hasLocalConfig) {
    const { useGlobal } = await confirmConfig()
    if (!useGlobal) {
      console.log(kleur.yellow("Aborting"))
      process.exit()
    }
    argv.global = true // eslint-disable-line
  }
  configure(argv.key, argv.value, cfg, argv.global)
  console.log(kleur.green("Done!"))
}
module.exports = config
================================================
FILE: src/commands/debug.js
================================================
/**
 * Hidden `gramma debug` command: dumps the resolved configuration and
 * the raw CLI arguments for troubleshooting.
 */
const debug = async (argv, cfg) => {
  const separator = "------------------------------------"
  console.log("config:")
  console.log(cfg)
  console.log(separator)
  console.log("argv:")
  console.log(argv)
  console.log(separator)
}
module.exports = debug
================================================
FILE: src/commands/hook.js
================================================
const kleur = require("kleur")
const fs = require("fs")
const path = require("path")
const os = require("os")
const { execSync } = require("child_process")
const checkInteractively = require("../actions/checkInteractively")
const saveNow = require("../actions/saveNow")
const appLocation = require("../utils/appLocation")
const sys = os.platform()

// Reconnect stdin to the terminal so interactive prompts work inside Git hooks.
const REDIRECT_STDIN = "\n\nexec < /dev/tty"

/**
 * Builds the shell code for a Git hook on each supported platform.
 * `full` is the complete script (with shebang) for a brand-new hook file;
 * `partial` is the snippet appended to an existing hook file.
 *
 * @param {string} command - command the hook should run
 * @param {boolean} [stdin=true] - whether to re-attach stdin to the TTY
 */
const getHookCode = (command, stdin = true) => {
  const stdinCode = stdin ? REDIRECT_STDIN : ""
  const full = `#!/bin/sh${stdinCode}\n\n${command}\n`
  const partial = `${stdinCode}\n\n${command}\n`
  return {
    linux: { full, partial },
    darwin: { full, partial },
    // Hooks run under sh even on Windows, so normalize backslashes.
    win32: {
      full: full.replace(/\\/g, "/"),
      partial: partial.replace(/\\/g, "/"),
    },
  }
}
// Path of the .git directory for the current working directory.
const gitRoot = path.join(process.cwd(), ".git")

/**
 * Reports whether the current working directory is a Git repository root.
 * @returns {boolean}
 */
const checkGit = () => fs.existsSync(gitRoot)
/**
 * Ensures a file exists, creating a zero-byte one when missing.
 * Existing files are left untouched.
 */
const createEmptyFile = (file) => {
  if (fs.existsSync(file)) {
    return
  }
  fs.closeSync(fs.openSync(file, "w"))
}
/**
 * Toggles Gramma's snippet inside a single Git hook file:
 * - missing hook file: create it with the full script and chmod 755
 * - existing file without the snippet: append the snippet
 * - existing file with the snippet: remove it (toggle off), unless
 *   `onlyCreate` is set, in which case the file is left as-is
 *
 * @param {string} hookFile - path to the hook file
 * @param {Object} hookCode - per-platform hook code from getHookCode()
 * @param {boolean} onlyCreate - never remove an existing snippet
 * @param {string} name - hook name used in status messages
 */
const addHookCode = (hookFile, hookCode, onlyCreate, name) => {
  const { full, partial } = hookCode[sys]
  if (!fs.existsSync(hookFile)) {
    fs.writeFileSync(hookFile, full)
    fs.chmodSync(hookFile, "755")
    console.log(kleur.green(`Hook (${name}) created!`))
    return
  }
  const content = fs.readFileSync(hookFile).toString()
  if (!content.includes(partial)) {
    fs.appendFileSync(hookFile, partial)
    console.log(kleur.green(`Hook (${name}) created!`))
  } else if (onlyCreate) {
    console.log(kleur.yellow(`Hook (${name}) already exists!`))
  } else {
    fs.writeFileSync(hookFile, content.replace(partial, ""))
    console.log(kleur.green(`Hook (${name}) removed!`))
  }
}
/**
 * Installs (or toggles) Gramma's commit-msg and post-commit Git hooks.
 * Honors a custom hooksPath from .git/config when present. Exits with
 * an error when the cwd is not a Git repository.
 *
 * @param {boolean} [onlyCreate=false] - never remove existing hooks
 */
const addHooksCode = (onlyCreate = false) => {
  if (!checkGit()) {
    console.log(kleur.red("No .git in this directory"))
    process.exit(1)
  }

  // Respect a custom hooks directory (hooksPath) configured in .git/config.
  const gitConfig = fs.readFileSync(path.join(gitRoot, "config")).toString()
  const hooksConfig = gitConfig.match(/hooksPath *=.*/gi)
  const hooksFolder = hooksConfig && hooksConfig[0].split("=")[1].trim()

  const hookPath = (hookName) =>
    hooksFolder
      ? path.resolve(process.cwd(), hooksFolder, hookName)
      : path.resolve(process.cwd(), ".git", "hooks", hookName)

  // Inside an npm project prefer npx; otherwise call the binary directly.
  const runner = fs.existsSync("node_modules") ? "npx gramma" : appLocation

  addHookCode(
    hookPath("commit-msg"),
    getHookCode(`${runner} hook $1`),
    onlyCreate,
    "commit-msg",
  )
  addHookCode(
    hookPath("post-commit"),
    getHookCode(`${runner} hook cleanup`, false),
    onlyCreate,
    "post-commit",
  )
}
/**
 * `gramma hook` command — behavior depends on the trailing CLI argument:
 *  - no argument: toggle Gramma's Git hooks for the current repository
 *  - "cleanup": run by the post-commit hook; amends dictionary changes
 *  - anything else: treated as the commit message file path (commit-msg hook)
 */
const hook = async (argv, cfg) => {
// The argument is taken straight from process.argv rather than from yargs
// positionals — Git invokes the hook command verbatim.
const arg =
process.argv[process.argv.length - 1] !== "hook"
? process.argv[process.argv.length - 1]
: null
// No arg - execute the default command
if (!arg) {
addHooksCode()
process.exit()
}
// Temporary file to coordinate git hooks
// See: https://stackoverflow.com/a/12802592/4713502
const tempFile = path.join(cfg.paths.globalConfigDir, ".commit")
// Code executed by `post-commit` hook
if (arg === "cleanup") {
// Only amend when commit-msg actually ran (temp file present) and there is
// a project config to stage; failures are deliberately best-effort.
if (cfg.paths.localConfigFile && fs.existsSync(tempFile)) {
fs.unlinkSync(tempFile)
try {
execSync(`git add ${cfg.paths.localConfigFile}`)
execSync(`git commit --amend --no-edit --no-verify`)
} catch (e) {} // eslint-disable-line
}
process.exit()
}
// Code executed by `commit-msg` hook
createEmptyFile(tempFile)
const file = arg
// Strip Git's comment lines (and the --verbose diff section) before checking.
const commitText = fs
.readFileSync(file)
.toString()
.replace(/# ------------------------ >8[\S\s]*/m, "") // Remove diff part on --verbose
.replace(/#.*/g, "") // Remove other comments
const { changed, text } = await checkInteractively(commitText, cfg)
if (changed) {
await saveNow(text, file)
}
process.exit()
}
exports.checkGit = checkGit
exports.addHookCode = addHooksCode
exports.hook = hook
================================================
FILE: src/commands/init.js
================================================
const kleur = require("kleur")
const fs = require("fs")
const path = require("path")
const initialConfig = require("../initialConfig")
const confirmInit = require("../prompts/confirmInit")
const { addHookCode, checkGit } = require("./hook")
// New project configs are always created in the current working directory.
const localConfigFile = path.join(process.cwd(), ".gramma.json")

/**
 * `gramma init` command: interactively creates a project-level
 * .gramma.json (choosing the API url) and optionally installs the Git
 * hooks. Refuses to overwrite an existing project config.
 */
const init = async (argv, cfg) => {
  if (fs.existsSync(cfg.paths.localConfigFile)) {
    console.log(kleur.red("Gramma config already exists for this project!"))
    return
  }
  const hasGit = checkGit()
  const { hook, api } = await confirmInit(hasGit)
  if (!api) {
    console.log(kleur.yellow("Aborting!"))
    process.exit(1)
  }
  const content = JSON.stringify({ ...initialConfig, api_url: api }, null, 2)
  fs.writeFileSync(localConfigFile, content)
  console.log(kleur.green("Gramma config created!"))
  if (hook) {
    addHookCode(true)
  }
}
module.exports = init
================================================
FILE: src/commands/listen.js
================================================
const intercept = require("intercept-stdout")
const checkNonInteractively = require("../actions/checkNonInteractively")
const checkInteractively = require("../actions/checkInteractively")
const save = require("../actions/save")
const stripStyles = require("../utils/stripStyles")
/**
 * `gramma listen` command: checks a text given on the command line.
 * With --print, reports mistakes non-interactively and exits with the
 * checker's status; otherwise runs the interactive fixer and offers to
 * save the corrected text.
 */
const listen = async (argv, cfg) => {
  if (!argv.print) {
    const { changed, text } = await checkInteractively(argv.text, cfg)
    if (changed) {
      await save(text, "TEXT")
    }
    process.exit()
  }
  const noColors = argv["no-colors"]
  if (noColors) {
    intercept(stripStyles)
  }
  const status = await checkNonInteractively(argv.text, cfg, !noColors)
  process.exit(status)
}
module.exports = listen
================================================
FILE: src/commands/paths.js
================================================
const appLocation = require("../utils/appLocation")
/**
 * Hidden `gramma paths` command: prints the file locations Gramma uses.
 */
const paths = (argv, cfg) => {
  const serverPath = cfg.global.server_path || "not installed"
  console.log(`Global config: ${cfg.paths.globalConfigFile}`)
  console.log(`App location: ${appLocation}`)
  console.log(`Local server: ${serverPath}`)
}
module.exports = paths
================================================
FILE: src/commands/server.js
================================================
const kleur = require("kleur")
const installServer = require("../server/installServer")
const startServer = require("../server/startServer")
const stopServer = require("../server/stopServer")
const getServerPID = require("../server/getServerPID")
const getServerInfo = require("../server/getServerInfo")
const showServerGUI = require("../server/showServerGUI")
/**
 * `gramma server <action>` command: dispatches to the local
 * LanguageTool server management actions, exiting 1 with a usage
 * message for unknown actions.
 */
const server = async (argv, cfg) => {
  const actions = {
    install: () => installServer(cfg),
    start: () => startServer(cfg, { port: argv.port, viaCommand: true }),
    stop: () => stopServer(cfg),
    pid: () => getServerPID(cfg),
    info: () => getServerInfo(cfg),
    gui: () => showServerGUI(cfg),
  }
  // Own-property check so inherited keys (e.g. "constructor") don't match.
  const known = Object.prototype.hasOwnProperty.call(actions, argv.action)
  if (!known) {
    console.log(kleur.red("There is no such command!"))
    console.log(
      `Available options for gramma server: ${Object.keys(actions).join(
        " | ",
      )}`,
    )
    process.exit(1)
  }
  await actions[argv.action]()
  process.exit()
}
module.exports = server
================================================
FILE: src/components/FixMenu.js
================================================
const kleur = require("kleur")
/**
 * Renders the numeric fix-selection line of the fix menu: empty when
 * there are no fixes, "1: fix" for one, "1-N: choose fix" for several.
 */
const FixOptions = (fixes) => {
  switch (fixes.length) {
    case 0:
      return ""
    case 1:
      return kleur.bold().green("1") + kleur.reset(`: fix\n`)
    default:
      return (
        kleur.bold().green(`1-${fixes.length}`) + kleur.reset(`: choose fix\n`)
      )
  }
}
/**
 * Renders the interactive fix menu for a single mistake. Dictionary
 * options (l/g) are shown only for misspellings.
 *
 * @param {Array} fixes - suggested replacements
 * @param {string} [issue] - LanguageTool issue type
 * @returns {string} formatted menu text
 */
const FixMenu = (fixes, issue) => {
  const key = (k) => kleur.bold().green(k)
  const defaultFix = key(fixes.length > 0 ? 1 : 0)
  const dictionaryOptions =
    issue === "misspelling"
      ? key("l") +
        kleur.reset(`: add to local dictionary\n`) +
        key("g") +
        kleur.reset(`: add to global dictionary\n`)
      : ""
  return (
    `What do you want to do?\n` +
    key("Enter") +
    kleur.reset(`: default (${defaultFix})\n`) +
    FixOptions(fixes) +
    key("0") +
    kleur.reset(`: custom fix\n`) +
    key("i") +
    kleur.reset(`: ignore\n`) +
    dictionaryOptions +
    key("n") +
    kleur.reset(`: next\n`)
  )
}
module.exports = FixMenu
================================================
FILE: src/components/FixMenu.test.js
================================================
const stripStyles = require("../utils/stripStyles")
const FixMenu = require("./FixMenu")
// Unit tests for the FixMenu component; kleur styles are stripped so the
// assertions compare plain text. Fix entries only need to exist (length
// is all FixMenu reads), hence the empty-object placeholders.
describe("FixMenu component", () => {
it("renders menu with multiple fix propositions for mistake", () => {
const expected =
"What do you want to do?\n" +
"Enter: default (1)\n" +
"1-3: choose fix\n" +
"0: custom fix\n" +
"i: ignore\n" +
"n: next\n"
const result = FixMenu([{}, {}, {}])
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
it("renders menu with single fix proposition for mistake", () => {
const expected =
"What do you want to do?\n" +
"Enter: default (1)\n" +
"1: fix\n" +
"0: custom fix\n" +
"i: ignore\n" +
"n: next\n"
const result = FixMenu([{}])
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
it("renders menu with no fix propositions for mistake", () => {
const expected =
"What do you want to do?\n" +
"Enter: default (0)\n" +
"0: custom fix\n" +
"i: ignore\n" +
"n: next\n"
const result = FixMenu([])
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
it("renders dictionary options on spelling mistake", () => {
const expected =
"What do you want to do?\n" +
"Enter: default (0)\n" +
"0: custom fix\n" +
"i: ignore\n" +
"l: add to local dictionary\n" +
"g: add to global dictionary\n" +
"n: next\n"
const result = FixMenu([], "misspelling")
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
})
================================================
FILE: src/components/Mistake.js
================================================
const kleur = require("kleur")
const replace = require("../text-manipulation/replace")
/**
 * Maps a LanguageTool issue type to a terminal highlight color:
 * grammar → red, style → blue, anything else (e.g. misspelling) → yellow.
 */
const getMistakeColor = (type) => {
  switch (type) {
    case "grammar":
      return "red"
    case "style":
      return "blue"
    default:
      return "yellow"
  }
}
/**
 * Colors the mistake span inside the context string with the color
 * matching its issue type.
 */
const highlightMistake = (context, type, offset, length) => {
  const colorize = kleur[getMistakeColor(type)]
  return replace(context, (mistake) => colorize(mistake), offset, length)
}
/**
 * Renders a single mistake report: rule category, explanation,
 * highlighted context and (when available) the numbered suggested fixes.
 *
 * @param {Object} match - LanguageTool match (with added `word` field)
 * @param {boolean} [style=true] - when false, include the "Word:" line
 */
const Mistake = (match, style = true) => {
  const context = highlightMistake(
    match.context.text,
    match.rule.issueType,
    match.context.offset,
    match.context.length,
  )
  const replacements = match.replacements
    .map((r, i) => `${kleur.bold().green(i + 1)}) ${r.value}`)
    .join(" ")
  const fixes =
    match.replacements.length > 0
      ? `${kleur.bold("Suggested fix:")} ${replacements}\n`
      : ""
  const word = style ? "" : `Word: ${match.word}\n`
  const rule = kleur.dim(
    `${kleur.bold("Rule:")} ${match.rule.category.id.toLowerCase()}\n`,
  )
  const explanation = kleur.dim(
    `${kleur.bold("Explanation:")} ${match.message}\n\n`,
  )
  return (
    `---------------------------------\n\n` +
    rule +
    explanation +
    word +
    `${kleur.bold("Context:")} ${context}\n` +
    fixes
  )
}
module.exports = Mistake
================================================
FILE: src/components/Mistake.test.js
================================================
const stripStyles = require("../utils/stripStyles")
const Mistake = require("./Mistake")
// Unit tests for the Mistake component; kleur styles are stripped so the
// assertions compare plain text output.
describe("Mistake component", () => {
it("renders info about mistake without suggestions", () => {
const expected =
`---------------------------------\n\n` +
`Rule: typos\n` +
`Explanation: Did you mean "is"?\n\n` +
`Context: It are a perfect English sentence. \n`
const result = Mistake({
message: 'Did you mean "is"?',
replacements: [],
context: {
text: " It are a perfect English sentence. ",
offset: 4,
length: 3,
},
rule: {
category: {
id: "typos",
},
},
})
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
it("renders info about mistake with single suggestion", () => {
const expected =
`---------------------------------\n\n` +
`Rule: typos\n` +
`Explanation: Some message\n\n` +
`Context: Some context\n` +
`Suggested fix: 1) foo\n`
const result = Mistake({
message: "Some message",
replacements: [{ value: "foo" }],
context: {
text: "Some context",
offset: 4,
length: 3,
},
rule: {
category: {
id: "typos",
},
},
})
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
it("renders info about mistake with multiple suggestions", () => {
const expected =
`---------------------------------\n\n` +
`Rule: typos\n` +
`Explanation: Some message\n\n` +
`Context: Some context\n` +
`Suggested fix: 1) foo 2) bar 3) baz\n`
const result = Mistake({
message: "Some message",
replacements: [{ value: "foo" }, { value: "bar" }, { value: "baz" }],
context: {
text: "Some context",
offset: 4,
length: 3,
},
rule: {
category: {
id: "typos",
},
},
})
const rawResult = stripStyles(result)
expect(rawResult).toEqual(expected)
})
})
================================================
FILE: src/context.js
================================================
const context = {
argv: null,
}
module.exports = context
================================================
FILE: src/index.d.ts
================================================
// Public type surface of the gramma package:
// `check` performs a LanguageTool request, `replaceAll` applies fixes.
import check = require("./requests/checkViaAPI")
import replaceAll = require("./text-manipulation/replaceAll")
export { check, replaceAll }
================================================
FILE: src/index.js
================================================
// Public programmatic API of the gramma package.
// isomorphic-fetch installs a global fetch used by checkViaAPI.
require("isomorphic-fetch")
const check = require("./requests/checkViaAPI")
const replaceAll = require("./text-manipulation/replaceAll")
module.exports = {
check,
replaceAll,
}
================================================
FILE: src/initialConfig.js
================================================
const { ruleOptions } = require("./validators/rules")
const rules = {}
ruleOptions.forEach((rule) => {
rules[rule] = true
})
const initialConfig = {
api_url: "https://api.languagetool.org/v2/check",
api_key: "",
dictionary: [],
language: "en-US",
rules,
}
module.exports = initialConfig
================================================
FILE: src/prompts/confirmConfig.js
================================================
const prompts = require("prompts")
/**
 * Asks whether to fall back to the global config when no local
 * .gramma.json was found. Resolves to { useGlobal: boolean }.
 */
const confirmConfig = () => {
  const question = {
    type: "toggle",
    name: "useGlobal",
    message:
      "Local config not found. Should I use the global config instead?",
    initial: true,
    active: "yes",
    inactive: "no",
  }
  return prompts([question])
}
module.exports = confirmConfig
================================================
FILE: src/prompts/confirmInit.js
================================================
const prompts = require("prompts")
const initialConfig = require("../initialConfig")
/**
 * Interactive questions for `gramma init`: which API url to use and,
 * when inside a Git repo, whether to install the Git hooks.
 * Resolves to { api, hook }.
 */
const confirmInit = (hasGit) => {
  const apiQuestion = {
    type: "select",
    name: "api",
    message: "Choose API url:",
    choices: [
      { title: "languagetool.org", value: initialConfig.api_url },
      {
        title: "Inherit from global config",
        value: "inherit",
      },
    ],
    initial: 0,
  }
  const hookQuestion = {
    // A null type makes prompts skip the question entirely.
    type: hasGit ? "toggle" : null,
    name: "hook",
    message: "Add Git hook?",
    initial: true,
    active: "yes",
    inactive: "no",
  }
  return prompts([apiQuestion, hookQuestion])
}
module.exports = confirmInit
================================================
FILE: src/prompts/confirmPort.js
================================================
const prompts = require("prompts")
/**
 * Asks whether to auto-select a free port when the requested server
 * port is taken. Resolves to { autoPort: boolean }.
 */
const confirmPort = () => {
  const question = {
    type: "toggle",
    name: "autoPort",
    message: "Port is in use, should I automatically find another port?",
    initial: true,
    active: "yes",
    inactive: "no",
  }
  return prompts([question])
}
module.exports = confirmPort
================================================
FILE: src/prompts/confirmServerReinstall.js
================================================
const prompts = require("prompts")
/**
 * Asks whether to reinstall an already-installed local server.
 * Resolves to { reinstall: boolean }.
 */
const confirmServerReinstall = () => {
  const question = {
    type: "confirm",
    name: "reinstall",
    message: "Server already installed. Do you want to reinstall?",
    initial: true,
  }
  return prompts([question])
}
module.exports = confirmServerReinstall
================================================
FILE: src/prompts/handleMistake.js
================================================
const prompts = require("prompts")
const FixMenu = require("../components/FixMenu")
/**
 * Interactive prompt for a single mistake: shows the fix menu and, when
 * the user picks "0", a follow-up asking for a custom replacement.
 *
 * @param {Array} fixes - suggested replacements
 * @param {string} issue - LanguageTool issue type
 */
const handleMistake = (fixes, issue) => {
  console.log("---------------------------------")
  const dictionaryOptions = issue === "misspelling" ? ["l", "g"] : []
  const fixNumbers = fixes.map((_, index) => String(index + 1))
  const validInputs = [...fixNumbers, "0", "i", ...dictionaryOptions, "n"]
  const optionQuestion = {
    type: "text",
    name: "option",
    message: FixMenu(fixes, issue),
    initial: fixes.length > 0 ? "1" : "0",
    validate(input) {
      return validInputs.includes(input)
        ? true
        : `Please enter a valid option...`
    },
  }
  const replacementQuestion = {
    type: (prev) => (prev === "0" ? "text" : null),
    name: "replacement",
    message: "Provide replacement",
  }
  return prompts([optionQuestion, replacementQuestion])
}
module.exports = handleMistake
================================================
FILE: src/prompts/handleSave.js
================================================
const prompts = require("prompts")
const { platform } = require("os")
/**
 * Default "save as" file name: ISO timestamp plus the original file
 * name (or "gramma.txt"). On Windows the timestamp is stripped of
 * characters that are illegal in file names (., :, -).
 */
const initialFileName = (originalFile) => {
  const iso = new Date().toISOString()
  const date = platform() === "win32" ? iso.replace(/[.:-]/g, "") : iso
  const suffix = originalFile || "gramma.txt"
  return `${date}-${suffix}`
}
/**
 * Prompt for what to do with the corrected text: replace the original
 * file (FILE mode only), save under a new name, or print to screen.
 *
 * @param {string} mode - "FILE" or "TEXT"
 * @param {string|null} [originalFile] - source file name, when known
 */
const handleSave = (mode, originalFile = null) => {
  const isFileMode = mode === "FILE"
  const choices = []
  if (isFileMode) {
    choices.push({ title: "replace file", value: "replace" })
  }
  choices.push({ title: "save as", value: "save-as" })
  choices.push({ title: "print on screen", value: "print" })
  const saveQuestion = {
    type: "select",
    name: "saveOption",
    message: "What do you want to do?",
    initial: isFileMode ? 0 : 1,
    choices,
  }
  const fileNameQuestion = {
    type: (prev) => (prev === "save-as" ? "text" : null),
    name: "fileName",
    initial: initialFileName(originalFile),
    message: "Please provide a file path",
  }
  return prompts([saveQuestion, fileNameQuestion])
}
module.exports = handleSave
================================================
FILE: src/prompts/mainMenu.js
================================================
const prompts = require("prompts")
/**
 * Top-level interactive menu: check a file or check ad-hoc text.
 * When "check file" is chosen, a follow-up asks for the path.
 */
const mainMenu = () => {
  const menuQuestion = {
    type: "select",
    name: "saveOption",
    message: "What do you want to do?",
    choices: [
      { title: "check file", value: "check" },
      { title: "check text", value: "listen" },
    ],
  }
  const fileQuestion = {
    type: (prev) => (prev === "check" ? "text" : null),
    name: "fileName",
    // Fixed user-facing typo: "Chose" -> "Choose".
    message: "Choose file path (relative or absolute)",
  }
  return prompts([menuQuestion, fileQuestion])
}
module.exports = mainMenu
================================================
FILE: src/requests/checkViaAPI.d.ts
================================================
export = checkViaAPI
/**
* Calls the provided LanguageTool API
* and returns grammar checker suggestions.
*
* @param text text to check
* @param options request config
*
* @returns grammar checker suggestions
*/
declare function checkViaAPI(
text: any,
options?: {
api_url?: string
api_key?: string
language?: string
rules?: { [ruleName: string]: boolean }
dictionary?: string[]
markdown?: boolean
},
): Promise<{
language: {
name: string
code: string
[key: string]: any
}
matches: {
message: string
shortMessage: string
replacements: { value: string; [key: string]: any }[]
offset: number
length: number
context: { text: string; offset: number; length: number }
sentence: string
type: { typeName: string }
rule: {
id: string
description: string
issueType: string
category: { id: string; name: string }
isPremium: false
}
word: string
[key: string]: any
}[]
[key: string]: any
}>
================================================
FILE: src/requests/checkViaAPI.js
================================================
const queryString = require("query-string")
const initialConfig = require("../initialConfig")
// @ts-ignore
const prepareMarkdown = require("../utils/prepareMarkdown").default
/**
 * Adds a `word` field to every match: the exact text span the match
 * points at inside its context. Uses slice() instead of the deprecated
 * String.prototype.substr().
 *
 * @param {Array} matches - LanguageTool matches
 * @returns {Array} new array of matches with `word` added
 */
const addWordFields = (matches) => {
  return matches.map((match) => {
    const { text, offset, length } = match.context
    return { ...match, word: text.slice(offset, offset + length) }
  })
}
/**
 * Filters out matches the user opted out of: any match whose rule
 * category is disabled, and misspellings whose word is already in the
 * user dictionary.
 */
const removeFalsePositives = (matches, dictionary, disabledRules) => {
  const isDisabled = (match) => disabledRules.includes(match.rule.category.id)
  const isKnownWord = (match) =>
    match.rule.issueType === "misspelling" && dictionary.includes(match.word)
  return matches.filter((match) => !isDisabled(match) && !isKnownWord(match))
}
// Cap on suggestions per match, to keep the interactive menu usable.
const MAX_REPLACEMENTS = 30
/**
 * Calls the provided LanguageTool API
 * and returns grammar checker suggestions.
 *
 * @param {string} text text to check
 * @param {Object} options request config (api_url, api_key, language,
 *   rules, dictionary, markdown) merged over initialConfig defaults
 *
 * @returns {Promise} grammar checker suggestions
 * @throws {Error} when the API response is not valid JSON
 */
const checkViaAPI = async (text, options = {}) => {
const cfg = { ...initialConfig, ...options }
// Rule categories the user disabled, upper-cased for the LT API.
const disabledRules = Object.entries(cfg.rules)
// eslint-disable-next-line no-unused-vars
.filter(([rule, value]) => value === false)
.map(([rule]) => rule.toUpperCase())
// grammarbot endpoints don't take disabledCategories; skip the parameter.
const disabledRulesEntry =
disabledRules.length === 0 || cfg.api_url.includes("grammarbot")
? {}
: { disabledCategories: disabledRules.join(",") }
// Markdown is sent as annotated "data" so markup itself isn't checked.
const input = options.markdown ? { data: prepareMarkdown(text) } : { text }
const postData = queryString.stringify({
api_key: cfg.api_key,
language: cfg.language,
...input,
...disabledRulesEntry,
})
// eslint-disable-next-line
const response = await fetch(cfg.api_url, {
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
body: postData,
method: "POST",
})
const body = await response.text()
let result
try {
result = JSON.parse(body)
} catch (e) {
// A non-JSON body means the API rejected the request.
if (cfg.api_url.includes("grammarbot")) {
throw new Error(
"Language not available at grammarbot.io.\n" +
"Please consider installing a local LanguageTool server:\n" +
"https://github.com/caderek/gramma#installing-local-server",
)
} else {
throw new Error(body)
}
}
// The disabled-categories filter is additionally applied client-side,
// but only for the official API endpoint.
const resultWithWords = {
...result,
matches: removeFalsePositives(
addWordFields(result.matches),
cfg.dictionary,
cfg.api_url === initialConfig.api_url ? disabledRules : [],
),
}
// Truncate overly long suggestion lists in place.
resultWithWords.matches.forEach((match) => {
if (match.replacements.length > MAX_REPLACEMENTS) {
match.replacements.length = MAX_REPLACEMENTS // eslint-disable-line
}
})
return resultWithWords
}
module.exports = checkViaAPI
================================================
FILE: src/requests/checkViaCmd.js
================================================
const fs = require("fs")
const path = require("path")
const kleur = require("kleur")
const { execSync } = require("child_process")
const initialConfig = require("../initialConfig")
/**
 * Adds a `word` field to every match: the exact text span that the
 * checker flagged, extracted from the match context.
 *
 * @param {Object[]} matches raw LanguageTool matches
 * @returns {Object[]} copies of the matches, each with a `word` property
 */
const addWordFields = (matches) => {
  return matches.map((match) => {
    // String#substr is deprecated; slice(start, end) is the supported form.
    const word = match.context.text.slice(
      match.context.offset,
      match.context.offset + match.context.length,
    )
    return { ...match, word }
  })
}
/**
 * Filters out matches the user has opted out of: disabled rule
 * categories, and dictionary words reported as misspellings.
 *
 * @param {Object[]} matches matches (with `word` fields) to filter
 * @param {string[]} dictionary user-approved words
 * @param {string[]} disabledRules disabled rule-category ids
 * @returns {Object[]} remaining matches
 */
const removeFalsePositives = (matches, dictionary, disabledRules) => {
  const isDisabled = (match) => disabledRules.includes(match.rule.category.id)
  const isKnownWord = (match) =>
    match.rule.issueType === "misspelling" && dictionary.includes(match.word)
  return matches.filter((match) => !isDisabled(match) && !isKnownWord(match))
}
// Writes `text` to `file`, creating or overwriting it synchronously.
const createTempFile = (file, text) => fs.writeFileSync(file, text)
// Deletes the temporary input file synchronously.
const removeTempFile = (file) => fs.unlinkSync(file)
const MAX_REPLACEMENTS = 30
/**
 * Runs the local LanguageTool command-line jar on the given text
 * and returns grammar checker suggestions.
 *
 * The text is written to a temp file in the config directory and passed
 * to `languagetool-commandline.jar`; the JSON report (last output line)
 * is parsed and post-processed like an API response: `word` fields are
 * added, false positives removed, and replacement lists capped.
 *
 * @param {string} text text to check
 * @param {Object} options request config
 * @param {string} serverDirPath directory containing the LanguageTool jars
 * @param {string} configDirPath directory used for the temporary input file
 *
 * @returns {Promise} grammar checker suggestions
 */
const checkViaCmd = async (
  text,
  options = {},
  serverDirPath,
  configDirPath,
) => {
  const cfg = { ...initialConfig, ...options }
  // Rule names mapped to `false` become disabled categories (upper-cased ids).
  const disabledRules = Object.entries(cfg.rules)
    // eslint-disable-next-line no-unused-vars
    .filter(([rule, value]) => value === false)
    .map(([rule]) => rule.toUpperCase())
  const tempFile = path.join(configDirPath, ".temp")
  createTempFile(tempFile, text)
  const jar = path.join(serverDirPath, "languagetool-commandline.jar")
  // "auto" lets LanguageTool detect the language (-adl); otherwise pass -l.
  const lang = cfg.language === "auto" ? " -adl" : ` -l ${cfg.language}`
  const disabled =
    disabledRules.length === 0 ? "" : ` -d ${disabledRules.join(",")}`
  const cmd = `java -jar ${jar}${lang}${disabled} --json ${tempFile}`
  let response
  let result
  try {
    response = execSync(cmd, { stdio: "pipe" })
    // The JSON report is on the last line of the command output.
    response = response.toString().split("\n")
    result = JSON.parse(response[response.length - 1])
  } catch (e) {
    removeTempFile(tempFile)
    console.log(kleur.red("Cannot execute command via local LanguageTool cmd"))
    console.log("Please check if your command is valid.")
    process.exit(1)
  }
  removeTempFile(tempFile)
  const resultWithWords = {
    ...result,
    matches: removeFalsePositives(
      addWordFields(result.matches),
      cfg.dictionary,
      // NOTE(review): disabled rules were already passed via -d above; this
      // client-side filter only applies for the default api_url — confirm.
      cfg.api_url === initialConfig.api_url ? disabledRules : [],
    ),
  }
  // Trim overly long suggestion lists in place.
  resultWithWords.matches.forEach((match) => {
    if (match.replacements.length > MAX_REPLACEMENTS) {
      match.replacements.length = MAX_REPLACEMENTS // eslint-disable-line
    }
  })
  return resultWithWords
}
module.exports = checkViaCmd
================================================
FILE: src/requests/checkWithFallback.js
================================================
const kleur = require("kleur")
const startServer = require("../server/startServer")
const checkViaAPI = require("./checkViaAPI")
const checkViaCmd = require("./checkViaCmd")
const stopServer = require("../server/stopServer")
/**
 * Checks text via the configured API, falling back to a local
 * LanguageTool installation when the API is unreachable.
 *
 * On connection failure: plain text is re-checked through the local
 * command-line jar; markdown input instead boots a local API server and
 * re-checks through it. With no local install, the process exits with an
 * explanatory message.
 *
 * @param {string} text text to check
 * @param {Object} cfg full config with `session`, `global` and `paths`
 * @returns {Promise} grammar checker suggestions
 */
const checkWithFallback = async (text, cfg) => {
  const { session, global } = cfg
  let response
  try {
    console.info(`Checking via ${cfg.session.api_url}...`)
    response = await checkViaAPI(text, session)
    // "server_once" mode: shut the local server down after a single check.
    if (
      cfg.session.api_url.includes("localhost") &&
      cfg.session.server_once === "true"
    ) {
      await stopServer(cfg)
    }
  } catch (error) {
    // ECONNREFUSED: nothing listening at api_url. The literal value
    // "localhost" means a local server is configured but not started.
    if (error.code === "ECONNREFUSED" || cfg.session.api_url === "localhost") {
      if (global.server_path) {
        if (!session.markdown) {
          // Plain text goes through the command-line jar.
          console.info(`Checking via local LanguageTool cmd...`)
          response = await checkViaCmd(
            text,
            session,
            global.server_path,
            cfg.paths.globalConfigDir,
          )
        } else {
          // Markdown is only checked via the HTTP API, so start a server.
          const { server, api_url } = await startServer(cfg)
          console.clear()
          const updatedSession = { ...session, api_url }
          response = await checkViaAPI(text, updatedSession)
          if (global.server_once === "true") {
            server.kill()
          }
        }
      } else {
        console.log(kleur.red(`API server ${session.api_url} not available!`))
        console.log("Please make sure that the server is running.")
        console.log(
          "TIP: Gramma is able to automatically start local API server if you install it via: gramma server install",
        )
        process.exit(1)
      }
    } else {
      // Any other failure (bad response, DNS, etc.) is fatal.
      console.log("Gramma was unable to get a response from API server.")
      console.log(`Details: ${error.message}`)
      process.exit(1)
    }
  }
  return response
}
module.exports = checkWithFallback
================================================
FILE: src/requests/updates.js
================================================
const fs = require("fs")
const path = require("path")
const kleur = require("kleur")
const { version } = require("../../package.json")
/**
 * Checks GitHub for a newer gramma release, at most once per day.
 *
 * The timestamp of the last check is persisted in `<configDir>/.update`.
 * The request is bounded to ~1 second; any failure (offline, timeout,
 * unexpected payload) is treated as "no update available".
 *
 * @param {string} configDir directory holding the `.update` stamp file
 * @returns {Promise<{available: boolean, newVersion?: string}>}
 */
const checkForUpdates = async (configDir) => {
  const updateFile = path.join(configDir, ".update")
  if (fs.existsSync(updateFile)) {
    const lastCheck = Number(fs.readFileSync(updateFile).toString())
    const fullDay = 24 * 60 * 60 * 1000
    // Throttle: skip the remote check if the last one was under a day ago.
    if (Date.now() - lastCheck < fullDay) {
      return { available: false }
    }
  }
  // Rejects after 1s so Promise.race bounds the fetch below.
  const timeout = () => new Promise((_, reject) => setTimeout(reject, 1000))
  try {
    const response = await Promise.race([
      fetch("https://api.github.com/repos/caderek/gramma/releases/latest"),
      timeout(),
    ])
    fs.writeFileSync(updateFile, String(Date.now()))
    const data = await response.json()
    // Tag looks like "v1.2.3"; slice(1) drops the leading "v".
    const remoteVersion = data.tag_name
    const [remoteMajor, remoteMinor, remotePatch] = remoteVersion
      .slice(1)
      .split(".")
      .map(Number)
    const [major, minor, patch] = version.split(".").map(Number)
    // Collapse semver into one comparable number (assumes each part < 1000).
    const oldVersion = major * 1e8 + minor * 1e5 + patch * 1e2
    const newVersion = remoteMajor * 1e8 + remoteMinor * 1e5 + remotePatch * 1e2
    if (newVersion > oldVersion) {
      return { available: true, newVersion: remoteVersion }
    }
    return { available: false }
  } catch (e) {
    // Best-effort: never let the update check break the CLI.
    return { available: false }
  }
}
/**
 * Prints a notice when a newer gramma release is available.
 *
 * @param {string} configDir directory holding the `.update` stamp file
 * @returns {Promise<void>}
 */
const displayUpdates = async (configDir) => {
  const { available, newVersion } = await checkForUpdates(configDir)
  if (!available) {
    return
  }
  console.log(
    kleur.yellow(`
Update available: ${newVersion}
Install via NPM or download the new binary from:
https://caderek.github.io/gramma/
`),
  )
}
exports.checkForUpdates = checkForUpdates
exports.displayUpdates = displayUpdates
================================================
FILE: src/server/getServerInfo.js
================================================
const kleur = require("kleur")
/**
 * Prints PID, URL, and install path of the local API server,
 * or a warning when no server PID is recorded.
 *
 * @param {Object} cfg full config with `global` settings
 */
const getServerInfo = (cfg) => {
  if (!cfg.global.server_pid) {
    console.log(kleur.yellow("API server is not running!"))
    return
  }
  console.log(kleur.green("PID: "), kleur.white(cfg.global.server_pid))
  console.log(kleur.green("Url: "), kleur.white(cfg.global.api_url))
  console.log(kleur.green("Path:"), kleur.white(cfg.global.server_path))
}
================================================
FILE: src/server/getServerPID.js
================================================
const kleur = require("kleur")
/**
 * Prints the PID of the running local API server,
 * or a warning when no server PID is recorded.
 *
 * @param {Object} cfg full config with `global` settings
 */
const getServerPID = (cfg) => {
  if (!cfg.global.server_pid) {
    console.log(kleur.yellow("API server is not running!"))
    return
  }
  console.log(kleur.green(`API server PID: ${cfg.global.server_pid}`))
}
================================================
FILE: src/server/installServer.js
================================================
const path = require("path")
const fs = require("fs")
const kleur = require("kleur")
const rimraf = require("rimraf")
const downloadFile = require("../utils/downloadFile")
const unzipFile = require("../utils/unzipFile")
const configure = require("../actions/configure")
const confirmServerReinstall = require("../prompts/confirmServerReinstall")
/**
 * Downloads and installs the latest stable LanguageTool release into
 * `~/.languagetool`, then points the global config at it.
 *
 * Prompts before wiping an existing installation and exits when the user
 * declines. Side effects: writes `server_path` and `api_url` ("localhost")
 * to the global config.
 *
 * @param {Object} cfg full config with `paths.home`
 * @returns {Promise<void>}
 */
const installServer = async (cfg) => {
  const serverDir = path.join(cfg.paths.home, ".languagetool")
  const zipPath = path.join(serverDir, "languagetool.zip")
  if (fs.existsSync(serverDir)) {
    const { reinstall } = await confirmServerReinstall()
    if (reinstall) {
      rimraf.sync(serverDir)
    } else {
      console.log("Aborting!")
      process.exit()
    }
  }
  fs.mkdirSync(serverDir)
  await downloadFile(
    "https://languagetool.org/download/LanguageTool-stable.zip",
    zipPath,
  )
  console.log("Unpacking...")
  await unzipFile(zipPath, serverDir)
  rimraf.sync(zipPath)
  console.log("Configuring...")
  // Assumes the zip unpacked into exactly one (versioned) directory —
  // its first entry becomes the server path.
  const [unpackedDirName] = fs.readdirSync(serverDir)
  const serverPath = path.join(serverDir, unpackedDirName)
  configure("server_path", serverPath, cfg, true, true)
  // "localhost" marks local-server mode (see checkWithFallback).
  configure("api_url", "localhost", cfg, true, true)
  console.log(kleur.green(`Server installed in: ${serverDir}`))
}
module.exports = installServer
================================================
FILE: src/server/showServerGUI.js
================================================
const { spawn } = require("child_process")
const path = require("path")
const kleur = require("kleur")
/**
 * Launches the LanguageTool desktop GUI from the locally installed server.
 *
 * @param {Object} cfg full config with `global.server_path`
 * @returns {Promise<boolean>} false when no local server is installed,
 *                             true once the GUI process has been spawned
 */
const showServerGUI = async (cfg) => {
  if (!cfg.global.server_path) {
    console.log(
      kleur.red(`Please install local server via: gramma server install`),
    )
    return false
  }
  console.log("Starting local server GUI...")
  const jarPath = path.join(cfg.global.server_path, "languagetool.jar")
  const gui = spawn("java", ["-jar", jarPath], {
    windowsHide: true,
    detached: true,
  })
  gui.on("error", (error) => {
    if (error) {
      console.log(kleur.red("Cannot start local server GUI automatically."))
      process.exit(1)
    }
  })
  return true
}
module.exports = showServerGUI
================================================
FILE: src/server/startServer.js
================================================
const { spawn } = require("child_process")
const path = require("path")
const kleur = require("kleur")
const portfinder = require("portfinder")
const tcpPortUsed = require("tcp-port-used")
const configure = require("../actions/configure")
const confirmPort = require("../prompts/confirmPort")
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
/**
 * Polls the given check endpoint once per second until it responds
 * with HTTP 200 (i.e. the local API server has finished booting).
 *
 * NOTE(review): retries forever — if the server never comes up, this
 * recursion only stops when the process is killed.
 *
 * @param {string} url LanguageTool check endpoint
 * @returns {Promise<void>} resolves once the server answers with 200
 */
const pingServer = async (url) => {
  console.log("Waiting for local API server...")
  // Network errors are mapped to a fake 500 so the retry loop continues.
  const response = await fetch(`${url}?language=en-US&text=`).catch(() => {
    return {
      status: 500,
    }
  })
  if (response.status === 200) {
    return
  }
  await delay(1000)
  await pingServer(url)
}
/**
 * Starts a local LanguageTool HTTP server and waits until it responds.
 *
 * Picks a free port (starting from the requested one or 8081), spawns
 * `languagetool-server.jar` detached, saves `api_url` (and, unless in
 * one-shot mode, `server_pid`) to the global config, and resolves once
 * the server answers.
 *
 * @param {Object} cfg full config with `global.server_path`
 * @param {Object} [opts]
 * @param {number|null} [opts.port] preferred port, or null for the default
 * @param {boolean} [opts.viaCommand] true when invoked by the explicit
 *                                    `gramma server` command
 * @returns {Promise<{server: Object, api_url: string}>}
 */
const startServer = async (cfg, { port = null, viaCommand = false } = {}) => {
  if (!cfg.global.server_path) {
    console.log(
      kleur.red(`Please install local server via: gramma server install`),
    )
    process.exit(1)
  }
  if (port !== null) {
    // BUGFIX: tcpPortUsed.check() returns a Promise, which is always
    // truthy — without await, the port-in-use prompt fired every time.
    const inUse = await tcpPortUsed.check(port)
    if (inUse) {
      const { autoPort } = await confirmPort()
      if (!autoPort) {
        console.log(kleur.yellow("Aborted!"))
        process.exit(1)
      }
    }
  }
  console.log("Starting local API server...")
  const PORT = await portfinder.getPortPromise({
    port: port || 8081,
  })
  const command = "java"
  const params = [
    "-cp",
    path.join(cfg.global.server_path, "languagetool-server.jar"),
    "org.languagetool.server.HTTPServer",
    "--port",
    String(PORT),
    "--allow-origin",
    "'*'",
  ]
  const server = spawn(command, params, { windowsHide: true, detached: true })
  server.on("error", (error) => {
    if (error) {
      console.log(kleur.red("Cannot start local API server automatically."))
      process.exit(1)
    }
  })
  // eslint-disable-next-line camelcase
  const api_url = `http://localhost:${PORT}/v2/check`
  await pingServer(api_url)
  configure("api_url", api_url, cfg, true, true)
  // One-shot servers die after use, so skip persisting the PID unless the
  // user started the server explicitly via the server command.
  if (cfg.global.server_once !== "true" || viaCommand) {
    configure("server_pid", server.pid, cfg, true, true)
  }
  console.log(
    kleur.green(`API server started!\nPID: ${server.pid}\nAPI URL: ${api_url}`),
  )
  return { server, api_url }
}
module.exports = startServer
================================================
FILE: src/server/stopServer.js
================================================
const { exec } = require("child_process")
const kleur = require("kleur")
const { platform } = require("os")
const configure = require("../actions/configure")
/**
 * Stops the background LanguageTool server recorded in the global config.
 *
 * Uses `taskkill` on Windows and `kill` elsewhere. Whether or not the
 * kill succeeds (the PID may be stale), the stored `server_pid` is
 * cleared afterwards.
 *
 * @param {Object} cfg full config with `global.server_pid`
 * @returns {Promise|false} the kill chain, or false when no PID is stored
 */
const stopServer = async (cfg) => {
  if (cfg.global.server_pid) {
    const command =
      platform() === "win32"
        ? `taskkill /PID ${cfg.global.server_pid} /F`
        : `kill ${cfg.global.server_pid}`
    return new Promise((resolve, reject) => {
      exec(command, (error) => {
        if (error) {
          reject(error)
        } else {
          resolve()
        }
      })
    })
      .then(() => {
        console.log(kleur.green("API server stopped!"))
      })
      .catch(() => {
        // A failed kill usually means the recorded PID is no longer alive.
        console.log(kleur.yellow("API server is not running!"))
      })
      .then(() => {
        // Always clear the stored PID, success or not.
        configure("server_pid", "", cfg, true, true)
      })
  }
  console.log(kleur.yellow("API server is not running!"))
  return false
}
module.exports = stopServer
================================================
FILE: src/text-manipulation/replace.js
================================================
/**
 * Replaces the substring at [offset, offset + length) in `text`.
 *
 * @param {string} text base text
 * @param {string|Function} change replacement string, or a function that
 *                                 receives the current substring and
 *                                 returns its replacement
 * @param {number} offset start index of the part to replace
 * @param {number} length length of the part to replace
 * @returns {string} the modified text
 */
const replace = (text, change, offset, length) => {
  const end = offset + length
  const current = text.slice(offset, end)
  const replacement = typeof change === "function" ? change(current) : change
  return text.slice(0, offset) + replacement + text.slice(end)
}
module.exports = replace
================================================
FILE: src/text-manipulation/replace.test.js
================================================
const replace = require("./replace")
describe("Replace", () => {
  // Shared fixture: the span "CHANGE_ME" inside the text.
  const text = "Foo CHANGE_ME baz."
  const offset = 4
  const length = 9

  it("changes specified part of the text with provides word/phrase", () => {
    expect(replace(text, "bar", offset, length)).toEqual("Foo bar baz.")
  })

  it("changes specified part of the text according to provided function", () => {
    const lowercase = (mistake) => mistake.toLowerCase()
    expect(replace(text, lowercase, offset, length)).toEqual(
      "Foo change_me baz.",
    )
  })
})
================================================
FILE: src/text-manipulation/replaceAll.d.ts
================================================
export = replaceAll
/**
 * Modifies provided text with specified transformations.
 *
 * @param text base text
 * @param transformations descriptions of changes to the text; `change`
 *   may be a replacement string, or a function mapping the current
 *   substring to its replacement (both are supported by the underlying
 *   `replace` implementation)
 */
declare function replaceAll(
  text: string,
  transformations: {
    offset: number
    length: number
    change: string | ((mistake: string) => string)
  }[],
): string
================================================
FILE: src/text-manipulation/replaceAll.js
================================================
const replace = require("./replace")
/**
 * Modifies provided text with specified transformations.
 *
 * Changes are applied from the highest offset down so that earlier
 * replacements do not shift the offsets of later ones.
 *
 * @param text base text
 * @param transformations descriptions of changes to the text
 *                        ({ offset, length, change })
 * @returns the text with all transformations applied
 */
const replaceAll = (text, transformations) => {
  // Copy before sorting: Array#sort mutates in place, and the caller's
  // array should not be reordered as a side effect.
  return [...transformations]
    .sort((a, b) => b.offset - a.offset)
    .reduce((previousText, { change, offset, length }) => {
      return replace(previousText, change, offset, length)
    }, text)
}
module.exports = replaceAll
================================================
FILE: src/text-manipulation/replaceAll.test.js
================================================
const replaceAll = require("./replaceAll")
describe("Replace all", () => {
  // FIX: was `it.only`, which focuses this test and silently skips every
  // other test in the jest run.
  it("changes all places according to provided transformations", () => {
    const text = "Foo CHANGE_ONE baz CHANGE_TWO."
    const transformations = [
      {
        offset: 4,
        length: 10,
        change: "bar",
      },
      {
        offset: 19,
        length: 10,
        change: "bat",
      },
    ]
    const expected = "Foo bar baz bat."
    const result = replaceAll(text, transformations)
    expect(result).toEqual(expected)
  })
})
================================================
FILE: src/utils/appLocation.js
================================================
const path = require("path")
const fs = require("fs")
// Directory of the running executable vs. the directory of this script.
const binDir = path.dirname(process.execPath)
const scriptDir = __dirname
let appLocation
// Inside a packaged binary the sources live in a virtual "snapshot"
// filesystem; point at the real executable next to the binary instead.
if (scriptDir.includes("snapshot")) {
  // NOTE(review): assumes the first directory entry is the gramma
  // executable — verify this holds for all packaged platforms.
  const executable = fs.readdirSync(binDir)[0]
  appLocation = path.resolve(binDir, executable)
} else {
  // Regular Node installation: resolve to the CLI entry point.
  appLocation = path.resolve(scriptDir, "..", "cli.js")
}
module.exports = appLocation
================================================
FILE: src/utils/downloadFile.js
================================================
const fs = require("fs")
const progressStream = require("progress-stream")
const cliProgress = require("cli-progress")
// Converts a byte count to (decimal) megabytes, rounded to 2 places.
const toMegabytes = (bytes) => Number((bytes / (1000 * 1000)).toFixed(2))
/**
 * Downloads `url` to the local `path`, rendering a CLI progress bar.
 *
 * @param {string} url file to download
 * @param {string} path destination file path
 * @returns {Promise<void>} resolves when the file is fully written
 */
const downloadFile = async (url, path) => {
  const res = await fetch(url)
  // Content-Length header (a string) sizes the progress bar.
  const dataLength = res.headers.get("content-length")
  const bar = new cliProgress.Bar({
    barCompleteChar: "#",
    barIncompleteChar: ".",
    format: "Downloading: [{bar}] {percentage}% | {value}/{total}MB",
  })
  bar.start(toMegabytes(dataLength), 0)
  // Pass-through stream that reports transferred bytes every 100ms.
  const str = progressStream({
    length: dataLength,
    time: 100,
  }).on("progress", (progress) => bar.update(toMegabytes(progress.transferred)))
  const fileStream = fs.createWriteStream(path)
  return new Promise((resolve, reject) => {
    // NOTE(review): `.pipe` assumes `res.body` is a Node readable stream;
    // the built-in global fetch returns a WHATWG ReadableStream, which has
    // no `.pipe` — confirm which fetch implementation is in effect here.
    res.body.pipe(str).pipe(fileStream)
    res.body.on("error", (err) => {
      reject(err)
    })
    fileStream.on("finish", () => {
      bar.stop()
      resolve()
    })
  })
}
module.exports = downloadFile
================================================
FILE: src/utils/equal.js
================================================
const { deepEqual } = require("assert")
/**
 * Deep (loose) equality check built on assert.deepEqual.
 *
 * @param a first value
 * @param b second value
 * @returns {boolean} true when the values are deeply (loosely) equal
 */
const equal = (a, b) => {
  try {
    deepEqual(a, b)
  } catch (e) {
    return false
  }
  return true
}
module.exports = equal
================================================
FILE: src/utils/findUpSync.js
================================================
// esbuild-generated CommonJS/ESM interop helpers (bundled output of
// lib/findUpSync.mjs — prefer regenerating over editing by hand).
var __create = Object.create
var __defProp = Object.defineProperty
var __getOwnPropDesc = Object.getOwnPropertyDescriptor
var __getOwnPropNames = Object.getOwnPropertyNames
var __getProtoOf = Object.getPrototypeOf
var __hasOwnProp = Object.prototype.hasOwnProperty
// Tags a CommonJS exports object as an ES module.
var __markAsModule = (target) =>
  __defProp(target, "__esModule", { value: true })
// Defines lazy getters on `target` for every entry in `all`.
var __export = (target, all) => {
  __markAsModule(target)
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true })
}
// Copies (re-exports) all own properties of module2 onto target via
// getters, preserving enumerability; skips "default" and existing keys.
var __reExport = (target, module2, desc) => {
  if (
    (module2 && typeof module2 === "object") ||
    typeof module2 === "function"
  ) {
    for (let key of __getOwnPropNames(module2))
      if (!__hasOwnProp.call(target, key) && key !== "default")
        __defProp(target, key, {
          get: () => module2[key],
          enumerable:
            !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable,
        })
  }
  return target
}
// Wraps a CommonJS module for ES-module-style consumption
// (adds a "default" binding pointing at the original exports).
var __toModule = (module2) => {
  return __reExport(
    __markAsModule(
      __defProp(
        module2 != null ? __create(__getProtoOf(module2)) : {},
        "default",
        module2 && module2.__esModule && "default" in module2
          ? { get: () => module2.default, enumerable: true }
          : { value: module2, enumerable: true },
      ),
    ),
    module2,
  )
}
// lib/findUpSync.mjs
// Expose the bundle's single export (findUpSync) as `default`.
__export(exports, {
  default: () => findUpSync_default,
})
// lib/node_modules/find-up/index.js
var import_path = __toModule(require("path"))
// lib/node_modules/locate-path/index.js
var import_node_process = __toModule(require("process"))
var import_node_path = __toModule(require("path"))
var import_node_fs = __toModule(require("fs"))
// lib/node_modules/yocto-queue/index.js
// Singly-linked-list node used by Queue below.
var Node = class {
  value
  next
  constructor(value) {
    this.value = value
  }
}
/**
 * Minimal FIFO queue backed by a singly linked list
 * (bundled yocto-queue implementation).
 */
var Queue = class {
  #head
  #tail
  #size
  constructor() {
    this.clear()
  }
  /** Appends a value at the back of the queue. */
  enqueue(value) {
    const node = new Node(value)
    if (!this.#head) {
      // First element: head and tail are the same node.
      this.#head = node
      this.#tail = node
    } else {
      this.#tail.next = node
      this.#tail = node
    }
    this.#size += 1
  }
  /** Removes and returns the front value, or undefined when empty. */
  dequeue() {
    const first = this.#head
    if (!first) {
      return
    }
    this.#head = first.next
    this.#size -= 1
    return first.value
  }
  /** Empties the queue. */
  clear() {
    this.#head = void 0
    this.#tail = void 0
    this.#size = 0
  }
  /** Number of queued values. */
  get size() {
    return this.#size
  }
  /** Iterates front-to-back without consuming the queue. */
  *[Symbol.iterator]() {
    for (let node = this.#head; node; node = node.next) {
      yield node.value
    }
  }
}
// lib/node_modules/locate-path/index.js
// Maps the public `type` option to the matching fs.Stats predicate name.
var typeMappings = {
  directory: "isDirectory",
  file: "isFile",
}
// Validates the `type` option; throws on anything but "file"/"directory".
function checkType(type) {
  if (!(type in typeMappings)) {
    throw new Error(`Invalid type specified: ${type}`)
  }
}
// True when no type filter is set or the stat matches the requested type.
var matchType = (type, stat) => type === void 0 || stat[typeMappings[type]]()
/**
 * Returns the first of `paths` that exists (relative to `cwd`) and
 * matches the requested type; undefined when none match.
 *
 * @param {string[]} paths candidate paths to probe
 * @param {Object} [options] cwd, type ("file"|"directory"), allowSymlinks
 * @returns {string|undefined} the first matching path (as given)
 */
function locatePathSync(
  paths,
  {
    cwd = import_node_process.default.cwd(),
    type = "file",
    allowSymlinks = true,
  } = {},
) {
  checkType(type)
  // lstat (when symlinks are disallowed) stats the link itself.
  const statFunction = allowSymlinks
    ? import_node_fs.default.statSync
    : import_node_fs.default.lstatSync
  for (const path_ of paths) {
    try {
      const stat = statFunction(import_node_path.default.resolve(cwd, path_))
      if (matchType(type, stat)) {
        return path_
      }
    } catch {}
  }
}
// lib/node_modules/path-exists/index.js
var import_node_fs2 = __toModule(require("fs"))
// lib/node_modules/find-up/index.js
// Sentinel a matcher function can return to stop the upward walk early.
var findUpStop = Symbol("findUpStop")
/**
 * Walks from `options.cwd` (default: process cwd) upward to the
 * filesystem root (or `options.stopAt`), collecting up to
 * `options.limit` matches of `name` — a file name, an array of names,
 * or a matcher function.
 *
 * @param {string|string[]|Function} name target name(s) or matcher
 * @param {Object} [options] cwd, stopAt, limit, plus locatePathSync options
 * @returns {string[]} absolute paths of the matches, closest-first
 */
function findUpMultipleSync(name, options = {}) {
  let directory = import_path.default.resolve(options.cwd || "")
  const { root } = import_path.default.parse(directory)
  const stopAt = options.stopAt || root
  const limit = options.limit || Number.POSITIVE_INFINITY
  const paths = [name].flat()
  // Either locate the literal name(s) or delegate to a matcher function,
  // which may return a path, undefined, or the findUpStop sentinel.
  const runMatcher = (locateOptions) => {
    if (typeof name !== "function") {
      return locatePathSync(paths, locateOptions)
    }
    const foundPath = name(locateOptions.cwd)
    if (typeof foundPath === "string") {
      return locatePathSync([foundPath], locateOptions)
    }
    return foundPath
  }
  const matches = []
  while (true) {
    const foundPath = runMatcher({ ...options, cwd: directory })
    if (foundPath === findUpStop) {
      break
    }
    if (foundPath) {
      matches.push(import_path.default.resolve(directory, foundPath))
    }
    if (directory === stopAt || matches.length >= limit) {
      break
    }
    // Move one directory up; at the root, dirname(root) === root, but the
    // stopAt check above terminates the walk before that matters.
    directory = import_path.default.dirname(directory)
  }
  return matches
}
/**
 * Finds the closest single match of `name` walking up from `options.cwd`.
 *
 * @param {string|string[]|Function} name target name(s) or matcher
 * @param {Object} [options] see findUpMultipleSync
 * @returns {string|undefined} absolute path of the first match
 */
function findUpSync(name, options = {}) {
  const matches = findUpMultipleSync(name, { ...options, limit: 1 })
  return matches[0]
}
// lib/findUpSync.mjs
var findUpSync_default = findUpSync
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {})
================================================
FILE: src/utils/prepareMarkdown.js
================================================
// @ts-nocheck
/**
 * Generated via:
 * esbuild lib/prepareMarkdown.mjs --bundle --outfile=src/utils/prepareMarkdown.js --format=cjs
 *
 * Do not edit directly!
 */
// esbuild CommonJS/ESM interop helpers (same shims as findUpSync.js).
var __create = Object.create
var __defProp = Object.defineProperty
var __getOwnPropDesc = Object.getOwnPropertyDescriptor
var __getOwnPropNames = Object.getOwnPropertyNames
var __getProtoOf = Object.getPrototypeOf
var __hasOwnProp = Object.prototype.hasOwnProperty
// Tags a CommonJS exports object as an ES module.
var __markAsModule = (target) =>
  __defProp(target, "__esModule", { value: true })
// Wraps a bundled CommonJS module so it is evaluated lazily, on first
// require, and its exports are cached.
var __commonJS = (cb, mod) =>
  function __require() {
    return (
      mod || (0, cb[Object.keys(cb)[0]])((mod = { exports: {} }).exports, mod),
      mod.exports
    )
  }
// Defines lazy getters on `target` for every entry in `all2`.
var __export = (target, all2) => {
  __markAsModule(target)
  for (var name in all2)
    __defProp(target, name, { get: all2[name], enumerable: true })
}
// Copies (re-exports) all own properties of module2 onto target via
// getters, preserving enumerability; skips "default" and existing keys.
var __reExport = (target, module2, desc) => {
  if (
    (module2 && typeof module2 === "object") ||
    typeof module2 === "function"
  ) {
    for (let key of __getOwnPropNames(module2))
      if (!__hasOwnProp.call(target, key) && key !== "default")
        __defProp(target, key, {
          get: () => module2[key],
          enumerable:
            !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable,
        })
  }
  return target
}
// Wraps a CommonJS module for ES-module-style consumption
// (adds a "default" binding pointing at the original exports).
var __toModule = (module2) => {
  return __reExport(
    __markAsModule(
      __defProp(
        module2 != null ? __create(__getProtoOf(module2)) : {},
        "default",
        module2 && module2.__esModule && "default" in module2
          ? { get: () => module2.default, enumerable: true }
          : { value: module2, enumerable: true },
      ),
    ),
    module2,
  )
}
// node_modules/format/format.js
// Bundled printf-style string formatter (%b %c %d %f %j %o %s %x %X).
var require_format = __commonJS({
  "node_modules/format/format.js"(exports, module2) {
    ;(function () {
      var namespace
      if (typeof module2 !== "undefined") {
        namespace = module2.exports = format
      } else {
        // Non-CommonJS fallback: attach to the global object.
        namespace = (function () {
          return this || (1, eval)("this")
        })()
      }
      namespace.format = format
      namespace.vsprintf = vsprintf
      if (typeof console !== "undefined" && typeof console.log === "function") {
        namespace.printf = printf
      }
      // Formats and logs to the console.
      function printf() {
        console.log(format.apply(null, arguments))
      }
      // Like format, but replacements are given as an array.
      function vsprintf(fmt, replacements) {
        return format.apply(null, [fmt].concat(replacements))
      }
      // Core formatter: scans fmt for %-directives and substitutes the
      // remaining positional arguments one by one.
      function format(fmt) {
        var argIndex = 1,
          args = [].slice.call(arguments),
          i = 0,
          n = fmt.length,
          result = "",
          c,
          escaped = false,
          arg,
          tmp,
          leadingZero = false,
          precision,
          nextArg = function () {
            return args[argIndex++]
          },
          // Consumes a run of digits (the precision) after a directive.
          slurpNumber = function () {
            var digits = ""
            while (/\d/.test(fmt[i])) {
              digits += fmt[i++]
              c = fmt[i]
            }
            return digits.length > 0 ? parseInt(digits) : null
          }
        for (; i < n; ++i) {
          c = fmt[i]
          if (escaped) {
            escaped = false
            // Optional "." / "0." prefix controls the leading zero of %f.
            if (c == ".") {
              leadingZero = false
              c = fmt[++i]
            } else if (c == "0" && fmt[i + 1] == ".") {
              leadingZero = true
              i += 2
              c = fmt[i]
            } else {
              leadingZero = true
            }
            precision = slurpNumber()
            switch (c) {
              case "b": // binary integer
                result += parseInt(nextArg(), 10).toString(2)
                break
              case "c": // character (string or char code)
                arg = nextArg()
                if (typeof arg === "string" || arg instanceof String)
                  result += arg
                else result += String.fromCharCode(parseInt(arg, 10))
                break
              case "d": // decimal integer
                result += parseInt(nextArg(), 10)
                break
              case "f": // fixed-point float (default 6 digits)
                tmp = String(parseFloat(nextArg()).toFixed(precision || 6))
                result += leadingZero ? tmp : tmp.replace(/^0/, "")
                break
              case "j": // JSON
                result += JSON.stringify(nextArg())
                break
              case "o": // octal integer
                result += "0" + parseInt(nextArg(), 10).toString(8)
                break
              case "s": // string
                result += nextArg()
                break
              case "x": // hex integer, lowercase
                result += "0x" + parseInt(nextArg(), 10).toString(16)
                break
              case "X": // hex integer, uppercase
                result +=
                  "0x" + parseInt(nextArg(), 10).toString(16).toUpperCase()
                break
              default:
                // Unknown directive: emit the character literally.
                result += c
                break
            }
          } else if (c === "%") {
            escaped = true
          } else {
            result += c
          }
        }
        return result
      }
    })()
  },
})
// node_modules/is-buffer/index.js
// Duck-typed Buffer check that works without Buffer being in scope:
// relies on the value's constructor exposing its own isBuffer method.
var require_is_buffer = __commonJS({
  "node_modules/is-buffer/index.js"(exports, module2) {
    module2.exports = function isBuffer2(obj) {
      return (
        obj != null &&
        obj.constructor != null &&
        typeof obj.constructor.isBuffer === "function" &&
        obj.constructor.isBuffer(obj)
      )
    }
  },
})
// node_modules/extend/index.js
// Bundled jQuery-style extend(): shallow or deep merge of objects and
// arrays, with __proto__-pollution guards on reads and writes.
var require_extend = __commonJS({
  "node_modules/extend/index.js"(exports, module2) {
    "use strict"
    var hasOwn = Object.prototype.hasOwnProperty
    var toStr = Object.prototype.toString
    var defineProperty = Object.defineProperty
    var gOPD = Object.getOwnPropertyDescriptor
    var isArray = function isArray2(arr) {
      if (typeof Array.isArray === "function") {
        return Array.isArray(arr)
      }
      return toStr.call(arr) === "[object Array]"
    }
    // True for plain objects only (literals / Object.create results),
    // excluding class instances and host objects.
    var isPlainObject2 = function isPlainObject3(obj) {
      if (!obj || toStr.call(obj) !== "[object Object]") {
        return false
      }
      var hasOwnConstructor = hasOwn.call(obj, "constructor")
      var hasIsPrototypeOf =
        obj.constructor &&
        obj.constructor.prototype &&
        hasOwn.call(obj.constructor.prototype, "isPrototypeOf")
      if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
        return false
      }
      // Own properties are enumerated first; if the last enumerated key
      // is an own property, all of them are.
      var key
      for (key in obj) {
      }
      return typeof key === "undefined" || hasOwn.call(obj, key)
    }
    // Assigns a value, defining "__proto__" as a plain data property to
    // avoid mutating the prototype chain.
    var setProperty = function setProperty2(target, options) {
      if (defineProperty && options.name === "__proto__") {
        defineProperty(target, options.name, {
          enumerable: true,
          configurable: true,
          value: options.newValue,
          writable: true,
        })
      } else {
        target[options.name] = options.newValue
      }
    }
    // Reads a value; for "__proto__" only own properties are returned,
    // never the inherited prototype.
    var getProperty = function getProperty2(obj, name) {
      if (name === "__proto__") {
        if (!hasOwn.call(obj, name)) {
          return void 0
        } else if (gOPD) {
          return gOPD(obj, name).value
        }
      }
      return obj[name]
    }
    // extend([deep], target, ...sources) — later sources win; with deep
    // truthy, plain objects and arrays are merged recursively.
    module2.exports = function extend2() {
      var options, name, src, copy, copyIsArray, clone
      var target = arguments[0]
      var i = 1
      var length = arguments.length
      var deep = false
      if (typeof target === "boolean") {
        deep = target
        target = arguments[1] || {}
        i = 2
      }
      if (
        target == null ||
        (typeof target !== "object" && typeof target !== "function")
      ) {
        target = {}
      }
      for (; i < length; ++i) {
        options = arguments[i]
        if (options != null) {
          for (name in options) {
            src = getProperty(target, name)
            copy = getProperty(options, name)
            // Never copy a value onto itself (prevents infinite loops).
            if (target !== copy) {
              if (
                deep &&
                copy &&
                (isPlainObject2(copy) || (copyIsArray = isArray(copy)))
              ) {
                if (copyIsArray) {
                  copyIsArray = false
                  clone = src && isArray(src) ? src : []
                } else {
                  clone = src && isPlainObject2(src) ? src : {}
                }
                setProperty(target, {
                  name,
                  newValue: extend2(deep, clone, copy),
                })
              } else if (typeof copy !== "undefined") {
                setProperty(target, { name, newValue: copy })
              }
            }
          }
        }
      }
      return target
    }
  },
})
// lib/prepareMarkdown.mjs
// Expose the bundle's single export as `default`.
__export(exports, {
  default: () => prepareMarkdown_default,
})
// node_modules/annotatedtext/out/index.js
// Default annotatedtext hooks: how to walk the AST, which nodes count as
// plain text, and how markup between text nodes is interpreted.
var defaults = {
  children(node) {
    return node.children
  },
  // Returns an annotation for plain "text" nodes, null for anything else.
  annotatetextnode(node, text3) {
    if (node.type === "text") {
      return {
        offset: {
          end: node.position.end.offset,
          start: node.position.start.offset,
        },
        text: text3.substring(
          node.position.start.offset,
          node.position.end.offset,
        ),
      }
    } else {
      return null
    }
  },
  // By default, markup is passed through unchanged as its interpretation.
  interpretmarkup(text3 = "") {
    return text3
  },
}
/**
 * Depth-first collection of the text-node annotations in an AST,
 * in document order.
 *
 * @param ast parsed syntax tree
 * @param text3 original source text
 * @param options annotatedtext hooks (defaults above)
 * @returns array of { offset, text } annotations
 */
function collecttextnodes(ast, text3, options = defaults) {
  const textannotations = []
  function recurse(node) {
    const annotation = options.annotatetextnode(node, text3)
    if (annotation !== null) {
      textannotations.push(annotation)
    }
    const children = options.children(node)
    if (children !== null && Array.isArray(children)) {
      children.forEach(recurse)
    }
  }
  recurse(ast)
  return textannotations
}
/**
 * Interleaves markup entries between the collected text annotations so the
 * result covers the whole document: each gap between consecutive text
 * nodes (and the trailing remainder) becomes a markup entry carrying an
 * "interpretAs" value.
 *
 * @param text3 original source text
 * @param annotatedtextnodes text annotations in document order
 * @param options annotatedtext hooks (defaults above)
 * @returns {{ annotation: Object[] }} LanguageTool-style annotation list
 */
function composeannotation(text3, annotatedtextnodes, options = defaults) {
  const annotations = []
  let prior = {
    offset: {
      end: 0,
      start: 0,
    },
  }
  for (const current of annotatedtextnodes) {
    // Markup between the previous text node and this one.
    const currenttext = text3.substring(prior.offset.end, current.offset.start)
    annotations.push({
      interpretAs: options.interpretmarkup(currenttext),
      markup: currenttext,
      offset: {
        end: current.offset.start,
        start: prior.offset.end,
      },
    })
    annotations.push(current)
    prior = current
  }
  // Trailing markup after the last text node.
  const finaltext = text3.substring(prior.offset.end, text3.length)
  annotations.push({
    interpretAs: options.interpretmarkup(finaltext),
    markup: finaltext,
    offset: {
      end: text3.length,
      start: prior.offset.end,
    },
  })
  return { annotation: annotations }
}
// Parses `text3` with the supplied parser and returns the annotated-text
// payload (text nodes plus interpreted markup in between).
function build(text3, parse3, options = defaults) {
  const nodes = parse3(text3)
  const textnodes = collecttextnodes(nodes, text3, options)
  return composeannotation(text3, textnodes, options)
}
// node_modules/fault/index.js
var import_format = __toModule(require_format())
// fault(...) creates printf-formatted Errors; the properties create the
// matching built-in error subclasses (fault.type, fault.range, ...).
var fault = Object.assign(create(Error), {
  eval: create(EvalError),
  range: create(RangeError),
  reference: create(ReferenceError),
  syntax: create(SyntaxError),
  type: create(TypeError),
  uri: create(URIError),
})
// Builds a factory that formats its arguments and wraps the result in the
// given error constructor.
function create(Constructor) {
  FormattedError.displayName = Constructor.displayName || Constructor.name
  return FormattedError
  function FormattedError(format, ...values) {
    var reason = format ? (0, import_format.default)(format, ...values) : format
    return new Constructor(reason)
  }
}
// node_modules/micromark-extension-frontmatter/matters.js
var own = {}.hasOwnProperty
// Fence characters for the supported frontmatter presets.
var markers = {
  yaml: "-",
  toml: "+",
}
// Normalizes the frontmatter options into an array of matter objects.
function matters(options = "yaml") {
  const results = []
  let index2 = -1
  if (!Array.isArray(options)) {
    options = [options]
  }
  while (++index2 < options.length) {
    results[index2] = matter(options[index2])
  }
  return results
}
// Validates/expands one matter option: a preset name ("yaml"/"toml") or
// an object that must have `type` plus `marker` or `fence`.
function matter(option) {
  let result = option
  if (typeof result === "string") {
    if (!own.call(markers, result)) {
      throw fault("Missing matter definition for `%s`", result)
    }
    result = {
      type: result,
      marker: markers[result],
    }
  } else if (typeof result !== "object") {
    throw fault("Expected matter to be an object, not `%j`", result)
  }
  if (!own.call(result, "type")) {
    throw fault("Missing `type` in matter `%j`", result)
  }
  if (!own.call(result, "fence") && !own.call(result, "marker")) {
    throw fault("Missing `marker` or `fence` in matter `%j`", result)
  }
  return result
}
// node_modules/micromark-util-character/lib/unicode-punctuation-regex.js
// Matches one punctuation character as defined by CommonMark: all ASCII
// punctuation plus a fixed (BMP-only) list of Unicode punctuation ranges.
var unicodePunctuationRegex =
  /[!-/:-@[-`{-~\u00A1\u00A7\u00AB\u00B6\u00B7\u00BB\u00BF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/
// node_modules/micromark-util-character/index.js
// Character-class predicates built with `regexCheck`; each takes a character
// code (or `null` for EOF) and reports whether it matches.
var asciiAlpha = regexCheck(/[A-Za-z]/)
var asciiDigit = regexCheck(/\d/)
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/)
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/)
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/)
// "atext" characters allowed in email autolinks (printable ASCII minus a
// few specials) — see the CommonMark autolink rules.
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/)
// Whether `code` is an ASCII control character: C0 controls (0–31) or
// DEL (127). `null` (EOF) is never a control character.
function asciiControl(code) {
  if (code === null) {
    return false
  }
  return code < 32 || code === 127
}
// Whether `code` is markdown whitespace: any virtual code (negative values
// represent tabs/line endings in micromark) or a literal space (32).
function markdownLineEndingOrSpace(code) {
  if (code === null) {
    return false
  }
  return code < 0 || code === 32
}
// Whether `code` is a virtual line-ending code (micromark encodes CR, LF,
// and CRLF as codes below -2).
function markdownLineEnding(code) {
  if (code === null) {
    return false
  }
  return code < -2
}
// Whether `code` is markdown "space": a virtual tab (-2), a virtual space
// produced by tab expansion (-1), or a literal space (32).
function markdownSpace(code) {
  switch (code) {
    case -2:
    case -1:
    case 32:
      return true
    default:
      return false
  }
}
// Unicode-aware whitespace/punctuation predicates over character codes.
var unicodeWhitespace = regexCheck(/\s/)
var unicodePunctuation = regexCheck(unicodePunctuationRegex)
// Turn a single-character `regex` into a predicate over character codes.
// `null` (EOF) never matches; otherwise the code is converted back to a
// one-character string and tested.
function regexCheck(regex) {
  return function check(code) {
    return code !== null && regex.test(String.fromCharCode(code))
  }
}
// node_modules/micromark-extension-frontmatter/lib/syntax.js
// Create the micromark syntax extension for front matter. Each configured
// matter becomes a flow construct keyed by the char code of the first
// character of its opening fence.
function frontmatter(options) {
  const flow3 = {}
  for (const matter2 of matters(options)) {
    const code = fence(matter2, "open").charCodeAt(0)
    const construct = parse(matter2)
    if (code in flow3) {
      flow3[code].push(construct)
    } else {
      flow3[code] = [construct]
    }
  }
  return {
    flow: flow3,
  }
}
// Build a micromark construct that tokenizes one front matter block for a
// resolved matter definition (`matter2`). The construct is `concrete`
// (container constructs cannot interrupt it).
function parse(matter2) {
  const name = matter2.type
  const anywhere = matter2.anywhere
  // Derived token type names, e.g. `yamlValue`, `yamlFence`,
  // `yamlFenceSequence`.
  const valueType = name + "Value"
  const fenceType = name + "Fence"
  const sequenceType = fenceType + "Sequence"
  const fenceConstruct = {
    tokenize: tokenizeFence,
    partial: true,
  }
  // The fence string `tokenizeFence` currently matches against; switched
  // from the opening to the closing fence after the opening fence is found.
  let buffer2
  return {
    tokenize: tokenizeFrontmatter,
    concrete: true,
  }
  function tokenizeFrontmatter(effects, ok, nok) {
    const self = this
    return start
    function start(code) {
      const position2 = self.now()
      // Front matter must start at column 1, and (unless `anywhere` is set)
      // on the very first line of the document.
      if (position2.column !== 1 || (!anywhere && position2.line !== 1)) {
        return nok(code)
      }
      effects.enter(name)
      buffer2 = fence(matter2, "open")
      return effects.attempt(fenceConstruct, afterOpeningFence, nok)(code)
    }
    function afterOpeningFence(code) {
      // From here on, look for the closing fence.
      buffer2 = fence(matter2, "close")
      return lineEnd(code)
    }
    // Start of a content line: either empty (straight to the line ending)
    // or a value chunk.
    function lineStart(code) {
      if (code === null || markdownLineEnding(code)) {
        return lineEnd(code)
      }
      effects.enter(valueType)
      return lineData(code)
    }
    // Consume value characters until EOL/EOF.
    function lineData(code) {
      if (code === null || markdownLineEnding(code)) {
        effects.exit(valueType)
        return lineEnd(code)
      }
      effects.consume(code)
      return lineData
    }
    // At a line break: EOF here means the closing fence was never found,
    // so the whole attempt fails.
    function lineEnd(code) {
      if (code === null) {
        return nok(code)
      }
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      // Next line is either the closing fence or more content.
      return effects.attempt(fenceConstruct, after, lineStart)
    }
    function after(code) {
      effects.exit(name)
      return ok(code)
    }
  }
  // Partial tokenizer for one fence line (`buffer2` + optional trailing
  // whitespace + EOL/EOF).
  function tokenizeFence(effects, ok, nok) {
    let bufferIndex = 0
    return start
    function start(code) {
      if (code === buffer2.charCodeAt(bufferIndex)) {
        effects.enter(fenceType)
        effects.enter(sequenceType)
        return insideSequence(code)
      }
      return nok(code)
    }
    function insideSequence(code) {
      // Matched the whole fence string: allow trailing whitespace.
      if (bufferIndex === buffer2.length) {
        effects.exit(sequenceType)
        if (markdownSpace(code)) {
          effects.enter("whitespace")
          return insideWhitespace(code)
        }
        return fenceEnd(code)
      }
      // Note: `bufferIndex` advances even when the comparison fails.
      if (code === buffer2.charCodeAt(bufferIndex++)) {
        effects.consume(code)
        return insideSequence
      }
      return nok(code)
    }
    function insideWhitespace(code) {
      if (markdownSpace(code)) {
        effects.consume(code)
        return insideWhitespace
      }
      effects.exit("whitespace")
      return fenceEnd(code)
    }
    // The fence line must be terminated by a line ending or EOF.
    function fenceEnd(code) {
      if (code === null || markdownLineEnding(code)) {
        effects.exit(fenceType)
        return ok(code)
      }
      return nok(code)
    }
  }
}
// The fence string (`prop` is "open" or "close") for a matter definition:
// a marker is repeated three times, a fence is used verbatim.
function fence(matter2, prop) {
  if (matter2.marker) {
    return pick(matter2.marker, prop).repeat(3)
  }
  return pick(matter2.fence, prop)
}
// A schema is either a single string (used for both open and close) or an
// object with separate `open`/`close` strings.
function pick(schema, prop) {
  if (typeof schema === "string") {
    return schema
  }
  return schema[prop]
}
// node_modules/mdast-util-frontmatter/index.js
// Create the mdast-util-from-markdown extension for front matter: enter and
// exit handlers for each configured matter type and its value token.
function frontmatterFromMarkdown(options) {
  const enter = {}
  const exit2 = {}
  for (const matter2 of matters(options)) {
    enter[matter2.type] = opener(matter2)
    exit2[matter2.type] = close
    exit2[matter2.type + "Value"] = value
  }
  return { enter, exit: exit2 }
}
// Build an enter handler for one matter type: push an empty node of that
// type and start buffering its value.
function opener(matter2) {
  return function open(token) {
    this.enter({ type: matter2.type, value: "" }, token)
    this.buffer()
  }
}
// Exit handler for a front matter node: take the buffered value and strip
// one leading and one trailing line ending.
function close(token) {
  const raw = this.resume()
  const node = this.exit(token)
  node.value = raw.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "")
}
// Value handler: delegate to the default `data` enter/exit handlers so the
// buffered text accumulates normally.
function value(token) {
  const config = this.config
  config.enter.data.call(this, token)
  config.exit.data.call(this, token)
}
// Create the mdast-util-to-markdown extension for front matter: a serializer
// per matter type, plus an `unsafe` entry so the opening fence character is
// escaped at line breaks elsewhere.
function frontmatterToMarkdown(options) {
  const unsafe = []
  const handlers = {}
  for (const matter2 of matters(options)) {
    handlers[matter2.type] = handler(matter2)
    unsafe.push({ atBreak: true, character: fence2(matter2, "open").charAt(0) })
  }
  return { unsafe, handlers }
}
// Build the serializer for one matter type: fences around the node's value
// (the value line is omitted entirely when the value is empty).
function handler(matter2) {
  const open = fence2(matter2, "open")
  const close2 = fence2(matter2, "close")
  return function handle(node) {
    if (node.value) {
      return open + "\n" + node.value + "\n" + close2
    }
    return open + "\n" + close2
  }
}
// Same as `fence`, duplicated by the bundler for this module: the fence
// string for `prop` ("open"/"close") of a matter definition.
function fence2(matter2, prop) {
  if (matter2.marker) {
    return pick2(matter2.marker, prop).repeat(3)
  }
  return pick2(matter2.fence, prop)
}
// A schema is either one string for both sides or `{open, close}`.
function pick2(schema, prop) {
  if (typeof schema === "string") {
    return schema
  }
  return schema[prop]
}
// node_modules/remark-frontmatter/index.js
// remark plugin: register the micromark, from-markdown, and to-markdown
// front matter extensions on the processor's data.
function remarkFrontmatter(options = "yaml") {
  const data = this.data()
  function add(field, value2) {
    if (!data[field]) {
      data[field] = []
    }
    data[field].push(value2)
  }
  add("micromarkExtensions", frontmatter(options))
  add("fromMarkdownExtensions", frontmatterFromMarkdown(options))
  add("toMarkdownExtensions", frontmatterToMarkdown(options))
}
// node_modules/mdast-util-to-string/index.js
// Get the plain-text value of an mdast node (or list of nodes). Image alt
// text is included unless `options.includeImageAlt` is explicitly false.
function toString(node, options) {
  const settings = options || {}
  const includeImageAlt =
    settings.includeImageAlt === undefined ? true : settings.includeImageAlt
  return one(node, includeImageAlt)
}
// Serialize one node: its `value`, else its `alt` (when enabled), else the
// concatenation of its children, else (for arrays) its items, else "".
function one(node, includeImageAlt) {
  if (!node || typeof node !== "object") {
    return ""
  }
  return (
    node.value ||
    (includeImageAlt ? node.alt : "") ||
    ("children" in node && all(node.children, includeImageAlt)) ||
    (Array.isArray(node) && all(node, includeImageAlt)) ||
    ""
  )
}
// Serialize every value in `values` and join the results.
function all(values, includeImageAlt) {
  const parts = []
  for (let index2 = 0; index2 < values.length; index2++) {
    parts[index2] = one(values[index2], includeImageAlt)
  }
  return parts.join("")
}
// node_modules/micromark-util-chunked/index.js
// Like `Array.prototype.splice(start, remove, ...items)` but safe for huge
// `items` arrays: spreading very large argument lists can blow the call
// stack, so items beyond 10k are inserted in chunks.
function splice(list2, start, remove, items) {
  const end = list2.length
  let chunkStart = 0
  // Clamp `start` the way native splice does (negative counts from the end).
  if (start < 0) {
    start = -start > end ? 0 : end + start
  } else if (start > end) {
    start = end
  }
  // Non-positive (or NaN) removal counts become 0.
  remove = remove > 0 ? remove : 0
  if (items.length < 1e4) {
    list2.splice(start, remove, ...items)
  } else {
    // Remove first, then insert 10k-element chunks one at a time.
    if (remove) {
      list2.splice(start, remove)
    }
    while (chunkStart < items.length) {
      const chunk = items.slice(chunkStart, chunkStart + 1e4)
      list2.splice(start, 0, ...chunk)
      chunkStart += 1e4
      start += 1e4
    }
  }
}
// Append `items` to `list2` (chunk-safe). When `list2` is empty the `items`
// array itself is returned, not a copy — callers rely on this.
function push(list2, items) {
  if (list2.length === 0) {
    return items
  }
  splice(list2, list2.length, 0, items)
  return list2
}
// node_modules/micromark-util-combine-extensions/index.js
// Prototype-safe own-property check shared by the extension combiners.
var hasOwnProperty = {}.hasOwnProperty
// Merge several micromark syntax extensions into a single extension object.
function combineExtensions(extensions) {
  const all2 = {}
  for (let index2 = 0; index2 < extensions.length; index2++) {
    syntaxExtension(all2, extensions[index2])
  }
  return all2
}
// Merge one syntax extension into the accumulator `all2`. Each hook (e.g.
// `flow`, `text`) maps char codes to constructs; constructs are normalized
// to arrays and merged respecting their `add: "after"` markers.
function syntaxExtension(all2, extension2) {
  for (const hook in extension2) {
    const existing = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0
    // Keep the original's behavior: a falsy stored hook is replaced too.
    const left = existing || (all2[hook] = {})
    const right = extension2[hook]
    for (const code in right) {
      if (!hasOwnProperty.call(left, code)) {
        left[code] = []
      }
      const value2 = right[code]
      constructs(
        left[code],
        Array.isArray(value2) ? value2 : value2 ? [value2] : [],
      )
    }
  }
}
// Merge constructs from `list2` into `existing`: entries marked
// `add: "after"` are appended, all others are prepended (in order).
function constructs(existing, list2) {
  const before = []
  for (let index2 = 0; index2 < list2.length; index2++) {
    const construct = list2[index2]
    if (construct.add === "after") {
      existing.push(construct)
    } else {
      before.push(construct)
    }
  }
  splice(existing, 0, 0, before)
}
// node_modules/micromark-factory-space/index.js
// State-machine factory that consumes a run of markdown spaces/tabs into a
// token of `type`, then continues with `ok`. `max` (when given) caps the
// run at `max - 1` consumed characters.
function factorySpace(effects, ok, type, max) {
  const limit = max ? max - 1 : Number.POSITIVE_INFINITY
  let size = 0
  return start
  function start(code) {
    if (!markdownSpace(code)) {
      return ok(code)
    }
    effects.enter(type)
    return prefix(code)
  }
  function prefix(code) {
    // Note: `size++` still increments when the limit check fails; the
    // original relies only on the comparison result, so this is harmless.
    if (markdownSpace(code) && size++ < limit) {
      effects.consume(code)
      return prefix
    }
    effects.exit(type)
    return ok(code)
  }
}
// node_modules/micromark/lib/initialize/content.js
// Initial tokenizer for "content" chunks: definitions first, then a single
// trailing paragraph made of `chunkText` tokens (one per line).
var content = {
  tokenize: initializeContent,
}
function initializeContent(effects) {
  // Repeatedly try content-initial constructs (definitions); once one fails,
  // everything that remains is one paragraph.
  const contentStart = effects.attempt(
    this.parser.constructs.contentInitial,
    afterContentStartConstruct,
    paragraphInitial,
  )
  // Previous `chunkText` token, to link chunks into a chain for the
  // subtokenizer.
  let previous2
  return contentStart
  function afterContentStartConstruct(code) {
    // After a definition: either EOF or a line ending, then try again.
    if (code === null) {
      effects.consume(code)
      return
    }
    effects.enter("lineEnding")
    effects.consume(code)
    effects.exit("lineEnding")
    return factorySpace(effects, contentStart, "linePrefix")
  }
  function paragraphInitial(code) {
    effects.enter("paragraph")
    return lineStart(code)
  }
  // Open a `chunkText` token for one line and chain it to the previous one.
  function lineStart(code) {
    const token = effects.enter("chunkText", {
      contentType: "text",
      previous: previous2,
    })
    if (previous2) {
      previous2.next = token
    }
    previous2 = token
    return data(code)
  }
  // Consume text until EOF (close everything) or a line ending (start the
  // next chunk).
  function data(code) {
    if (code === null) {
      effects.exit("chunkText")
      effects.exit("paragraph")
      effects.consume(code)
      return
    }
    if (markdownLineEnding(code)) {
      effects.consume(code)
      effects.exit("chunkText")
      return lineStart
    }
    effects.consume(code)
    return data
  }
}
// node_modules/micromark/lib/initialize/document.js
// Initial tokenizer for the document layer: tracks a stack of open
// containers (block quotes, list items) and feeds the remaining text of
// each line to a child "flow" tokenizer as `chunkFlow` tokens.
var document2 = {
  tokenize: initializeDocument,
}
// Partial construct used to probe for a container start on the current line.
var containerConstruct = {
  tokenize: tokenizeContainer,
}
function initializeDocument(effects) {
  const self = this
  // Stack of `[construct, containerState]` pairs for open containers.
  const stack = []
  // How many containers matched (continued) on the current line.
  let continued = 0
  // Child flow tokenizer and the last `chunkFlow` token written to it.
  let childFlow
  let childToken
  // Offset where the current line's flow content starts; used for laziness.
  let lineStartOffset
  return start
  function start(code) {
    // First, check whether the containers already on the stack continue on
    // this line.
    if (continued < stack.length) {
      const item = stack[continued]
      self.containerState = item[1]
      return effects.attempt(
        item[0].continuation,
        documentContinue,
        checkNewContainers,
      )(code)
    }
    return checkNewContainers(code)
  }
  function documentContinue(code) {
    continued++
    // A continuation may request that the current flow be closed (e.g. a
    // blank line ending a list item); if so, close it and repair event
    // positions so the exits land before the flow's trailing events.
    if (self.containerState._closeFlow) {
      self.containerState._closeFlow = void 0
      if (childFlow) {
        closeFlow()
      }
      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      let point2
      // Find the last `chunkFlow` exit: its end is where container exits
      // should be positioned.
      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === "exit" &&
          self.events[indexBeforeFlow][1].type === "chunkFlow"
        ) {
          point2 = self.events[indexBeforeFlow][1].end
          break
        }
      }
      exitContainers(continued)
      // Rewrite the freshly added exit events to end at that point, then
      // move them to before the flow chunk.
      let index2 = indexBeforeExits
      while (index2 < self.events.length) {
        self.events[index2][1].end = Object.assign({}, point2)
        index2++
      }
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits),
      )
      self.events.length = index2
      return checkNewContainers(code)
    }
    return start(code)
  }
  function checkNewContainers(code) {
    // All containers continued: if the current flow construct is concrete
    // (e.g. fenced code), no new containers may start.
    if (continued === stack.length) {
      if (!childFlow) {
        return documentContinued(code)
      }
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code)
      }
      // New containers here would interrupt the current flow construct.
      self.interrupt = Boolean(childFlow.currentConstruct)
    }
    self.containerState = {}
    return effects.check(
      containerConstruct,
      thereIsANewContainer,
      thereIsNoNewContainer,
    )(code)
  }
  function thereIsANewContainer(code) {
    if (childFlow) closeFlow()
    exitContainers(continued)
    return documentContinued(code)
  }
  function thereIsNoNewContainer(code) {
    // The line is "lazy" when not all containers continued.
    self.parser.lazy[self.now().line] = continued !== stack.length
    lineStartOffset = self.now().offset
    return flowStart(code)
  }
  function documentContinued(code) {
    // Try to open further new containers on this line.
    self.containerState = {}
    return effects.attempt(
      containerConstruct,
      containerContinue,
      flowStart,
    )(code)
  }
  function containerContinue(code) {
    continued++
    stack.push([self.currentConstruct, self.containerState])
    return documentContinued(code)
  }
  function flowStart(code) {
    if (code === null) {
      if (childFlow) closeFlow()
      exitContainers(0)
      effects.consume(code)
      return
    }
    childFlow = childFlow || self.parser.flow(self.now())
    effects.enter("chunkFlow", {
      contentType: "flow",
      previous: childToken,
      _tokenizer: childFlow,
    })
    return flowContinue(code)
  }
  function flowContinue(code) {
    if (code === null) {
      writeToChild(effects.exit("chunkFlow"), true)
      exitContainers(0)
      effects.consume(code)
      return
    }
    if (markdownLineEnding(code)) {
      effects.consume(code)
      writeToChild(effects.exit("chunkFlow"))
      // Reset per-line state.
      continued = 0
      self.interrupt = void 0
      return start
    }
    effects.consume(code)
    return flowContinue
  }
  // Feed one `chunkFlow` token to the child flow tokenizer; `eof` appends
  // the EOF marker.
  function writeToChild(token, eof) {
    const stream = self.sliceStream(token)
    if (eof) stream.push(null)
    token.previous = childToken
    if (childToken) childToken.next = token
    childToken = token
    childFlow.defineSkip(token.start)
    childFlow.write(stream)
    // On a lazy line, if the child ended a construct, containers must be
    // closed retroactively (same event-repair dance as above).
    if (self.parser.lazy[token.start.line]) {
      let index2 = childFlow.events.length
      while (index2--) {
        if (
          childFlow.events[index2][1].start.offset < lineStartOffset &&
          (!childFlow.events[index2][1].end ||
            childFlow.events[index2][1].end.offset > lineStartOffset)
        ) {
          // A construct still spans the line start: lazy continuation, keep
          // containers open.
          return
        }
      }
      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      let seen
      let point2
      // Find the end of the second-to-last `chunkFlow` (the one before the
      // lazy line).
      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === "exit" &&
          self.events[indexBeforeFlow][1].type === "chunkFlow"
        ) {
          if (seen) {
            point2 = self.events[indexBeforeFlow][1].end
            break
          }
          seen = true
        }
      }
      exitContainers(continued)
      index2 = indexBeforeExits
      while (index2 < self.events.length) {
        self.events[index2][1].end = Object.assign({}, point2)
        index2++
      }
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits),
      )
      self.events.length = index2
    }
  }
  // Close all containers above `size` on the stack (innermost first).
  function exitContainers(size) {
    let index2 = stack.length
    while (index2-- > size) {
      const entry = stack[index2]
      self.containerState = entry[1]
      entry[0].exit.call(self, effects)
    }
    stack.length = size
  }
  // Flush EOF into the child flow tokenizer and drop it.
  function closeFlow() {
    childFlow.write([null])
    childToken = void 0
    childFlow = void 0
    self.containerState._closeFlow = void 0
  }
}
// Probe for a container start: optional indentation (capped at 3 spaces
// unless indented code is disabled) followed by any document construct.
function tokenizeContainer(effects, ok, nok) {
  const maxPrefix = this.parser.constructs.disable.null.includes("codeIndented")
    ? void 0
    : 4
  return factorySpace(
    effects,
    effects.attempt(this.parser.constructs.document, ok, nok),
    "linePrefix",
    maxPrefix,
  )
}
// node_modules/micromark-util-classify-character/index.js
// Classify a character for attention (emphasis) flanking rules:
// 1 = whitespace (incl. EOF), 2 = punctuation, undefined = other.
function classifyCharacter(code) {
  if (code === null) {
    return 1
  }
  if (markdownLineEndingOrSpace(code) || unicodeWhitespace(code)) {
    return 1
  }
  if (unicodePunctuation(code)) {
    return 2
  }
  // Anything else: implicitly `undefined`, matching the original.
}
// node_modules/micromark-util-resolve-all/index.js
// Run each construct's `resolveAll` over `events`, at most once per distinct
// resolver (the same function shared by several constructs runs only once).
function resolveAll(constructs2, events, context) {
  const called = []
  for (let index2 = 0; index2 < constructs2.length; index2++) {
    const resolve = constructs2[index2].resolveAll
    if (resolve && !called.includes(resolve)) {
      events = resolve(events, context)
      called.push(resolve)
    }
  }
  return events
}
// node_modules/micromark-core-commonmark/lib/attention.js
// Attention (emphasis/strong) construct: tokenizes runs of `*`/`_` and
// later pairs openers with closers in `resolveAllAttention`.
var attention = {
  name: "attention",
  tokenize: tokenizeAttention,
  resolveAll: resolveAllAttention,
}
// Pair attention sequences into emphasis/strong groups and rewrite the
// event stream accordingly. Unpaired sequences become plain data.
function resolveAllAttention(events, context) {
  let index2 = -1
  let open
  let group
  let text3
  let openingSequence
  let closingSequence
  let use
  let nextEvents
  let offset
  // Walk forward looking for closers; for each, walk backward for the
  // nearest compatible opener.
  while (++index2 < events.length) {
    if (
      events[index2][0] === "enter" &&
      events[index2][1].type === "attentionSequence" &&
      events[index2][1]._close
    ) {
      open = index2
      while (open--) {
        if (
          events[open][0] === "exit" &&
          events[open][1].type === "attentionSequence" &&
          events[open][1]._open &&
          // Both runs must use the same marker (`*` vs `_`).
          context.sliceSerialize(events[open][1]).charCodeAt(0) ===
            context.sliceSerialize(events[index2][1]).charCodeAt(0)
        ) {
          // CommonMark "rule of three": when a run can both open and close,
          // the combined lengths must not be a multiple of 3 unless both
          // lengths are.
          if (
            (events[open][1]._close || events[index2][1]._open) &&
            (events[index2][1].end.offset - events[index2][1].start.offset) %
              3 &&
            !(
              (events[open][1].end.offset -
                events[open][1].start.offset +
                events[index2][1].end.offset -
                events[index2][1].start.offset) %
              3
            )
          ) {
            continue
          }
          // Use 2 markers (strong) when both runs have at least 2 left,
          // otherwise 1 (emphasis).
          use =
            events[open][1].end.offset - events[open][1].start.offset > 1 &&
            events[index2][1].end.offset - events[index2][1].start.offset > 1
              ? 2
              : 1
          const start = Object.assign({}, events[open][1].end)
          const end = Object.assign({}, events[index2][1].start)
          movePoint(start, -use)
          movePoint(end, use)
          openingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start,
            end: Object.assign({}, events[open][1].end),
          }
          closingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start: Object.assign({}, events[index2][1].start),
            end,
          }
          text3 = {
            type: use > 1 ? "strongText" : "emphasisText",
            start: Object.assign({}, events[open][1].end),
            end: Object.assign({}, events[index2][1].start),
          }
          group = {
            type: use > 1 ? "strong" : "emphasis",
            start: Object.assign({}, openingSequence.start),
            end: Object.assign({}, closingSequence.end),
          }
          // Shrink the original sequences by the markers consumed.
          events[open][1].end = Object.assign({}, openingSequence.start)
          events[index2][1].start = Object.assign({}, closingSequence.end)
          nextEvents = []
          // Leftover opener markers stay as their own (data) events.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = push(nextEvents, [
              ["enter", events[open][1], context],
              ["exit", events[open][1], context],
            ])
          }
          nextEvents = push(nextEvents, [
            ["enter", group, context],
            ["enter", openingSequence, context],
            ["exit", openingSequence, context],
            ["enter", text3, context],
          ])
          // Re-resolve the span between the two sequences.
          nextEvents = push(
            nextEvents,
            resolveAll(
              context.parser.constructs.insideSpan.null,
              events.slice(open + 1, index2),
              context,
            ),
          )
          nextEvents = push(nextEvents, [
            ["exit", text3, context],
            ["enter", closingSequence, context],
            ["exit", closingSequence, context],
            ["exit", group, context],
          ])
          // Leftover closer markers likewise stay behind.
          if (events[index2][1].end.offset - events[index2][1].start.offset) {
            offset = 2
            nextEvents = push(nextEvents, [
              ["enter", events[index2][1], context],
              ["exit", events[index2][1], context],
            ])
          } else {
            offset = 0
          }
          splice(events, open - 1, index2 - open + 3, nextEvents)
          // Continue scanning just after the inserted group.
          index2 = open + nextEvents.length - offset - 2
          break
        }
      }
    }
  }
  // Anything left unpaired is demoted to plain data.
  index2 = -1
  while (++index2 < events.length) {
    if (events[index2][1].type === "attentionSequence") {
      events[index2][1].type = "data"
    }
  }
  return events
}
// Tokenize one run of identical attention markers (`*` or `_`) and mark
// whether it can open and/or close emphasis, per the flanking rules.
function tokenizeAttention(effects, ok) {
  const attentionMarkers2 = this.parser.constructs.attentionMarkers.null
  const previous2 = this.previous
  // Classification of the character before the run (1 = whitespace,
  // 2 = punctuation, undefined = other).
  const before = classifyCharacter(previous2)
  let marker
  return start
  function start(code) {
    effects.enter("attentionSequence")
    marker = code
    return sequence(code)
  }
  function sequence(code) {
    if (code === marker) {
      effects.consume(code)
      return sequence
    }
    const token = effects.exit("attentionSequence")
    const after = classifyCharacter(code)
    // Left-flanking: not followed by whitespace, and if followed by
    // punctuation then preceded by whitespace/punctuation (or the marker is
    // an extension-configured attention marker). Right-flanking mirrors it.
    const open =
      !after || (after === 2 && before) || attentionMarkers2.includes(code)
    const close2 =
      !before ||
      (before === 2 && after) ||
      attentionMarkers2.includes(previous2)
    // `*` (42) uses the flanking result directly; `_` additionally must not
    // sit between two word characters (intraword emphasis is disallowed).
    token._open = Boolean(marker === 42 ? open : open && (before || !close2))
    token._close = Boolean(marker === 42 ? close2 : close2 && (after || !open))
    return ok(code)
  }
}
// Shift a point by `offset` characters in place. The line never changes
// here because attention sequences cannot span line endings.
function movePoint(point2, offset) {
  point2.column = point2.column + offset
  point2.offset = point2.offset + offset
  point2._bufferIndex = point2._bufferIndex + offset
}
// node_modules/micromark-core-commonmark/lib/autolink.js
// Autolink construct: `<scheme:uri>` or `<email@example.com>`.
var autolink = {
  name: "autolink",
  tokenize: tokenizeAutolink,
}
function tokenizeAutolink(effects, ok, nok) {
  // Counts scheme length (max 32) and, later, email label length (max 63).
  let size = 1
  return start
  // `<` — open the autolink and start assuming a protocol.
  function start(code) {
    effects.enter("autolink")
    effects.enter("autolinkMarker")
    effects.consume(code)
    effects.exit("autolinkMarker")
    effects.enter("autolinkProtocol")
    return open
  }
  // First character: a letter may begin a scheme; anything else can only be
  // part of an email address.
  function open(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return schemeOrEmailAtext
    }
    return asciiAtext(code) ? emailAtext(code) : nok(code)
  }
  function schemeOrEmailAtext(code) {
    return code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)
      ? schemeInsideOrEmailAtext(code)
      : emailAtext(code)
  }
  function schemeInsideOrEmailAtext(code) {
    // `:` ends the scheme — it's a URI autolink.
    if (code === 58) {
      effects.consume(code)
      return urlInside
    }
    // Scheme characters: `+`, `-`, `.`, alphanumerics; max 32 total.
    if (
      (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&
      size++ < 32
    ) {
      effects.consume(code)
      return schemeInsideOrEmailAtext
    }
    return emailAtext(code)
  }
  // URI body: anything except EOF, space, `<`, or control characters,
  // terminated by `>`.
  function urlInside(code) {
    if (code === 62) {
      effects.exit("autolinkProtocol")
      return end(code)
    }
    if (code === null || code === 32 || code === 60 || asciiControl(code)) {
      return nok(code)
    }
    effects.consume(code)
    return urlInside
  }
  // Local part of an email address; `@` (64) switches to the domain.
  function emailAtext(code) {
    if (code === 64) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (asciiAtext(code)) {
      effects.consume(code)
      return emailAtext
    }
    return nok(code)
  }
  // A domain label must start with an alphanumeric.
  function emailAtSignOrDot(code) {
    return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)
  }
  function emailLabel(code) {
    // `.` starts a new label; `>` ends the autolink (retyped as email).
    if (code === 46) {
      effects.consume(code)
      size = 0
      return emailAtSignOrDot
    }
    if (code === 62) {
      effects.exit("autolinkProtocol").type = "autolinkEmail"
      return end(code)
    }
    return emailValue(code)
  }
  // Label body: alphanumerics and `-` (which may not end a label), max 63.
  function emailValue(code) {
    if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {
      effects.consume(code)
      return code === 45 ? emailValue : emailLabel
    }
    return nok(code)
  }
  // Closing `>`.
  function end(code) {
    effects.enter("autolinkMarker")
    effects.consume(code)
    effects.exit("autolinkMarker")
    effects.exit("autolink")
    return ok
  }
}
// node_modules/micromark-core-commonmark/lib/blank-line.js
// Partial construct matching a blank line: optional whitespace followed by
// a line ending or EOF.
var blankLine = {
  tokenize: tokenizeBlankLine,
  partial: true,
}
function tokenizeBlankLine(effects, ok, nok) {
  return factorySpace(effects, afterWhitespace, "linePrefix")
  function afterWhitespace(code) {
    if (code === null || markdownLineEnding(code)) {
      return ok(code)
    }
    return nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/block-quote.js
// Block quote container construct: `>` optionally followed by one space.
var blockQuote = {
  name: "blockQuote",
  tokenize: tokenizeBlockQuoteStart,
  continuation: {
    tokenize: tokenizeBlockQuoteContinuation,
  },
  exit,
}
function tokenizeBlockQuoteStart(effects, ok, nok) {
  const self = this
  return start
  function start(code) {
    // `>` (62) starts or continues the quote.
    if (code === 62) {
      const state = self.containerState
      // Only open the container token once per block quote; further lines
      // reuse the same container state.
      if (!state.open) {
        effects.enter("blockQuote", {
          _container: true,
        })
        state.open = true
      }
      effects.enter("blockQuotePrefix")
      effects.enter("blockQuoteMarker")
      effects.consume(code)
      effects.exit("blockQuoteMarker")
      return after
    }
    return nok(code)
  }
  function after(code) {
    // One optional space/tab after the marker belongs to the prefix.
    if (markdownSpace(code)) {
      effects.enter("blockQuotePrefixWhitespace")
      effects.consume(code)
      effects.exit("blockQuotePrefixWhitespace")
      effects.exit("blockQuotePrefix")
      return ok
    }
    effects.exit("blockQuotePrefix")
    return ok(code)
  }
}
// A block quote continues when, after up to three spaces of indentation
// (no cap when indented code is disabled), another `>` marker follows.
function tokenizeBlockQuoteContinuation(effects, ok, nok) {
  const maxIndent = this.parser.constructs.disable.null.includes("codeIndented")
    ? void 0
    : 4
  return factorySpace(
    effects,
    effects.attempt(blockQuote, ok, nok),
    "linePrefix",
    maxIndent,
  )
}
// Close the `blockQuote` container token when the container is exited.
function exit(effects) {
  effects.exit("blockQuote")
}
// node_modules/micromark-core-commonmark/lib/character-escape.js
// Character escape construct: a backslash followed by one ASCII
// punctuation character.
var characterEscape = {
  name: "characterEscape",
  tokenize: tokenizeCharacterEscape,
}
function tokenizeCharacterEscape(effects, ok, nok) {
  return start
  // Consume the `\` marker.
  function start(code) {
    effects.enter("characterEscape")
    effects.enter("escapeMarker")
    effects.consume(code)
    effects.exit("escapeMarker")
    return open
  }
  // Only ASCII punctuation may be escaped; anything else fails the attempt.
  function open(code) {
    if (!asciiPunctuation(code)) {
      return nok(code)
    }
    effects.enter("characterEscapeValue")
    effects.consume(code)
    effects.exit("characterEscapeValue")
    effects.exit("characterEscape")
    return ok
  }
}
// node_modules/parse-entities/decode-entity.browser.js
// Char code for `;`, used to detect failed entity decoding.
var semicolon = 59
// Lazily created detached element reused as an HTML entity decoder
// (browser-only shim — relies on a DOM `document`).
var element
function decodeEntity(characters) {
  const entity = `&${characters};`
  element = element || document.createElement("i")
  element.innerHTML = entity
  const char = element.textContent
  // If the output still ends in `;`, the entity wasn't recognized (except
  // for `&semi;` itself, which legitimately decodes to `;`).
  if (char.charCodeAt(char.length - 1) === semicolon && characters !== "semi") {
    return false
  }
  // Unchanged output also means the entity was unknown.
  if (char === entity) {
    return false
  }
  return char
}
// node_modules/micromark-core-commonmark/lib/character-reference.js
// Character reference construct: `&name;`, `&#digits;`, or `&#xhex;`.
var characterReference = {
  name: "characterReference",
  tokenize: tokenizeCharacterReference,
}
function tokenizeCharacterReference(effects, ok, nok) {
  const self = this
  // Number of value characters consumed so far.
  let size = 0
  // Maximum value length and character test for the current form
  // (named: 31/alphanumeric, decimal: 7/digit, hex: 6/hex digit).
  let max
  let test
  return start
  // `&` marker.
  function start(code) {
    effects.enter("characterReference")
    effects.enter("characterReferenceMarker")
    effects.consume(code)
    effects.exit("characterReferenceMarker")
    return open
  }
  // `#` (35) switches to a numeric reference; otherwise expect a name.
  function open(code) {
    if (code === 35) {
      effects.enter("characterReferenceMarkerNumeric")
      effects.consume(code)
      effects.exit("characterReferenceMarkerNumeric")
      return numeric
    }
    effects.enter("characterReferenceValue")
    max = 31
    test = asciiAlphanumeric
    return value2(code)
  }
  // `x`/`X` after `#` means hexadecimal; otherwise decimal.
  function numeric(code) {
    if (code === 88 || code === 120) {
      effects.enter("characterReferenceMarkerHexadecimal")
      effects.consume(code)
      effects.exit("characterReferenceMarkerHexadecimal")
      effects.enter("characterReferenceValue")
      max = 6
      test = asciiHexDigit
      return value2
    }
    effects.enter("characterReferenceValue")
    max = 7
    test = asciiDigit
    return value2(code)
  }
  function value2(code) {
    let token
    // `;` closes the reference — but only after at least one value char,
    // and named references must actually exist.
    if (code === 59 && size) {
      token = effects.exit("characterReferenceValue")
      if (
        test === asciiAlphanumeric &&
        !decodeEntity(self.sliceSerialize(token))
      ) {
        return nok(code)
      }
      effects.enter("characterReferenceMarker")
      effects.consume(code)
      effects.exit("characterReferenceMarker")
      effects.exit("characterReference")
      return ok
    }
    if (test(code) && size++ < max) {
      effects.consume(code)
      return value2
    }
    return nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/code-fenced.js
// Fenced code construct (``` or ~~~). `concrete`: containers cannot start
// inside it.
var codeFenced = {
  name: "codeFenced",
  tokenize: tokenizeCodeFenced,
  concrete: true,
}
function tokenizeCodeFenced(effects, ok, nok) {
  const self = this
  const closingFenceConstruct = {
    tokenize: tokenizeClosingFence,
    partial: true,
  }
  const nonLazyLine = {
    tokenize: tokenizeNonLazyLine,
    partial: true,
  }
  const tail = this.events[this.events.length - 1]
  // Indentation of the opening fence; content lines may be dedented by up
  // to this amount.
  const initialPrefix =
    tail && tail[1].type === "linePrefix"
      ? tail[2].sliceSerialize(tail[1], true).length
      : 0
  // Length of the opening fence sequence (closing fence must be >= this).
  let sizeOpen = 0
  // The fence character, backtick (96) or tilde.
  let marker
  return start
  function start(code) {
    effects.enter("codeFenced")
    effects.enter("codeFencedFence")
    effects.enter("codeFencedFenceSequence")
    marker = code
    return sequenceOpen(code)
  }
  // Count the opening fence characters; at least 3 are required.
  function sequenceOpen(code) {
    if (code === marker) {
      effects.consume(code)
      sizeOpen++
      return sequenceOpen
    }
    effects.exit("codeFencedFenceSequence")
    return sizeOpen < 3
      ? nok(code)
      : factorySpace(effects, infoOpen, "whitespace")(code)
  }
  // Optional info string (e.g. the language) after the fence.
  function infoOpen(code) {
    if (code === null || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter("codeFencedFenceInfo")
    effects.enter("chunkString", {
      contentType: "string",
    })
    return info(code)
  }
  function info(code) {
    if (code === null || markdownLineEndingOrSpace(code)) {
      effects.exit("chunkString")
      effects.exit("codeFencedFenceInfo")
      return factorySpace(effects, infoAfter, "whitespace")(code)
    }
    // Backticks are not allowed in the info string of a backtick fence.
    if (code === 96 && code === marker) return nok(code)
    effects.consume(code)
    return info
  }
  // Optional meta text after the info string.
  function infoAfter(code) {
    if (code === null || markdownLineEnding(code)) {
      return openAfter(code)
    }
    effects.enter("codeFencedFenceMeta")
    effects.enter("chunkString", {
      contentType: "string",
    })
    return meta(code)
  }
  function meta(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("chunkString")
      effects.exit("codeFencedFenceMeta")
      return openAfter(code)
    }
    if (code === 96 && code === marker) return nok(code)
    effects.consume(code)
    return meta
  }
  function openAfter(code) {
    effects.exit("codeFencedFence")
    // When interrupting (checking whether this can start), stop here.
    return self.interrupt ? ok(code) : contentStart(code)
  }
  // Start of a content line: EOF ends the block; a line ending leads to
  // either the closing fence, more content (dedented), or EOF on laziness.
  function contentStart(code) {
    if (code === null) {
      return after(code)
    }
    if (markdownLineEnding(code)) {
      return effects.attempt(
        nonLazyLine,
        effects.attempt(
          closingFenceConstruct,
          after,
          initialPrefix
            ? factorySpace(
                effects,
                contentStart,
                "linePrefix",
                initialPrefix + 1,
              )
            : contentStart,
        ),
        after,
      )(code)
    }
    effects.enter("codeFlowValue")
    return contentContinue(code)
  }
  function contentContinue(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("codeFlowValue")
      return contentStart(code)
    }
    effects.consume(code)
    return contentContinue
  }
  function after(code) {
    effects.exit("codeFenced")
    return ok(code)
  }
  // Partial: a line ending that is not a lazy continuation line.
  function tokenizeNonLazyLine(effects2, ok2, nok2) {
    const self2 = this
    return start2
    function start2(code) {
      effects2.enter("lineEnding")
      effects2.consume(code)
      effects2.exit("lineEnding")
      return lineStart
    }
    function lineStart(code) {
      return self2.parser.lazy[self2.now().line] ? nok2(code) : ok2(code)
    }
  }
  // Partial: the closing fence — same marker, at least `sizeOpen` long,
  // with only whitespace after it.
  function tokenizeClosingFence(effects2, ok2, nok2) {
    let size = 0
    return factorySpace(
      effects2,
      closingSequenceStart,
      "linePrefix",
      this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4,
    )
    function closingSequenceStart(code) {
      effects2.enter("codeFencedFence")
      effects2.enter("codeFencedFenceSequence")
      return closingSequence(code)
    }
    function closingSequence(code) {
      if (code === marker) {
        effects2.consume(code)
        size++
        return closingSequence
      }
      if (size < sizeOpen) return nok2(code)
      effects2.exit("codeFencedFenceSequence")
      return factorySpace(effects2, closingSequenceEnd, "whitespace")(code)
    }
    function closingSequenceEnd(code) {
      if (code === null || markdownLineEnding(code)) {
        effects2.exit("codeFencedFence")
        return ok2(code)
      }
      return nok2(code)
    }
  }
}
// node_modules/micromark-core-commonmark/lib/code-indented.js
// Indented code construct: lines indented by at least 4 spaces.
var codeIndented = {
  name: "codeIndented",
  tokenize: tokenizeCodeIndented,
}
// Partial: further (possibly blank) lines belonging to the indented block.
var indentedContent = {
  tokenize: tokenizeIndentedContent,
  partial: true,
}
function tokenizeCodeIndented(effects, ok, nok) {
  const self = this
  return start
  function start(code) {
    effects.enter("codeIndented")
    // Consume up to 5 characters of prefix (4 spaces, or fewer plus a tab).
    return factorySpace(effects, afterStartPrefix, "linePrefix", 4 + 1)(code)
  }
  function afterStartPrefix(code) {
    // Require that the consumed prefix was actually >= 4 columns wide.
    const tail = self.events[self.events.length - 1]
    return tail &&
      tail[1].type === "linePrefix" &&
      tail[2].sliceSerialize(tail[1], true).length >= 4
      ? afterPrefix(code)
      : nok(code)
  }
  function afterPrefix(code) {
    if (code === null) {
      return after(code)
    }
    // At a line ending, try to continue with another indented line.
    if (markdownLineEnding(code)) {
      return effects.attempt(indentedContent, afterPrefix, after)(code)
    }
    effects.enter("codeFlowValue")
    return content3(code)
  }
  function content3(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("codeFlowValue")
      return afterPrefix(code)
    }
    effects.consume(code)
    return content3
  }
  function after(code) {
    effects.exit("codeIndented")
    return ok(code)
  }
}
// Partial tokenizer: skip line endings (blank lines) and succeed when the
// next line is again indented by >= 4 columns; lazy lines never continue.
function tokenizeIndentedContent(effects, ok, nok) {
  const self = this
  return start
  function start(code) {
    // A lazy continuation line cannot extend indented code.
    if (self.parser.lazy[self.now().line]) {
      return nok(code)
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      return start
    }
    return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code)
  }
  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1]
    return tail &&
      tail[1].type === "linePrefix" &&
      tail[2].sliceSerialize(tail[1], true).length >= 4
      ? ok(code)
      : markdownLineEnding(code)
      ? start(code)
      : nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/code-text.js
// Inline code (code span) construct.
var codeText = {
  name: "codeText",
  tokenize: tokenizeCodeText,
  resolve: resolveCodeText,
  previous,
}
// Post-process a code span's events: strip one leading and trailing
// space/line ending as padding (when both exist and the span has other
// content), then merge the interior into per-line `codeTextData` tokens.
function resolveCodeText(events) {
  // First event after the opening sequence / last before the closing one.
  let tailExitIndex = events.length - 4
  let headEnterIndex = 3
  let index2
  let enter
  // Padding applies only when both edges are a space or line ending and
  // there is real data somewhere between them.
  if (
    (events[headEnterIndex][1].type === "lineEnding" ||
      events[headEnterIndex][1].type === "space") &&
    (events[tailExitIndex][1].type === "lineEnding" ||
      events[tailExitIndex][1].type === "space")
  ) {
    index2 = headEnterIndex
    while (++index2 < tailExitIndex) {
      if (events[index2][1].type === "codeTextData") {
        events[headEnterIndex][1].type = "codeTextPadding"
        events[tailExitIndex][1].type = "codeTextPadding"
        headEnterIndex += 2
        tailExitIndex -= 2
        break
      }
    }
  }
  // Merge consecutive non-line-ending events into single data tokens.
  index2 = headEnterIndex - 1
  tailExitIndex++
  while (++index2 <= tailExitIndex) {
    if (enter === void 0) {
      if (index2 !== tailExitIndex && events[index2][1].type !== "lineEnding") {
        enter = index2
      }
    } else if (
      index2 === tailExitIndex ||
      events[index2][1].type === "lineEnding"
    ) {
      events[enter][1].type = "codeTextData"
      // Collapse the merged range into one enter/exit pair.
      if (index2 !== enter + 2) {
        events[enter][1].end = events[index2 - 1][1].end
        events.splice(enter + 2, index2 - enter - 2)
        tailExitIndex -= index2 - enter - 2
        index2 = enter + 2
      }
      enter = void 0
    }
  }
  return events
}
// Check whether a code span may start here: the previous character must not
// be an unescaped backtick (U+0060).
function previous(code) {
  if (code !== 96) {
    return true
  }
  const tail = this.events[this.events.length - 1]
  return tail[1].type === "characterEscape"
}
// Tokenizer for a code span: an opening backtick sequence, then data,
// spaces, and line endings, until a closing backtick sequence of exactly
// the same size.
function tokenizeCodeText(effects, ok, nok) {
  const self = this
  let sizeOpen = 0
  let size
  let token
  return start
  function start(code) {
    effects.enter("codeText")
    effects.enter("codeTextSequence")
    return openingSequence(code)
  }
  // Count the opening backticks (U+0060).
  function openingSequence(code) {
    if (code === 96) {
      effects.consume(code)
      sizeOpen++
      return openingSequence
    }
    effects.exit("codeTextSequence")
    return gap(code)
  }
  // Between chunks: EOF fails, a backtick starts a (potential) closing
  // sequence, spaces and line endings are tokenized separately, anything
  // else is data.
  function gap(code) {
    if (code === null) {
      return nok(code)
    }
    if (code === 96) {
      token = effects.enter("codeTextSequence")
      size = 0
      return closingSequence(code)
    }
    if (code === 32) {
      effects.enter("space")
      effects.consume(code)
      effects.exit("space")
      return gap
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      return gap
    }
    effects.enter("codeTextData")
    return data(code)
  }
  // Consume data until EOF, space, backtick, or line ending.
  function data(code) {
    if (
      code === null ||
      code === 32 ||
      code === 96 ||
      markdownLineEnding(code)
    ) {
      effects.exit("codeTextData")
      return gap(code)
    }
    effects.consume(code)
    return data
  }
  // Count closing backticks; only a sequence of exactly `sizeOpen` closes
  // the span — otherwise the sequence is downgraded to data.
  function closingSequence(code) {
    if (code === 96) {
      effects.consume(code)
      size++
      return closingSequence
    }
    if (size === sizeOpen) {
      effects.exit("codeTextSequence")
      effects.exit("codeText")
      return ok(code)
    }
    token.type = "codeTextData"
    return data(code)
  }
}
// node_modules/micromark-util-subtokenize/index.js
// Tokenize embedded tokens (events whose token has a `contentType`) in
// place, splicing their subevents into `events`. Returns `true` when the
// events are fully done (no more subcontent was found).
function subtokenize(events) {
  const jumps = {}
  let index2 = -1
  let event
  let lineIndex
  let otherIndex
  let otherEvent
  let parameters
  let subevents
  let more
  while (++index2 < events.length) {
    // Skip over ranges already replaced by subcontent.
    while (index2 in jumps) {
      index2 = jumps[index2]
    }
    event = events[index2]
    // GFM-tasklist support: mark the text chunks of the first content of a
    // list item so the nested tokenizer can detect checkboxes.
    if (
      index2 &&
      event[1].type === "chunkFlow" &&
      events[index2 - 1][1].type === "listItemPrefix"
    ) {
      subevents = event[1]._tokenizer.events
      otherIndex = 0
      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === "lineEndingBlank"
      ) {
        otherIndex += 2
      }
      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === "content"
      ) {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === "content") {
            break
          }
          if (subevents[otherIndex][1].type === "chunkText") {
            subevents[otherIndex][1]._isInFirstContentOfListItem = true
            otherIndex++
          }
        }
      }
    }
    if (event[0] === "enter") {
      // Entering a token with embedded content: expand it via `subcontent`.
      if (event[1].contentType) {
        Object.assign(jumps, subcontent(events, index2))
        index2 = jumps[index2]
        more = true
      }
    } else if (event[1]._container) {
      // Exiting a container: move trailing (blank) line endings out of the
      // container so it ends before them.
      otherIndex = index2
      lineIndex = void 0
      while (otherIndex--) {
        otherEvent = events[otherIndex]
        if (
          otherEvent[1].type === "lineEnding" ||
          otherEvent[1].type === "lineEndingBlank"
        ) {
          if (otherEvent[0] === "enter") {
            if (lineIndex) {
              events[lineIndex][1].type = "lineEndingBlank"
            }
            otherEvent[1].type = "lineEnding"
            lineIndex = otherIndex
          }
        } else {
          break
        }
      }
      if (lineIndex) {
        // End the container token before the first of those line endings
        // and move its exit event in front of them.
        event[1].end = Object.assign({}, events[lineIndex][1].start)
        parameters = events.slice(lineIndex, index2)
        parameters.unshift(event)
        splice(events, lineIndex, index2 - lineIndex + 1, parameters)
      }
    }
  }
  return !more
}
// Tokenize the linked chunks of the token at `events[eventIndex]` with a
// nested tokenizer and splice the resulting events over the chunk events.
// Returns a map of jumps (start index -> end index) over the new events.
function subcontent(events, eventIndex) {
  const token = events[eventIndex][1]
  const context = events[eventIndex][2]
  let startPosition = eventIndex - 1
  const startPositions = []
  // Reuse the chunk's tokenizer if it already has one, otherwise create a
  // tokenizer for the token's content type.
  const tokenizer =
    token._tokenizer || context.parser[token.contentType](token.start)
  const childEvents = tokenizer.events
  const jumps = []
  const gaps = {}
  let stream
  let previous2
  let index2 = -1
  let current = token
  let adjust = 0
  let start = 0
  const breaks = [start]
  // Feed every linked chunk into the nested tokenizer, remembering where
  // each chunk's enter event sits in `events`.
  while (current) {
    while (events[++startPosition][1] !== current) {}
    startPositions.push(startPosition)
    if (!current._tokenizer) {
      stream = context.sliceStream(current)
      if (!current.next) {
        stream.push(null)
      }
      if (previous2) {
        tokenizer.defineSkip(current.start)
      }
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true
      }
      tokenizer.write(stream)
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = void 0
      }
    }
    previous2 = current
    current = current.next
  }
  // Find break points in the child events: a token that exits right after
  // entering while spanning multiple lines marks a chunk boundary.
  current = token
  while (++index2 < childEvents.length) {
    if (
      childEvents[index2][0] === "exit" &&
      childEvents[index2 - 1][0] === "enter" &&
      childEvents[index2][1].type === childEvents[index2 - 1][1].type &&
      childEvents[index2][1].start.line !== childEvents[index2][1].end.line
    ) {
      start = index2 + 1
      breaks.push(start)
      current._tokenizer = void 0
      current.previous = void 0
      current = current.next
    }
  }
  tokenizer.events = []
  if (current) {
    current._tokenizer = void 0
    current.previous = void 0
  } else {
    breaks.pop()
  }
  // Splice the child event slices over the original chunk events, from the
  // back so earlier positions stay valid.
  index2 = breaks.length
  while (index2--) {
    const slice = childEvents.slice(breaks[index2], breaks[index2 + 1])
    const start2 = startPositions.pop()
    jumps.unshift([start2, start2 + slice.length - 1])
    splice(events, start2, 2, slice)
  }
  // Convert the jumps to absolute positions in the updated event list.
  index2 = -1
  while (++index2 < jumps.length) {
    gaps[adjust + jumps[index2][0]] = adjust + jumps[index2][1]
    adjust += jumps[index2][1] - jumps[index2][0] - 1
  }
  return gaps
}
// node_modules/micromark-core-commonmark/lib/content.js
// Construct for paragraph-like content chunks.
var content2 = {
  tokenize: tokenizeContent,
  resolve: resolveContent,
}
// Partial construct checking whether content continues on the next line.
var continuationConstruct = {
  tokenize: tokenizeContinuation,
  partial: true,
}
// Resolver for content: expand embedded content chunks in place.
function resolveContent(events) {
  subtokenize(events)
  return events
}
// Tokenizer for content: consumes data, and at each line ending checks
// whether the next line continues the content; continuing lines are linked
// as a chain of `chunkContent` tokens.
function tokenizeContent(effects, ok) {
  let previous2
  return start
  function start(code) {
    effects.enter("content")
    previous2 = effects.enter("chunkContent", {
      contentType: "content",
    })
    return data(code)
  }
  function data(code) {
    if (code === null) {
      return contentEnd(code)
    }
    // At a line ending, peek ahead: does the content continue?
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd,
      )(code)
    }
    effects.consume(code)
    return data
  }
  function contentEnd(code) {
    effects.exit("chunkContent")
    effects.exit("content")
    return ok(code)
  }
  // Continue: close the current chunk and link a new one to it.
  function contentContinue(code) {
    effects.consume(code)
    effects.exit("chunkContent")
    previous2.next = effects.enter("chunkContent", {
      contentType: "content",
      previous: previous2,
    })
    previous2 = previous2.next
    return data
  }
}
// Partial tokenizer deciding whether content continues after a line ending:
// it does unless the line is blank/EOF or another flow construct interrupts
// (indented lines count as continuation when code-indented is enabled).
function tokenizeContinuation(effects, ok, nok) {
  const self = this
  return startLookahead
  function startLookahead(code) {
    effects.exit("chunkContent")
    effects.enter("lineEnding")
    effects.consume(code)
    effects.exit("lineEnding")
    return factorySpace(effects, prefixed, "linePrefix")
  }
  function prefixed(code) {
    if (code === null || markdownLineEnding(code)) {
      return nok(code)
    }
    const tail = self.events[self.events.length - 1]
    // A >= 4 space prefix would be indented code, which cannot interrupt
    // content — so the content continues.
    if (
      !self.parser.constructs.disable.null.includes("codeIndented") &&
      tail &&
      tail[1].type === "linePrefix" &&
      tail[2].sliceSerialize(tail[1], true).length >= 4
    ) {
      return ok(code)
    }
    // Otherwise content continues only if no flow construct interrupts here.
    return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
  }
}
// node_modules/micromark-factory-destination/index.js
// Factory for a link/definition destination: either an enclosed form
// `<...>` or a raw form with balanced parentheses (up to `max` levels,
// defaulting to unlimited).
function factoryDestination(
  effects,
  ok,
  nok,
  type,
  literalType,
  literalMarkerType,
  rawType,
  stringType,
  max,
) {
  const limit = max || Number.POSITIVE_INFINITY
  let balance = 0
  return start
  function start(code) {
    // `<` (U+003C) starts the enclosed form.
    if (code === 60) {
      effects.enter(type)
      effects.enter(literalType)
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      return destinationEnclosedBefore
    }
    // EOF, `)`, or an ASCII control character cannot start a destination.
    if (code === null || code === 41 || asciiControl(code)) {
      return nok(code)
    }
    effects.enter(type)
    effects.enter(rawType)
    effects.enter(stringType)
    effects.enter("chunkString", {
      contentType: "string",
    })
    return destinationRaw(code)
  }
  // Directly after `<`: an immediate `>` is an empty enclosed destination.
  function destinationEnclosedBefore(code) {
    if (code === 62) {
      effects.enter(literalMarkerType)
      effects.consume(code)
      effects.exit(literalMarkerType)
      effects.exit(literalType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    effects.enter("chunkString", {
      contentType: "string",
    })
    return destinationEnclosed(code)
  }
  // Inside `<...>`: `>` closes, EOF / `<` / line ending are invalid.
  function destinationEnclosed(code) {
    if (code === 62) {
      effects.exit("chunkString")
      effects.exit(stringType)
      return destinationEnclosedBefore(code)
    }
    if (code === null || code === 60 || markdownLineEnding(code)) {
      return nok(code)
    }
    effects.consume(code)
    // `\` (U+005C) may escape the next character.
    return code === 92 ? destinationEnclosedEscape : destinationEnclosed
  }
  // After `\` in the enclosed form: `<`, `>`, `\` may be escaped.
  function destinationEnclosedEscape(code) {
    if (code === 60 || code === 62 || code === 92) {
      effects.consume(code)
      return destinationEnclosed
    }
    return destinationEnclosed(code)
  }
  // Raw form: track parenthesis balance; whitespace or an unbalanced `)`
  // ends the destination.
  function destinationRaw(code) {
    if (code === 40) {
      if (++balance > limit) return nok(code)
      effects.consume(code)
      return destinationRaw
    }
    if (code === 41) {
      if (!balance--) {
        effects.exit("chunkString")
        effects.exit(stringType)
        effects.exit(rawType)
        effects.exit(type)
        return ok(code)
      }
      effects.consume(code)
      return destinationRaw
    }
    if (code === null || markdownLineEndingOrSpace(code)) {
      if (balance) return nok(code)
      effects.exit("chunkString")
      effects.exit(stringType)
      effects.exit(rawType)
      effects.exit(type)
      return ok(code)
    }
    if (asciiControl(code)) return nok(code)
    effects.consume(code)
    return code === 92 ? destinationRawEscape : destinationRaw
  }
  // After `\` in the raw form: `(`, `)`, `\` may be escaped.
  function destinationRawEscape(code) {
    if (code === 40 || code === 41 || code === 92) {
      effects.consume(code)
      return destinationRaw
    }
    return destinationRaw(code)
  }
}
// node_modules/micromark-factory-label/index.js
// Factory for a label `[...]` (links, definitions): limited to 999
// characters, must contain at least one non-space character, may not
// contain an unescaped `[`.
function factoryLabel(effects, ok, nok, type, markerType, stringType) {
  const self = this
  let size = 0
  // Whether a non-whitespace character was seen inside the label.
  let data
  return start
  function start(code) {
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    effects.enter(stringType)
    return atBreak
  }
  function atBreak(code) {
    // Invalid: EOF, nested `[`, an empty `]`, a leading `^` when a
    // footnote extension reserves it, or an over-long label.
    if (
      code === null ||
      code === 91 ||
      (code === 93 && !data) ||
      (code === 94 &&
        !size &&
        "_hiddenFootnoteSupport" in self.parser.constructs) ||
      size > 999
    ) {
      return nok(code)
    }
    // `]` (U+005D) closes the label.
    if (code === 93) {
      effects.exit(stringType)
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      return atBreak
    }
    effects.enter("chunkString", {
      contentType: "string",
    })
    return label(code)
  }
  // Consume label content, counting size and tracking non-space data.
  function label(code) {
    if (
      code === null ||
      code === 91 ||
      code === 93 ||
      markdownLineEnding(code) ||
      size++ > 999
    ) {
      effects.exit("chunkString")
      return atBreak(code)
    }
    effects.consume(code)
    data = data || !markdownSpace(code)
    return code === 92 ? labelEscape : label
  }
  // After `\`: `[`, `\`, `]` may be escaped.
  function labelEscape(code) {
    if (code === 91 || code === 92 || code === 93) {
      effects.consume(code)
      size++
      return label
    }
    return label(code)
  }
}
// node_modules/micromark-factory-title/index.js
// Factory for a title: `"..."`, `'...'`, or `(...)`; the closing marker is
// derived from the opening one, and titles may span multiple lines.
function factoryTitle(effects, ok, nok, type, markerType, stringType) {
  let marker
  return start
  function start(code) {
    effects.enter(type)
    effects.enter(markerType)
    effects.consume(code)
    effects.exit(markerType)
    // `(` (U+0028) closes with `)` (U+0029); quotes close with themselves.
    marker = code === 40 ? 41 : code
    return atFirstTitleBreak
  }
  // Directly after the opening marker: an immediate closer means an empty
  // title.
  function atFirstTitleBreak(code) {
    if (code === marker) {
      effects.enter(markerType)
      effects.consume(code)
      effects.exit(markerType)
      effects.exit(type)
      return ok
    }
    effects.enter(stringType)
    return atTitleBreak(code)
  }
  function atTitleBreak(code) {
    if (code === marker) {
      effects.exit(stringType)
      return atFirstTitleBreak(marker)
    }
    if (code === null) {
      return nok(code)
    }
    // Titles may span lines; eat the ending plus the next line's prefix.
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      return factorySpace(effects, atTitleBreak, "linePrefix")
    }
    effects.enter("chunkString", {
      contentType: "string",
    })
    return title(code)
  }
  // Consume title content until the closing marker, EOF, or a line ending.
  function title(code) {
    if (code === marker || code === null || markdownLineEnding(code)) {
      effects.exit("chunkString")
      return atTitleBreak(code)
    }
    effects.consume(code)
    return code === 92 ? titleEscape : title
  }
  // After `\`: the marker or `\` itself may be escaped.
  function titleEscape(code) {
    if (code === marker || code === 92) {
      effects.consume(code)
      return title
    }
    return title(code)
  }
}
// node_modules/micromark-factory-whitespace/index.js
// Factory for arbitrary markdown whitespace: line endings plus spaces/tabs.
// Spaces before the first line ending are a `lineSuffix`, after it a
// `linePrefix`.
function factoryWhitespace(effects, ok) {
  let seen
  return start
  function start(code) {
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding")
      effects.consume(code)
      effects.exit("lineEnding")
      seen = true
      return start
    }
    if (markdownSpace(code)) {
      return factorySpace(
        effects,
        start,
        seen ? "linePrefix" : "lineSuffix",
      )(code)
    }
    return ok(code)
  }
}
// node_modules/micromark-util-normalize-identifier/index.js
function normalizeIdentifier(value2) {
return value2
.replace(/[\t\n\r ]+/g, " ")
.replace(/^ | $/g, "")
.toLowerCase()
.toUpperCase()
}
// node_modules/micromark-core-commonmark/lib/definition.js
// Construct for link reference definitions: `[label]: destination "title"`.
var definition = {
  name: "definition",
  tokenize: tokenizeDefinition,
}
// Partial construct for the optional title part of a definition.
var titleConstruct = {
  tokenize: tokenizeTitle,
  partial: true,
}
// Tokenizer for a definition: a label, `:`, whitespace, a destination, and
// an optional title; the normalized identifier is recorded on the parser.
function tokenizeDefinition(effects, ok, nok) {
  const self = this
  let identifier
  return start
  function start(code) {
    effects.enter("definition")
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      nok,
      "definitionLabel",
      "definitionLabelMarker",
      "definitionLabelString",
    )(code)
  }
  function labelAfter(code) {
    // Normalize the label text (without the surrounding brackets).
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1),
    )
    // `:` (U+003A) must follow the label.
    if (code === 58) {
      effects.enter("definitionMarker")
      effects.consume(code)
      effects.exit("definitionMarker")
      return factoryWhitespace(
        effects,
        factoryDestination(
          effects,
          effects.attempt(
            titleConstruct,
            factorySpace(effects, after, "whitespace"),
            factorySpace(effects, after, "whitespace"),
          ),
          nok,
          "definitionDestination",
          "definitionDestinationLiteral",
          "definitionDestinationLiteralMarker",
          "definitionDestinationRaw",
          "definitionDestinationString",
        ),
      )
    }
    return nok(code)
  }
  // The definition must be followed by EOF or a line ending; register the
  // identifier if it is new.
  function after(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("definition")
      if (!self.parser.defined.includes(identifier)) {
        self.parser.defined.push(identifier)
      }
      return ok(code)
    }
    return nok(code)
  }
}
// Partial tokenizer for a definition title: whitespace, then a title
// opened by `"`, `'`, or `(`, then only whitespace until the line end.
function tokenizeTitle(effects, ok, nok) {
  return start
  function start(code) {
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, before)(code)
      : nok(code)
  }
  function before(code) {
    // `"` (U+0022), `'` (U+0027), or `(` (U+0028) opens a title.
    if (code === 34 || code === 39 || code === 40) {
      return factoryTitle(
        effects,
        factorySpace(effects, after, "whitespace"),
        nok,
        "definitionTitle",
        "definitionTitleMarker",
        "definitionTitleString",
      )(code)
    }
    return nok(code)
  }
  function after(code) {
    return code === null || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
// Construct for a hard line break written as a backslash before the EOL.
var hardBreakEscape = {
  name: "hardBreakEscape",
  tokenize: tokenizeHardBreakEscape,
}
// Tokenizer for a backslash hard break: consume the `\`, then succeed only
// if a line ending follows.
function tokenizeHardBreakEscape(effects, ok, nok) {
  return start
  function start(code) {
    effects.enter("hardBreakEscape")
    effects.enter("escapeMarker")
    effects.consume(code)
    return open
  }
  function open(code) {
    if (markdownLineEnding(code)) {
      effects.exit("escapeMarker")
      effects.exit("hardBreakEscape")
      return ok(code)
    }
    return nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/heading-atx.js
// Construct for ATX headings (`# Heading`).
var headingAtx = {
  name: "headingAtx",
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx,
}
// Resolver for ATX headings: trim surrounding whitespace and a trailing
// `#` sequence, then wrap the remaining events in a single heading-text
// token containing one text chunk.
function resolveHeadingAtx(events, context) {
  let contentEnd = events.length - 2
  let contentStart = 3
  let content3
  let text3
  // Skip whitespace after the opening sequence.
  if (events[contentStart][1].type === "whitespace") {
    contentStart += 2
  }
  // Skip whitespace before the closing fence/line ending.
  if (
    contentEnd - 2 > contentStart &&
    events[contentEnd][1].type === "whitespace"
  ) {
    contentEnd -= 2
  }
  // Drop a trailing `#` sequence (and the whitespace before it, if any).
  if (
    events[contentEnd][1].type === "atxHeadingSequence" &&
    (contentStart === contentEnd - 1 ||
      (contentEnd - 4 > contentStart &&
        events[contentEnd - 2][1].type === "whitespace"))
  ) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
  }
  // Replace what remains with an `atxHeadingText` token holding one
  // `chunkText` for later inline tokenization.
  if (contentEnd > contentStart) {
    content3 = {
      type: "atxHeadingText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
    }
    text3 = {
      type: "chunkText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: "text",
    }
    splice(events, contentStart, contentEnd - contentStart + 1, [
      ["enter", content3, context],
      ["enter", text3, context],
      ["exit", text3, context],
      ["exit", content3, context],
    ])
  }
  return events
}
// Tokenizer for an ATX heading: 1-6 `#` markers, then alternating text,
// whitespace, and further `#` sequences until the end of the line.
function tokenizeHeadingAtx(effects, ok, nok) {
  const self = this
  let size = 0
  return start
  function start(code) {
    effects.enter("atxHeading")
    effects.enter("atxHeadingSequence")
    return fenceOpenInside(code)
  }
  // Count opening `#` (U+0023) markers; more than 6 fails.
  function fenceOpenInside(code) {
    if (code === 35 && size++ < 6) {
      effects.consume(code)
      return fenceOpenInside
    }
    // The sequence must be followed by EOF or whitespace.
    if (code === null || markdownLineEndingOrSpace(code)) {
      effects.exit("atxHeadingSequence")
      return self.interrupt ? ok(code) : headingBreak(code)
    }
    return nok(code)
  }
  // Between pieces of the heading: sequences, text, or whitespace.
  function headingBreak(code) {
    if (code === 35) {
      effects.enter("atxHeadingSequence")
      return sequence(code)
    }
    if (code === null || markdownLineEnding(code)) {
      effects.exit("atxHeading")
      return ok(code)
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, headingBreak, "whitespace")(code)
    }
    effects.enter("atxHeadingText")
    return data(code)
  }
  // Consume a run of `#` markers inside/after the heading text.
  function sequence(code) {
    if (code === 35) {
      effects.consume(code)
      return sequence
    }
    effects.exit("atxHeadingSequence")
    return headingBreak(code)
  }
  // Consume heading text until `#`, whitespace, or the end of the line.
  function data(code) {
    if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
      effects.exit("atxHeadingText")
      return headingBreak(code)
    }
    effects.consume(code)
    return data
  }
}
// node_modules/micromark-util-html-tag-name/index.js
// Tag names that open a "basic" HTML flow block (see `kind = 6` in
// `tokenizeHtmlFlow`); list must stay lowercase and sorted for lookups via
// `includes(buffer.toLowerCase())`.
var htmlBlockNames = [
  "address",
  "article",
  "aside",
  "base",
  "basefont",
  "blockquote",
  "body",
  "caption",
  "center",
  "col",
  "colgroup",
  "dd",
  "details",
  "dialog",
  "dir",
  "div",
  "dl",
  "dt",
  "fieldset",
  "figcaption",
  "figure",
  "footer",
  "form",
  "frame",
  "frameset",
  "h1",
  "h2",
  "h3",
  "h4",
  "h5",
  "h6",
  "head",
  "header",
  "hr",
  "html",
  "iframe",
  "legend",
  "li",
  "link",
  "main",
  "menu",
  "menuitem",
  "nav",
  "noframes",
  "ol",
  "optgroup",
  "option",
  "p",
  "param",
  "section",
  "source",
  "summary",
  "table",
  "tbody",
  "td",
  "tfoot",
  "th",
  "thead",
  "title",
  "tr",
  "track",
  "ul",
]
// Tag names whose content is treated as raw text until the matching close
// tag (see `kind = 1` in `tokenizeHtmlFlow`).
var htmlRawNames = ["pre", "script", "style", "textarea"]
// node_modules/micromark-core-commonmark/lib/html-flow.js
// Construct for HTML (flow): concrete, so containers cannot interrupt it.
var htmlFlow = {
  name: "htmlFlow",
  tokenize: tokenizeHtmlFlow,
  resolveTo: resolveToHtmlFlow,
  concrete: true,
}
// Partial construct checking whether the next line is blank.
var nextBlankConstruct = {
  tokenize: tokenizeNextBlank,
  partial: true,
}
// Resolver for HTML (flow): if the block is preceded by a line prefix,
// fold that prefix into the block by moving the start points back and
// removing the prefix events.
function resolveToHtmlFlow(events) {
  let index2 = events.length
  // Find the enter event of the most recent `htmlFlow` token.
  while (index2--) {
    const [kind, token] = events[index2]
    if (kind === "enter" && token.type === "htmlFlow") {
      break
    }
  }
  if (index2 > 1) {
    const candidate = events[index2 - 2][1]
    if (candidate.type === "linePrefix") {
      // Start both the block and its first data token at the prefix, then
      // drop the now-absorbed prefix events.
      events[index2][1].start = candidate.start
      events[index2 + 1][1].start = candidate.start
      events.splice(index2 - 2, 2)
    }
  }
  return events
}
function tokenizeHtmlFlow(effects, ok, nok) {
const self = this
let kind
let startTag
let buffer2
let index2
let marker
return start
function start(code) {
effects.enter("htmlFlow")
effects.enter("htmlFlowData")
effects.consume(code)
return open
}
function open(code) {
if (code === 33) {
effects.consume(code)
return declarationStart
}
if (code === 47) {
effects.consume(code)
return tagCloseStart
}
if (code === 63) {
effects.consume(code)
kind = 3
return self.interrupt ? ok : continuationDeclarationInside
}
if (asciiAlpha(code)) {
effects.consume(code)
buffer2 = String.fromCharCode(code)
startTag = true
return tagName
}
return nok(code)
}
function declarationStart(code) {
if (code === 45) {
effects.consume(code)
kind = 2
return commentOpenInside
}
if (code === 91) {
effects.consume(code)
kind = 5
buffer2 = "CDATA["
index2 = 0
return cdataOpenInside
}
if (asciiAlpha(code)) {
effects.consume(code)
kind = 4
return self.interrupt ? ok : continuationDeclarationInside
}
return nok(code)
}
function commentOpenInside(code) {
if (code === 45) {
effects.consume(code)
return self.interrupt ? ok : continuationDeclarationInside
}
return nok(code)
}
function cdataOpenInside(code) {
if (code === buffer2.charCodeAt(index2++)) {
effects.consume(code)
return index2 === buffer2.length
? self.interrupt
? ok
: continuation
: cdataOpenInside
}
return nok(code)
}
function tagCloseStart(code) {
if (asciiAlpha(code)) {
effects.consume(code)
buffer2 = String.fromCharCode(code)
return tagName
}
return nok(code)
}
function tagName(code) {
if (
code === null ||
code === 47 ||
code === 62 ||
markdownLineEndingOrSpace(code)
) {
if (
code !== 47 &&
startTag &&
htmlRawNames.includes(buffer2.toLowerCase())
) {
kind = 1
return self.interrupt ? ok(code) : continuation(code)
}
if (htmlBlockNames.includes(buffer2.toLowerCase())) {
kind = 6
if (code === 47) {
effects.consume(code)
return basicSelfClosing
}
return self.interrupt ? ok(code) : continuation(code)
}
kind = 7
return self.interrupt && !self.parser.lazy[self.now().line]
? nok(code)
: startTag
? completeAttributeNameBefore(code)
: completeClosingTagAfter(code)
}
if (code === 45 || asciiAlphanumeric(code)) {
effects.consume(code)
buffer2 += String.fromCharCode(code)
return tagName
}
return nok(code)
}
function basicSelfClosing(code) {
if (code === 62) {
effects.consume(code)
return self.interrupt ? ok : continuation
}
return nok(code)
}
function completeClosingTagAfter(code) {
if (markdownSpace(code)) {
effects.consume(code)
return completeClosingTagAfter
}
return completeEnd(code)
}
function completeAttributeNameBefore(code) {
if (code === 47) {
effects.consume(code)
return completeEnd
}
if (code === 58 || code === 95 || asciiAlpha(code)) {
effects.consume(code)
return completeAttributeName
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeNameBefore
}
return completeEnd(code)
}
function completeAttributeName(code) {
if (
code === 45 ||
code === 46 ||
code === 58 ||
code === 95 ||
asciiAlphanumeric(code)
) {
effects.consume(code)
return completeAttributeName
}
return completeAttributeNameAfter(code)
}
function completeAttributeNameAfter(code) {
if (code === 61) {
effects.consume(code)
return completeAttributeValueBefore
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeNameAfter
}
return completeAttributeNameBefore(code)
}
function completeAttributeValueBefore(code) {
if (
code === null ||
code === 60 ||
code === 61 ||
code === 62 ||
code === 96
) {
return nok(code)
}
if (code === 34 || code === 39) {
effects.consume(code)
marker = code
return completeAttributeValueQuoted
}
if (markdownSpace(code)) {
effects.consume(code)
return completeAttributeValueBefore
}
marker = null
return completeAttributeValueUnquoted(code)
}
function completeAttributeValueQuoted(code) {
if (code === null || markdownLineEnding(code)) {
return nok(code)
}
if (code === marker) {
effects.consume(code)
return completeAttributeValueQuotedAfter
}
effects.consume(code)
return completeAttributeValueQuoted
}
function completeAttributeValueUnquoted(code) {
if (
code === null ||
code === 34 ||
code === 39 ||
code === 60 ||
code === 61 ||
code === 62 ||
code === 96 ||
markdownLineEndingOrSpace(code)
) {
return completeAttributeNameAfter(code)
}
effects.consume(code)
return completeAttributeValueUnquoted
}
function completeAttributeValueQuotedAfter(code) {
if (code === 47 || code === 62 || markdownSpace(code)) {
return completeAttributeNameBefore(code)
}
return nok(code)
}
function completeEnd(code) {
if (code === 62) {
effects.consume(code)
return completeAfter
}
return nok(code)
}
function completeAfter(code) {
if (markdownSpace(code)) {
effects.consume(code)
return completeAfter
}
return code === null || markdownLineEnding(code)
? continuation(code)
: nok(code)
}
function continuation(code) {
if (code === 45 && kind === 2) {
effects.consume(code)
return continuationCommentInside
}
if (code === 60 && kind === 1) {
effects.consume(code)
return continuationRawTagOpen
}
if (code === 62 && kind === 4) {
effects.consume(code)
return continuationClose
}
if (code === 63 && kind === 3) {
effects.consume(code)
return continuationDeclarationInside
}
if (code === 93 && kind === 5) {
effects.consume(code)
return continuationCharacterDataInside
}
if (markdownLineEnding(code) && (kind === 6 || kind === 7)) {
return effects.check(
nextBlankConstruct,
continuationClose,
continuationAtLineEnding,
)(code)
}
if (code === null || markdownLineEnding(code)) {
return continuationAtLineEnding(code)
}
effects.consume(code)
return continuation
}
function continuationAtLineEnding(code) {
effects.exit("htmlFlowData")
return htmlContinueStart(code)
}
function htmlContinueStart(code) {
if (code === null) {
return done(code)
}
if (markdownLineEnding(code)) {
return effects.attempt(
{
tokenize: htmlLineEnd,
partial: true,
},
htmlContinueStart,
done,
)(code)
}
effects.enter("htmlFlowData")
return continuation(code)
}
function htmlLineEnd(effects2, ok2, nok2) {
return start2
function start2(code) {
effects2.enter("lineEnding")
effects2.consume(code)
effects2.exit("lineEnding")
return lineStart
}
function lineStart(code) {
return self.parser.lazy[self.now().line] ? nok2(code) : ok2(code)
}
}
function continuationCommentInside(code) {
if (code === 45) {
effects.consume(code)
return continuationDeclarationInside
}
return continuation(code)
}
function continuationRawTagOpen(code) {
if (code === 47) {
effects.consume(code)
buffer2 = ""
return continuationRawEndTag
}
return continuation(code)
}
function continuationRawEndTag(code) {
if (code === 62 && htmlRawNames.includes(buffer2.toLowerCase())) {
effects.consume(code)
return continuationClose
}
if (asciiAlpha(code) && buffer2.length < 8) {
effects.consume(code)
buffer2 += String.fromCharCode(code)
return continuationRawEndTag
}
return continuation(code)
}
function continuationCharacterDataInside(code) {
if (code === 93) {
effects.consume(code)
return continuationDeclarationInside
}
return continuation(code)
}
function continuationDeclarationInside(code) {
if (code === 62) {
effects.consume(code)
return continuationClose
}
return continuation(code)
}
function continuationClose(code) {
if (code === null || markdownLineEnding(code)) {
effects.exit("htmlFlowData")
return done(code)
}
effects.consume(code)
return continuationClose
}
function done(code) {
effects.exit("htmlFlow")
return ok(code)
}
}
// Partial tokenizer: succeed when the current line ending is followed by a
// blank line (used to close basic/complete HTML blocks).
function tokenizeNextBlank(effects, ok, nok) {
  return start
  function start(code) {
    effects.exit("htmlFlowData")
    effects.enter("lineEndingBlank")
    effects.consume(code)
    effects.exit("lineEndingBlank")
    return effects.attempt(blankLine, ok, nok)
  }
}
// node_modules/micromark-core-commonmark/lib/html-text.js
// Construct for HTML (text): inline tags, comments, CDATA, etc.
var htmlText = {
  name: "htmlText",
  tokenize: tokenizeHtmlText,
}
// Tokenizer for HTML (text): inline tags, comments, instructions,
// declarations, and CDATA. Unlike HTML (flow), these may span line endings;
// `returnState` records where to resume after an embedded line ending.
function tokenizeHtmlText(effects, ok, nok) {
  const self = this
  // Quote marker of an attribute value.
  let marker
  // Buffer for matching the literal `CDATA[` marker.
  let buffer2
  let index2
  // State to return to after handling a line ending.
  let returnState
  return start
  // At `<` (U+003C), which this construct is dispatched on.
  function start(code) {
    effects.enter("htmlText")
    effects.enter("htmlTextData")
    effects.consume(code)
    return open
  }
  // After `<`: `!` declaration-like, `/` closing tag, `?` instruction,
  // alpha tag name.
  function open(code) {
    if (code === 33) {
      effects.consume(code)
      return declarationOpen
    }
    if (code === 47) {
      effects.consume(code)
      return tagCloseStart
    }
    if (code === 63) {
      effects.consume(code)
      return instruction
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagOpen
    }
    return nok(code)
  }
  // After `<!`: `-` comment, `[` CDATA, alpha declaration.
  function declarationOpen(code) {
    if (code === 45) {
      effects.consume(code)
      return commentOpen
    }
    if (code === 91) {
      effects.consume(code)
      buffer2 = "CDATA["
      index2 = 0
      return cdataOpen
    }
    if (asciiAlpha(code)) {
      effects.consume(code)
      return declaration
    }
    return nok(code)
  }
  // After `<!-`: a second `-` completes the comment opener.
  function commentOpen(code) {
    if (code === 45) {
      effects.consume(code)
      return commentStart
    }
    return nok(code)
  }
  // A comment may not start with `>`.
  function commentStart(code) {
    if (code === null || code === 62) {
      return nok(code)
    }
    if (code === 45) {
      effects.consume(code)
      return commentStartDash
    }
    return comment(code)
  }
  // Nor with `->`.
  function commentStartDash(code) {
    if (code === null || code === 62) {
      return nok(code)
    }
    return comment(code)
  }
  // Comment body: `-` may start the closer; line endings are allowed.
  function comment(code) {
    if (code === null) {
      return nok(code)
    }
    if (code === 45) {
      effects.consume(code)
      return commentClose
    }
    if (markdownLineEnding(code)) {
      returnState = comment
      return atLineEnding(code)
    }
    effects.consume(code)
    return comment
  }
  // After `-` in a comment: a second `-` must be followed by `>`.
  function commentClose(code) {
    if (code === 45) {
      effects.consume(code)
      return end
    }
    return comment(code)
  }
  // After `<![`: match the literal `CDATA[` character by character.
  function cdataOpen(code) {
    if (code === buffer2.charCodeAt(index2++)) {
      effects.consume(code)
      return index2 === buffer2.length ? cdata : cdataOpen
    }
    return nok(code)
  }
  // CDATA body: `]` may start the `]]>` closer.
  function cdata(code) {
    if (code === null) {
      return nok(code)
    }
    if (code === 93) {
      effects.consume(code)
      return cdataClose
    }
    if (markdownLineEnding(code)) {
      returnState = cdata
      return atLineEnding(code)
    }
    effects.consume(code)
    return cdata
  }
  // After `]` in CDATA: a second `]` may be followed by `>`.
  function cdataClose(code) {
    if (code === 93) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // After `]]`: `>` ends the CDATA; more `]` keeps looking.
  function cdataEnd(code) {
    if (code === 62) {
      return end(code)
    }
    if (code === 93) {
      effects.consume(code)
      return cdataEnd
    }
    return cdata(code)
  }
  // Declaration body: anything until `>`.
  function declaration(code) {
    if (code === null || code === 62) {
      return end(code)
    }
    if (markdownLineEnding(code)) {
      returnState = declaration
      return atLineEnding(code)
    }
    effects.consume(code)
    return declaration
  }
  // Instruction body: `?` may start the `?>` closer.
  function instruction(code) {
    if (code === null) {
      return nok(code)
    }
    if (code === 63) {
      effects.consume(code)
      return instructionClose
    }
    if (markdownLineEnding(code)) {
      returnState = instruction
      return atLineEnding(code)
    }
    effects.consume(code)
    return instruction
  }
  function instructionClose(code) {
    return code === 62 ? end(code) : instruction(code)
  }
  // After `</`: a tag name must start with an alpha.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code)
      return tagClose
    }
    return nok(code)
  }
  // Closing tag name characters: `-` or alphanumeric.
  function tagClose(code) {
    if (code === 45 || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagClose
    }
    return tagCloseBetween(code)
  }
  // After a closing tag name: whitespace, then `>`.
  function tagCloseBetween(code) {
    if (markdownLineEnding(code)) {
      returnState = tagCloseBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagCloseBetween
    }
    return end(code)
  }
  // Opening tag name characters: `-` or alphanumeric.
  function tagOpen(code) {
    if (code === 45 || asciiAlphanumeric(code)) {
      effects.consume(code)
      return tagOpen
    }
    if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // Between attributes of an opening tag: `/` self-closes, `:`/`_`/alpha
  // starts an attribute name.
  function tagOpenBetween(code) {
    if (code === 47) {
      effects.consume(code)
      return end
    }
    if (code === 58 || code === 95 || asciiAlpha(code)) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenBetween
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenBetween
    }
    return end(code)
  }
  // Attribute name characters: `-`, `.`, `:`, `_`, alphanumeric.
  function tagOpenAttributeName(code) {
    if (
      code === 45 ||
      code === 46 ||
      code === 58 ||
      code === 95 ||
      asciiAlphanumeric(code)
    ) {
      effects.consume(code)
      return tagOpenAttributeName
    }
    return tagOpenAttributeNameAfter(code)
  }
  // After an attribute name: `=` starts a value.
  function tagOpenAttributeNameAfter(code) {
    if (code === 61) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeNameAfter
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeNameAfter
    }
    return tagOpenBetween(code)
  }
  // Before an attribute value: quoted (`"` or `'`) or unquoted.
  function tagOpenAttributeValueBefore(code) {
    if (
      code === null ||
      code === 60 ||
      code === 61 ||
      code === 62 ||
      code === 96
    ) {
      return nok(code)
    }
    if (code === 34 || code === 39) {
      effects.consume(code)
      marker = code
      return tagOpenAttributeValueQuoted
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueBefore
      return atLineEnding(code)
    }
    if (markdownSpace(code)) {
      effects.consume(code)
      return tagOpenAttributeValueBefore
    }
    effects.consume(code)
    marker = void 0
    return tagOpenAttributeValueUnquoted
  }
  // Quoted value: anything until the matching quote.
  function tagOpenAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code)
      return tagOpenAttributeValueQuotedAfter
    }
    if (code === null) {
      return nok(code)
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueQuoted
      return atLineEnding(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueQuoted
  }
  // After a quoted value: only `>`, `/`, or whitespace may follow.
  function tagOpenAttributeValueQuotedAfter(code) {
    if (code === 62 || code === 47 || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code)
    }
    return nok(code)
  }
  // Unquoted value: ends at `>` or whitespace; some characters forbidden.
  function tagOpenAttributeValueUnquoted(code) {
    if (
      code === null ||
      code === 34 ||
      code === 39 ||
      code === 60 ||
      code === 61 ||
      code === 96
    ) {
      return nok(code)
    }
    if (code === 62 || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code)
    }
    effects.consume(code)
    return tagOpenAttributeValueUnquoted
  }
  // Handle a line ending inside the tag: tokenize it and the next line's
  // prefix, then resume at `returnState`.
  function atLineEnding(code) {
    effects.exit("htmlTextData")
    effects.enter("lineEnding")
    effects.consume(code)
    effects.exit("lineEnding")
    return factorySpace(
      effects,
      afterPrefix,
      "linePrefix",
      self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4,
    )
  }
  function afterPrefix(code) {
    effects.enter("htmlTextData")
    return returnState(code)
  }
  // `>` closes the construct.
  function end(code) {
    if (code === 62) {
      effects.consume(code)
      effects.exit("htmlTextData")
      effects.exit("htmlText")
      return ok
    }
    return nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/label-end.js
// Construct for `]` closing a link/image label, plus its optional resource
// (`(url "title")`), full reference (`[label]`), or collapsed reference (`[]`).
var labelEnd = {
  name: "labelEnd",
  tokenize: tokenizeLabelEnd,
  resolveTo: resolveToLabelEnd,
  resolveAll: resolveAllLabelEnd,
}
// `(destination "title")` after a label end.
var resourceConstruct = {
  tokenize: tokenizeResource,
}
// `[label]` after a label end.
var fullReferenceConstruct = {
  tokenize: tokenizeFullReference,
}
// `[]` after a label end.
var collapsedReferenceConstruct = {
  tokenize: tokenizeCollapsedReference,
}
// Turn leftover (unmatched) label starts/ends into plain data, dropping
// their marker events so they render as literal text.
function resolveAllLabelEnd(events) {
  let position = 0
  while (position < events.length) {
    const token = events[position][1]
    const kind = token.type
    if (kind === "labelImage" || kind === "labelLink" || kind === "labelEnd") {
      // `labelImage` carries two marker tokens (4 events); others carry one (2).
      events.splice(position + 1, kind === "labelImage" ? 4 : 2)
      token.type = "data"
      position += 2
    } else {
      position += 1
    }
  }
  return events
}
// After a successful label end, rewrite the event stream into a
// link/image group containing a `label` with `labelText` inside.
function resolveToLabelEnd(events, context) {
  let index2 = events.length
  // 2 when the start is `![` (extra marker pair), 0 for `[`.
  let offset = 0
  let token
  let open
  let close2
  let media
  // Walk backwards: find the label end, then its matching unbalanced start;
  // label-link starts passed on the way become inactive (no links in links).
  while (index2--) {
    token = events[index2][1]
    if (open) {
      if (
        token.type === "link" ||
        (token.type === "labelLink" && token._inactive)
      ) {
        break
      }
      if (events[index2][0] === "enter" && token.type === "labelLink") {
        token._inactive = true
      }
    } else if (close2) {
      if (
        events[index2][0] === "enter" &&
        (token.type === "labelImage" || token.type === "labelLink") &&
        !token._balanced
      ) {
        open = index2
        if (token.type !== "labelLink") {
          offset = 2
          break
        }
      }
    } else if (token.type === "labelEnd") {
      close2 = index2
    }
  }
  const group = {
    type: events[open][1].type === "labelLink" ? "link" : "image",
    start: Object.assign({}, events[open][1].start),
    end: Object.assign({}, events[events.length - 1][1].end),
  }
  const label = {
    type: "label",
    start: Object.assign({}, events[open][1].start),
    end: Object.assign({}, events[close2][1].end),
  }
  const text3 = {
    type: "labelText",
    start: Object.assign({}, events[open + offset + 2][1].end),
    end: Object.assign({}, events[close2 - 2][1].start),
  }
  media = [
    ["enter", group, context],
    ["enter", label, context],
  ]
  // Keep the original start-marker events.
  media = push(media, events.slice(open + 1, open + offset + 3))
  media = push(media, [["enter", text3, context]])
  // Re-resolve the events inside the label text (e.g. attention).
  media = push(
    media,
    resolveAll(
      context.parser.constructs.insideSpan.null,
      events.slice(open + offset + 4, close2 - 3),
      context,
    ),
  )
  media = push(media, [
    ["exit", text3, context],
    events[close2 - 2],
    events[close2 - 1],
    ["exit", label, context],
  ])
  // Resource/reference events after the label end, then close the group.
  media = push(media, events.slice(close2 + 1))
  media = push(media, [["exit", group, context]])
  splice(events, open, events.length, media)
  return events
}
// Tokenizer for `]`: succeeds only when an unbalanced `[`/`![` start exists.
function tokenizeLabelEnd(effects, ok, nok) {
  const self = this
  let index2 = self.events.length
  let labelStart
  let defined
  // Find the most recent unbalanced label start (image or link).
  while (index2--) {
    if (
      (self.events[index2][1].type === "labelImage" ||
        self.events[index2][1].type === "labelLink") &&
      !self.events[index2][1]._balanced
    ) {
      labelStart = self.events[index2][1]
      break
    }
  }
  return start
  function start(code) {
    if (!labelStart) {
      // No matching `[`/`![`: this `]` is plain data.
      return nok(code)
    }
    if (labelStart._inactive) return balanced(code)
    // Whether the text between the start and here matches a known definition.
    defined = self.parser.defined.includes(
      normalizeIdentifier(
        self.sliceSerialize({
          start: labelStart.end,
          end: self.now(),
        }),
      ),
    )
    effects.enter("labelEnd")
    effects.enter("labelMarker")
    effects.consume(code)
    effects.exit("labelMarker")
    effects.exit("labelEnd")
    return afterLabelEnd
  }
  function afterLabelEnd(code) {
    if (code === 40) {
      // `(`: try a resource; if it fails but the label is defined, still ok.
      return effects.attempt(
        resourceConstruct,
        ok,
        defined ? ok : balanced,
      )(code)
    }
    if (code === 91) {
      // `[`: try a full reference, then (if defined) a collapsed one.
      return effects.attempt(
        fullReferenceConstruct,
        ok,
        defined
          ? effects.attempt(collapsedReferenceConstruct, ok, balanced)
          : balanced,
      )(code)
    }
    // Shortcut reference: ok only when the label itself is defined.
    return defined ? ok(code) : balanced(code)
  }
  function balanced(code) {
    // Mark the start as balanced so a later `]` does not reuse it.
    labelStart._balanced = true
    return nok(code)
  }
}
// Tokenizer for a resource: `(destination "title")` right after `](`.
function tokenizeResource(effects, ok, nok) {
  return start
  function start(code) {
    // `(`
    effects.enter("resource")
    effects.enter("resourceMarker")
    effects.consume(code)
    effects.exit("resourceMarker")
    return factoryWhitespace(effects, open)
  }
  function open(code) {
    if (code === 41) {
      // Empty resource: `()`.
      return end(code)
    }
    return factoryDestination(
      effects,
      destinationAfter,
      nok,
      "resourceDestination",
      "resourceDestinationLiteral",
      "resourceDestinationLiteralMarker",
      "resourceDestinationRaw",
      "resourceDestinationString",
      3,
    )(code)
  }
  function destinationAfter(code) {
    // Whitespace after the destination may introduce a title.
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, between)(code)
      : end(code)
  }
  function between(code) {
    if (code === 34 || code === 39 || code === 40) {
      // `"`, `'`, or `(`: a title follows.
      return factoryTitle(
        effects,
        factoryWhitespace(effects, end),
        nok,
        "resourceTitle",
        "resourceTitleMarker",
        "resourceTitleString",
      )(code)
    }
    return end(code)
  }
  function end(code) {
    if (code === 41) {
      // `)`
      effects.enter("resourceMarker")
      effects.consume(code)
      effects.exit("resourceMarker")
      effects.exit("resource")
      return ok
    }
    return nok(code)
  }
}
// Tokenizer for a full reference: `[label]` after a label end.
function tokenizeFullReference(effects, ok, nok) {
  const self = this
  return start
  function start(code) {
    return factoryLabel.call(
      self,
      effects,
      afterLabel,
      nok,
      "reference",
      "referenceMarker",
      "referenceString",
    )(code)
  }
  function afterLabel(code) {
    // Succeed only when the label (serialized without its brackets,
    // normalized) matches a known definition identifier.
    return self.parser.defined.includes(
      normalizeIdentifier(
        self
          .sliceSerialize(self.events[self.events.length - 1][1])
          .slice(1, -1),
      ),
    )
      ? ok(code)
      : nok(code)
  }
}
// Tokenizer for a collapsed reference: exactly `[]` after a label end.
function tokenizeCollapsedReference(effects, ok, nok) {
  return start
  // At `[`.
  function start(code) {
    effects.enter("reference")
    effects.enter("referenceMarker")
    effects.consume(code)
    effects.exit("referenceMarker")
    return open
  }
  // Expect `]` immediately; anything else is not a collapsed reference.
  function open(code) {
    if (code !== 93) {
      return nok(code)
    }
    effects.enter("referenceMarker")
    effects.consume(code)
    effects.exit("referenceMarker")
    effects.exit("reference")
    return ok
  }
}
// node_modules/micromark-core-commonmark/lib/label-start-image.js
// Construct for `![`, the start of an image label.
var labelStartImage = {
  name: "labelStartImage",
  tokenize: tokenizeLabelStartImage,
  resolveAll: labelEnd.resolveAll,
}
// Tokenize `![`, the start of an image label.
function tokenizeLabelStartImage(effects, ok, nok) {
  const self = this
  return bang
  // At `!`.
  function bang(code) {
    effects.enter("labelImage")
    effects.enter("labelImageMarker")
    effects.consume(code)
    effects.exit("labelImageMarker")
    return bracket
  }
  // Expect `[` right after the `!`; anything else is not a label start.
  function bracket(code) {
    if (code !== 91) {
      return nok(code)
    }
    effects.enter("labelMarker")
    effects.consume(code)
    effects.exit("labelMarker")
    effects.exit("labelImage")
    return done
  }
  // `^` right after `![` is reserved when hidden footnote support is enabled.
  function done(code) {
    if (code === 94 && "_hiddenFootnoteSupport" in self.parser.constructs) {
      return nok(code)
    }
    return ok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/label-start-link.js
// Construct for `[`, the start of a link label.
var labelStartLink = {
  name: "labelStartLink",
  tokenize: tokenizeLabelStartLink,
  resolveAll: labelEnd.resolveAll,
}
// Tokenize `[`, the start of a link label.
function tokenizeLabelStartLink(effects, ok, nok) {
  const self = this
  return marker
  // At `[`.
  function marker(code) {
    effects.enter("labelLink")
    effects.enter("labelMarker")
    effects.consume(code)
    effects.exit("labelMarker")
    effects.exit("labelLink")
    return done
  }
  // `^` right after `[` is reserved when hidden footnote support is enabled.
  function done(code) {
    if (code === 94 && "_hiddenFootnoteSupport" in self.parser.constructs) {
      return nok(code)
    }
    return ok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/line-ending.js
// Construct for a line ending inside text.
var lineEnding = {
  name: "lineEnding",
  tokenize: tokenizeLineEnding,
}
// Tokenize a line ending in text, then eat the next line's prefix.
function tokenizeLineEnding(effects, ok) {
  return start
  function start(code) {
    effects.enter("lineEnding")
    effects.consume(code)
    effects.exit("lineEnding")
    // Consume leading whitespace of the next line as `linePrefix`.
    return factorySpace(effects, ok, "linePrefix")
  }
}
// node_modules/micromark-core-commonmark/lib/thematic-break.js
// Construct for thematic breaks (`***`, `---`, `___`).
var thematicBreak = {
  name: "thematicBreak",
  tokenize: tokenizeThematicBreak,
}
// Tokenizer for a thematic break: three or more of the same marker,
// optionally separated by spaces, then end of line.
function tokenizeThematicBreak(effects, ok, nok) {
  let size = 0
  let marker
  return start
  function start(code) {
    effects.enter("thematicBreak")
    // The first marker fixes which of `*`/`-`/`_` the rest must use.
    marker = code
    return atBreak(code)
  }
  function atBreak(code) {
    if (code === marker) {
      effects.enter("thematicBreakSequence")
      return sequence(code)
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, "whitespace")(code)
    }
    // Need at least three markers and then an EOL or EOF.
    if (size < 3 || (code !== null && !markdownLineEnding(code))) {
      return nok(code)
    }
    effects.exit("thematicBreak")
    return ok(code)
  }
  function sequence(code) {
    if (code === marker) {
      effects.consume(code)
      size++
      return sequence
    }
    effects.exit("thematicBreakSequence")
    return atBreak(code)
  }
}
// node_modules/micromark-core-commonmark/lib/list.js
// Container construct for ordered/unordered lists.
var list = {
  name: "list",
  tokenize: tokenizeListStart,
  continuation: {
    tokenize: tokenizeListContinuation,
  },
  exit: tokenizeListEnd,
}
// Partial: whitespace between a list marker and its content.
var listItemPrefixWhitespaceConstruct = {
  tokenize: tokenizeListItemPrefixWhitespace,
  partial: true,
}
// Partial: the indent required to continue an item on a new line.
var indentConstruct = {
  tokenize: tokenizeIndent,
  partial: true,
}
// Tokenizer for the start of a list item (`*`, `+`, `-`, or `1.` / `1)`).
function tokenizeListStart(effects, ok, nok) {
  const self = this
  const tail = self.events[self.events.length - 1]
  // Indentation already consumed as a line prefix counts toward item size.
  let initialSize =
    tail && tail[1].type === "linePrefix"
      ? tail[2].sliceSerialize(tail[1], true).length
      : 0
  // Number of digits seen in an ordered-list value (max 9).
  let size = 0
  return start
  function start(code) {
    const kind =
      self.containerState.type ||
      (code === 42 || code === 43 || code === 45
        ? "listUnordered"
        : "listOrdered")
    if (
      kind === "listUnordered"
        ? !self.containerState.marker || code === self.containerState.marker
        : asciiDigit(code)
    ) {
      if (!self.containerState.type) {
        self.containerState.type = kind
        effects.enter(kind, {
          _container: true,
        })
      }
      if (kind === "listUnordered") {
        effects.enter("listItemPrefix")
        // `*`/`-` could also start a thematic break; rule that out first.
        return code === 42 || code === 45
          ? effects.check(thematicBreak, nok, atMarker)(code)
          : atMarker(code)
      }
      // Ordered lists interrupting a paragraph must start at `1` (code 49).
      if (!self.interrupt || code === 49) {
        effects.enter("listItemPrefix")
        effects.enter("listItemValue")
        return inside(code)
      }
    }
    return nok(code)
  }
  function inside(code) {
    if (asciiDigit(code) && ++size < 10) {
      effects.consume(code)
      return inside
    }
    if (
      (!self.interrupt || size < 2) &&
      (self.containerState.marker
        ? code === self.containerState.marker
        : code === 41 || code === 46)
    ) {
      effects.exit("listItemValue")
      return atMarker(code)
    }
    return nok(code)
  }
  function atMarker(code) {
    effects.enter("listItemMarker")
    effects.consume(code)
    effects.exit("listItemMarker")
    // Later items must use the same marker character.
    self.containerState.marker = self.containerState.marker || code
    return effects.check(
      blankLine,
      // A blank line right after the marker is fine unless interrupting.
      self.interrupt ? nok : onBlank,
      effects.attempt(
        listItemPrefixWhitespaceConstruct,
        endOfPrefix,
        otherPrefix,
      ),
    )
  }
  function onBlank(code) {
    self.containerState.initialBlankLine = true
    // Count the marker itself toward the item's indent size.
    initialSize++
    return endOfPrefix(code)
  }
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter("listItemPrefixWhitespace")
      effects.consume(code)
      effects.exit("listItemPrefixWhitespace")
      return endOfPrefix
    }
    return nok(code)
  }
  function endOfPrefix(code) {
    // The item's size decides how much indent continuation lines need.
    self.containerState.size =
      initialSize +
      self.sliceSerialize(effects.exit("listItemPrefix"), true).length
    return ok(code)
  }
}
// Tokenizer deciding whether a new line continues the current list item.
function tokenizeListContinuation(effects, ok, nok) {
  const self = this
  self.containerState._closeFlow = void 0
  return effects.check(blankLine, onBlank, notBlank)
  function onBlank(code) {
    // A blank line after an initially-blank item may end the item.
    self.containerState.furtherBlankLines =
      self.containerState.furtherBlankLines ||
      self.containerState.initialBlankLine
    return factorySpace(
      effects,
      ok,
      "listItemIndent",
      self.containerState.size + 1,
    )(code)
  }
  function notBlank(code) {
    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
      self.containerState.furtherBlankLines = void 0
      self.containerState.initialBlankLine = void 0
      return notInCurrentItem(code)
    }
    self.containerState.furtherBlankLines = void 0
    self.containerState.initialBlankLine = void 0
    // Indented enough? Then it is more content of the same item.
    return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)
  }
  function notInCurrentItem(code) {
    // Close the current item, but maybe a sibling item starts here.
    self.containerState._closeFlow = true
    self.interrupt = void 0
    return factorySpace(
      effects,
      effects.attempt(list, ok, nok),
      "linePrefix",
      self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4,
    )(code)
  }
}
// Partial tokenizer: does this line carry the item's full indent?
function tokenizeIndent(effects, ok, nok) {
  const self = this
  return factorySpace(
    effects,
    afterPrefix,
    "listItemIndent",
    self.containerState.size + 1,
  )
  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1]
    // Succeed only with exactly the item's indent size.
    return tail &&
      tail[1].type === "listItemIndent" &&
      tail[2].sliceSerialize(tail[1], true).length === self.containerState.size
      ? ok(code)
      : nok(code)
  }
}
// Called when the list container closes: exit the open list token.
function tokenizeListEnd(effects) {
  const { containerState } = this
  effects.exit(containerState.type)
}
// Partial tokenizer: whitespace between a list marker and its content
// (at most tab size + 1, unless indented code is disabled).
function tokenizeListItemPrefixWhitespace(effects, ok, nok) {
  const self = this
  return factorySpace(
    effects,
    afterPrefix,
    "listItemPrefixWhitespace",
    self.parser.constructs.disable.null.includes("codeIndented")
      ? void 0
      : 4 + 1,
  )
  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1]
    // Some (but not too much) whitespace must have been consumed.
    return !markdownSpace(code) &&
      tail &&
      tail[1].type === "listItemPrefixWhitespace"
      ? ok(code)
      : nok(code)
  }
}
// node_modules/micromark-core-commonmark/lib/setext-underline.js
// Construct for setext heading underlines (`===` / `---`).
var setextUnderline = {
  name: "setextUnderline",
  tokenize: tokenizeSetextUnderline,
  resolveTo: resolveToSetextUnderline,
}
// Rewrites the preceding paragraph's events into a setext heading once
// an underline has been tokenized.
function resolveToSetextUnderline(events, context) {
  let index2 = events.length
  let content3
  let text3
  let definition2
  // Walk backwards to find the content/paragraph the underline belongs to.
  while (index2--) {
    if (events[index2][0] === "enter") {
      if (events[index2][1].type === "content") {
        content3 = index2
        break
      }
      if (events[index2][1].type === "paragraph") {
        text3 = index2
      }
    } else {
      if (events[index2][1].type === "content") {
        events.splice(index2, 1)
      }
      if (!definition2 && events[index2][1].type === "definition") {
        definition2 = index2
      }
    }
  }
  const heading = {
    type: "setextHeading",
    start: Object.assign({}, events[text3][1].start),
    end: Object.assign({}, events[events.length - 1][1].end),
  }
  // The paragraph becomes the heading's text.
  events[text3][1].type = "setextHeadingText"
  if (definition2) {
    // Definitions before the text stay outside the heading.
    events.splice(text3, 0, ["enter", heading, context])
    events.splice(definition2 + 1, 0, ["exit", events[content3][1], context])
    events[content3][1].end = Object.assign({}, events[definition2][1].end)
  } else {
    events[content3][1] = heading
  }
  events.push(["exit", heading, context])
  return events
}
// Tokenizer for a setext heading underline (`===` or `---`).
function tokenizeSetextUnderline(effects, ok, nok) {
  const self = this
  let index2 = self.events.length
  let marker
  let paragraph
  // Skip trailing line endings/prefixes to see whether a paragraph precedes.
  while (index2--) {
    if (
      self.events[index2][1].type !== "lineEnding" &&
      self.events[index2][1].type !== "linePrefix" &&
      self.events[index2][1].type !== "content"
    ) {
      paragraph = self.events[index2][1].type === "paragraph"
      break
    }
  }
  return start
  function start(code) {
    // Only valid after a (non-lazy) paragraph, or when interrupting one.
    if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph)) {
      effects.enter("setextHeadingLine")
      effects.enter("setextHeadingLineSequence")
      marker = code
      return closingSequence(code)
    }
    return nok(code)
  }
  function closingSequence(code) {
    if (code === marker) {
      effects.consume(code)
      return closingSequence
    }
    effects.exit("setextHeadingLineSequence")
    // Trailing whitespace is allowed after the sequence.
    return factorySpace(effects, closingSequenceEnd, "lineSuffix")(code)
  }
  function closingSequenceEnd(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("setextHeadingLine")
      return ok(code)
    }
    return nok(code)
  }
}
// node_modules/micromark/lib/initialize/flow.js
// Initializer construct for the flow (block-level) tokenizer.
var flow = {
  tokenize: initializeFlow,
}
// Sets up the flow tokenizer: each line is a blank line, a flow-initial
// construct (indented code), a flow construct, or falls back to content.
function initializeFlow(effects) {
  const self = this
  const initial = effects.attempt(
    blankLine,
    atBlankEnding,
    effects.attempt(
      this.parser.constructs.flowInitial,
      afterConstruct,
      factorySpace(
        effects,
        effects.attempt(
          this.parser.constructs.flow,
          afterConstruct,
          effects.attempt(content2, afterConstruct),
        ),
        "linePrefix",
      ),
    ),
  )
  return initial
  function atBlankEnding(code) {
    if (code === null) {
      // End of file.
      effects.consume(code)
      return
    }
    effects.enter("lineEndingBlank")
    effects.consume(code)
    effects.exit("lineEndingBlank")
    // A new line starts a fresh construct.
    self.currentConstruct = void 0
    return initial
  }
  function afterConstruct(code) {
    if (code === null) {
      // End of file.
      effects.consume(code)
      return
    }
    effects.enter("lineEnding")
    effects.consume(code)
    effects.exit("lineEnding")
    self.currentConstruct = void 0
    return initial
  }
}
// node_modules/micromark/lib/initialize/text.js
// Default resolver used inside spans (merges adjacent data tokens).
var resolver = {
  resolveAll: createResolver(),
}
// Initializers for the `string` and `text` content types.
var string = initializeFactory("string")
var text = initializeFactory("text")
// Creates the `string`/`text` initializer: alternate between trying the
// field's constructs at each character and collecting plain `data`.
function initializeFactory(field) {
  return {
    tokenize: initializeText,
    // Only `text` also splits off trailing line suffixes.
    resolveAll: createResolver(
      field === "text" ? resolveAllLineSuffixes : void 0,
    ),
  }
  function initializeText(effects) {
    const self = this
    const constructs2 = this.parser.constructs[field]
    const text3 = effects.attempt(constructs2, start, notText)
    return start
    function start(code) {
      return atBreak(code) ? text3(code) : notText(code)
    }
    function notText(code) {
      if (code === null) {
        // End of file.
        effects.consume(code)
        return
      }
      effects.enter("data")
      effects.consume(code)
      return data
    }
    function data(code) {
      if (atBreak(code)) {
        effects.exit("data")
        return text3(code)
      }
      // More plain data.
      effects.consume(code)
      return data
    }
    // Could a construct start at this code?
    function atBreak(code) {
      if (code === null) {
        return true
      }
      const list2 = constructs2[code]
      let index2 = -1
      if (list2) {
        while (++index2 < list2.length) {
          const item = list2[index2]
          // Constructs may restrict which character may precede them.
          if (!item.previous || item.previous.call(self, self.previous)) {
            return true
          }
        }
      }
      return false
    }
  }
}
// Build a resolver that merges runs of adjacent `data` tokens into one
// token, then hands the events to `extraResolver` when given.
function createResolver(extraResolver) {
  return resolveAllText
  function resolveAllText(events, context) {
    let runStart
    for (let position = 0; position <= events.length; position++) {
      if (runStart === undefined) {
        // Look for the enter of a `data` token; skip its exit.
        const event = events[position]
        if (event && event[1].type === "data") {
          runStart = position
          position++
        }
      } else if (!events[position] || events[position][1].type !== "data") {
        // End of the run: collapse it when it spans more than one token.
        if (position !== runStart + 2) {
          events[runStart][1].end = events[position - 1][1].end
          events.splice(runStart + 2, position - runStart - 2)
          position = runStart + 2
        }
        runStart = undefined
      }
    }
    return extraResolver ? extraResolver(events, context) : events
  }
}
// Splits trailing spaces/tabs off `data` tokens before a line ending into
// `lineSuffix` (or `hardBreakTrailing` for two or more spaces).
function resolveAllLineSuffixes(events, context) {
  let eventIndex = -1
  while (++eventIndex <= events.length) {
    if (
      (eventIndex === events.length ||
        events[eventIndex][1].type === "lineEnding") &&
      events[eventIndex - 1][1].type === "data"
    ) {
      const data = events[eventIndex - 1][1]
      const chunks = context.sliceStream(data)
      let index2 = chunks.length
      let bufferIndex = -1
      let size = 0
      let tabs
      // Walk the data's chunks backwards, counting trailing whitespace.
      while (index2--) {
        const chunk = chunks[index2]
        if (typeof chunk === "string") {
          bufferIndex = chunk.length
          while (chunk.charCodeAt(bufferIndex - 1) === 32) {
            size++
            bufferIndex--
          }
          if (bufferIndex) break
          bufferIndex = -1
        } else if (chunk === -2) {
          // Horizontal tab.
          tabs = true
          size++
        } else if (chunk === -1) {
          // Virtual space (tab padding): ignore.
        } else {
          // Some other code: stop before it.
          index2++
          break
        }
      }
      if (size) {
        const token = {
          type:
            eventIndex === events.length || tabs || size < 2
              ? "lineSuffix"
              : "hardBreakTrailing",
          start: {
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size,
            _index: data.start._index + index2,
            _bufferIndex: index2
              ? bufferIndex
              : data.start._bufferIndex + bufferIndex,
          },
          end: Object.assign({}, data.end),
        }
        // Shrink the data token to exclude the suffix.
        data.end = Object.assign({}, token.start)
        if (data.start.offset === data.end.offset) {
          // The data was all whitespace: replace it entirely.
          Object.assign(data, token)
        } else {
          events.splice(
            eventIndex,
            0,
            ["enter", token, context],
            ["exit", token, context],
          )
          eventIndex += 2
        }
      }
      eventIndex++
    }
  }
  return events
}
// node_modules/micromark/lib/create-tokenizer.js
// Creates a tokenizer for one content type (document/flow/text/string):
// feeds character codes through a state machine, records enter/exit events,
// and supports attempt/check/interrupt with full rollback.
function createTokenizer(parser, initialize, from) {
  // Current place in the chunk stream (also tracks line/column/offset).
  let point2 = Object.assign(
    from
      ? Object.assign({}, from)
      : {
          line: 1,
          column: 1,
          offset: 0,
        },
    {
      _index: 0,
      _bufferIndex: -1,
    },
  )
  // Columns to jump to when entering a line inside a container prefix.
  const columnStart = {}
  const resolveAllConstructs = []
  let chunks = []
  let stack = []
  let consumed = true
  // The API handed to constructs.
  const effects = {
    consume,
    enter,
    exit: exit2,
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    interrupt: constructFactory(onsuccessfulcheck, {
      interrupt: true,
    }),
  }
  const context = {
    previous: null,
    code: null,
    containerState: {},
    events: [],
    parser,
    sliceStream,
    sliceSerialize,
    now,
    defineSkip,
    write,
  }
  let state = initialize.tokenize.call(context, effects)
  let expectedCode
  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize)
  }
  return context
  // Feed more chunks; returns all events once the `null` EOF chunk arrives.
  function write(slice) {
    chunks = push(chunks, slice)
    main()
    if (chunks[chunks.length - 1] !== null) {
      return []
    }
    addResult(initialize, 0)
    context.events = resolveAll(resolveAllConstructs, context.events, context)
    return context.events
  }
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs)
  }
  function sliceStream(token) {
    return sliceChunks(chunks, token)
  }
  function now() {
    return Object.assign({}, point2)
  }
  function defineSkip(value2) {
    columnStart[value2.line] = value2.column
    accountForPotentialSkip()
  }
  // Run the state machine over all buffered chunks.
  function main() {
    let chunkIndex
    while (point2._index < chunks.length) {
      const chunk = chunks[point2._index]
      if (typeof chunk === "string") {
        chunkIndex = point2._index
        if (point2._bufferIndex < 0) {
          point2._bufferIndex = 0
        }
        while (
          point2._index === chunkIndex &&
          point2._bufferIndex < chunk.length
        ) {
          go(chunk.charCodeAt(point2._bufferIndex))
        }
      } else {
        go(chunk)
      }
    }
  }
  // Step the state machine with one code.
  function go(code) {
    consumed = void 0
    expectedCode = code
    state = state(code)
  }
  // Move past `code`, updating the position bookkeeping.
  function consume(code) {
    if (markdownLineEnding(code)) {
      point2.line++
      point2.column = 1
      // `-3` is CRLF: two characters.
      point2.offset += code === -3 ? 2 : 1
      accountForPotentialSkip()
    } else if (code !== -1) {
      // Virtual spaces (`-1`) take no room of their own.
      point2.column++
      point2.offset++
    }
    if (point2._bufferIndex < 0) {
      point2._index++
    } else {
      point2._bufferIndex++
      if (point2._bufferIndex === chunks[point2._index].length) {
        point2._bufferIndex = -1
        point2._index++
      }
    }
    context.previous = code
    consumed = true
  }
  function enter(type, fields) {
    const token = fields || {}
    token.type = type
    token.start = now()
    context.events.push(["enter", token, context])
    stack.push(token)
    return token
  }
  function exit2(type) {
    const token = stack.pop()
    token.end = now()
    context.events.push(["exit", token, context])
    return token
  }
  // On a successful attempt: keep the events and run its resolvers.
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from)
  }
  // On a successful check: discard everything the check consumed.
  function onsuccessfulcheck(_, info) {
    info.restore()
  }
  function constructFactory(onreturn, fields) {
    return hook
    function hook(constructs2, returnState, bogusState) {
      let listOfConstructs
      let constructIndex
      let currentConstruct
      let info
      return Array.isArray(constructs2)
        ? handleListOfConstructs(constructs2)
        : "tokenize" in constructs2
          ? handleListOfConstructs([constructs2])
          : handleMapOfConstructs(constructs2)
      function handleMapOfConstructs(map) {
        return start
        function start(code) {
          // Constructs for this exact code plus wildcard (`null`) constructs.
          const def = code !== null && map[code]
          const all2 = code !== null && map.null
          const list2 = [
            ...(Array.isArray(def) ? def : def ? [def] : []),
            ...(Array.isArray(all2) ? all2 : all2 ? [all2] : []),
          ]
          return handleListOfConstructs(list2)(code)
        }
      }
      function handleListOfConstructs(list2) {
        listOfConstructs = list2
        constructIndex = 0
        if (list2.length === 0) {
          return bogusState
        }
        return handleConstruct(list2[constructIndex])
      }
      function handleConstruct(construct) {
        return start
        function start(code) {
          // Snapshot so a failed construct can be fully undone.
          info = store()
          currentConstruct = construct
          if (!construct.partial) {
            context.currentConstruct = construct
          }
          if (
            construct.name &&
            context.parser.constructs.disable.null.includes(construct.name)
          ) {
            return nok(code)
          }
          return construct.tokenize.call(
            fields ? Object.assign(Object.create(context), fields) : context,
            effects,
            ok,
            nok,
          )(code)
        }
      }
      function ok(code) {
        consumed = true
        onreturn(currentConstruct, info)
        return returnState
      }
      function nok(code) {
        consumed = true
        info.restore()
        // Try the next construct in the list, if any.
        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex])
        }
        return bogusState
      }
    }
  }
  // Run a successful construct's resolvers over its events.
  function addResult(construct, from2) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct)
    }
    if (construct.resolve) {
      splice(
        context.events,
        from2,
        context.events.length - from2,
        construct.resolve(context.events.slice(from2), context),
      )
    }
    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context)
    }
  }
  // Capture tokenizer state so it can be rolled back.
  function store() {
    const startPoint = now()
    const startPrevious = context.previous
    const startCurrentConstruct = context.currentConstruct
    const startEventsIndex = context.events.length
    const startStack = Array.from(stack)
    return {
      restore,
      from: startEventsIndex,
    }
    function restore() {
      point2 = startPoint
      context.previous = startPrevious
      context.currentConstruct = startCurrentConstruct
      context.events.length = startEventsIndex
      stack = startStack
      accountForPotentialSkip()
    }
  }
  // Jump the column when a skip was defined for this line.
  function accountForPotentialSkip() {
    if (point2.line in columnStart && point2.column < 2) {
      point2.column = columnStart[point2.line]
      point2.offset += columnStart[point2.line] - 1
    }
  }
}
// Return the sub-range of `chunks` covered by `token` (its start/end
// points), slicing partial string chunks at either edge.
function sliceChunks(chunks, token) {
  const { _index: startIndex, _bufferIndex: startBufferIndex } = token.start
  const { _index: endIndex, _bufferIndex: endBufferIndex } = token.end
  if (startIndex === endIndex) {
    // Both points live in the same (string) chunk.
    return [chunks[startIndex].slice(startBufferIndex, endBufferIndex)]
  }
  const view = chunks.slice(startIndex, endIndex)
  if (startBufferIndex > -1) {
    // Trim the leading chunk to where the token starts.
    view[0] = view[0].slice(startBufferIndex)
  }
  if (endBufferIndex > 0) {
    // Append the used part of the final chunk.
    view.push(chunks[endIndex].slice(0, endBufferIndex))
  }
  return view
}
// Turn a chunk slice back into a string; special codes map to characters.
function serializeChunks(chunks, expandTabs) {
  let index2 = -1
  const result = []
  let atTab
  while (++index2 < chunks.length) {
    const chunk = chunks[index2]
    let value2
    if (typeof chunk === "string") {
      value2 = chunk
    } else
      switch (chunk) {
        case -5: {
          // Carriage return.
          value2 = "\r"
          break
        }
        case -4: {
          // Line feed.
          value2 = "\n"
          break
        }
        case -3: {
          // Carriage return + line feed.
          value2 = "\r\n"
          break
        }
        case -2: {
          // Horizontal tab: a space when expanding, else a literal tab.
          value2 = expandTabs ? " " : "	"
          break
        }
        case -1: {
          // Virtual space (tab padding): skipped unless expanding.
          if (!expandTabs && atTab) continue
          value2 = " "
          break
        }
        default: {
          value2 = String.fromCharCode(chunk)
        }
      }
    atTab = chunk === -2
    result.push(value2)
  }
  return result.join("")
}
// node_modules/micromark/lib/constructs.js
// Default CommonMark constructs, keyed by starting character code.
var constructs_exports = {}
__export(constructs_exports, {
  attentionMarkers: () => attentionMarkers,
  contentInitial: () => contentInitial,
  disable: () => disable,
  document: () => document3,
  flow: () => flow2,
  flowInitial: () => flowInitial,
  insideSpan: () => insideSpan,
  string: () => string2,
  text: () => text2,
})
// Document-level containers: list markers (`*` `+` `-`, digits) and `>`.
var document3 = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote,
}
// `[` may start a definition before other content.
var contentInitial = {
  [91]: definition,
}
// Whitespace at the start of a flow line may be indented code.
var flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented,
}
// Flow (block) constructs: `#` `*` `-` `<` `=` `_` and fence markers.
var flow2 = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced,
}
// String content (titles, destinations): only `&` references, `\` escapes.
var string2 = {
  [38]: characterReference,
  [92]: characterEscape,
}
// Text (inline) constructs.
var text2 = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText,
}
// Resolvers run on events inside label text.
var insideSpan = {
  null: [attention, resolver],
}
// Characters that can open/close attention runs (`*`, `_`).
var attentionMarkers = {
  null: [42, 95],
}
// Construct names disabled by options (none by default).
var disable = {
  null: [],
}
// node_modules/micromark/lib/parse.js
// Build a parser: combine the default constructs with any extensions and
// expose a tokenizer factory per content type.
function parse2(options = {}) {
  const constructs2 = combineExtensions(
    [constructs_exports].concat(options.extensions || []),
  )
  const parser = {
    defined: [],
    lazy: {},
    constructs: constructs2,
    content: create2(content),
    document: create2(document2),
    flow: create2(flow),
    string: create2(string),
    text: create2(text),
  }
  return parser
  function create2(initial) {
    return creator
    // `from` is an optional point to start positions at.
    function creator(from) {
      return createTokenizer(parser, initial, from)
    }
  }
}
// node_modules/micromark/lib/preprocess.js
var search = /[\0\t\n\r]/g
// Streaming preprocessor: splits input into string chunks and special codes
// (-5 CR, -4 LF, -3 CRLF, -2 tab, -1 virtual space), expands tabs to a
// 4-column grid, replaces NUL, and strips a leading BOM.
function preprocess() {
  let column = 1
  let buffer2 = ""
  let start = true
  let atCarriageReturn
  return preprocessor
  function preprocessor(value2, encoding, end) {
    const chunks = []
    let match
    let next
    let startPosition
    let endPosition
    let code
    // Prepend anything left over from the previous call.
    value2 = buffer2 + value2.toString(encoding)
    startPosition = 0
    buffer2 = ""
    if (start) {
      // Skip a byte order mark at the very start of the stream.
      if (value2.charCodeAt(0) === 65279) {
        startPosition++
      }
      start = void 0
    }
    while (startPosition < value2.length) {
      search.lastIndex = startPosition
      match = search.exec(value2)
      endPosition =
        match && match.index !== void 0 ? match.index : value2.length
      code = value2.charCodeAt(endPosition)
      if (!match) {
        // No more specials: keep the tail for the next call.
        buffer2 = value2.slice(startPosition)
        break
      }
      if (code === 10 && startPosition === endPosition && atCarriageReturn) {
        // LF directly after CR: a single CRLF ending.
        chunks.push(-3)
        atCarriageReturn = void 0
      } else {
        if (atCarriageReturn) {
          chunks.push(-5)
          atCarriageReturn = void 0
        }
        if (startPosition < endPosition) {
          chunks.push(value2.slice(startPosition, endPosition))
          column += endPosition - startPosition
        }
        switch (code) {
          case 0: {
            // NUL is replaced by U+FFFD (pushed as a code).
            chunks.push(65533)
            column++
            break
          }
          case 9: {
            // Tab plus virtual spaces up to the next 4-column stop.
            next = Math.ceil(column / 4) * 4
            chunks.push(-2)
            while (column++ < next) chunks.push(-1)
            break
          }
          case 10: {
            chunks.push(-4)
            column = 1
            break
          }
          default: {
            // CR: defer until we know whether an LF follows.
            atCarriageReturn = true
            column = 1
          }
        }
      }
      startPosition = endPosition + 1
    }
    if (end) {
      // Flush pending CR and buffered text, then mark EOF with `null`.
      if (atCarriageReturn) chunks.push(-5)
      if (buffer2) chunks.push(buffer2)
      chunks.push(null)
    }
    return chunks
  }
}
// node_modules/micromark/lib/postprocess.js
// Repeatedly subtokenize until all content/text chunks are expanded.
function postprocess(events) {
  while (!subtokenize(events)) {}
  return events
}
// node_modules/micromark-util-decode-numeric-character-reference/index.js
// Decode the digits of a numeric character reference (`&#…;` / `&#x…;`).
//
// `value2` is the digit string (without `#`/`x`), `base2` is 10 or 16.
// Returns U+FFFD REPLACEMENT CHARACTER for code points that are not valid
// in HTML: most C0/C1 controls, surrogates, noncharacters, and anything
// beyond U+10FFFF.
function decodeNumericCharacterReference(value2, base2) {
  const code = Number.parseInt(value2, base2)
  if (
    Number.isNaN(code) || // defensive: non-numeric input
    code < 9 ||
    code === 11 ||
    (code > 13 && code < 32) ||
    (code > 126 && code < 160) ||
    (code > 55295 && code < 57344) ||
    (code > 64975 && code < 65008) ||
    (code & 65535) === 65535 ||
    (code & 65535) === 65534 ||
    code > 1114111
  ) {
    return "\uFFFD"
  }
  // BUG FIX: `String.fromCharCode` truncates astral code points (> U+FFFF),
  // so e.g. `&#x1F600;` decoded to U+F600 instead of the emoji. Use
  // `fromCodePoint`, which handles the full Unicode range.
  return String.fromCodePoint(code)
}
// node_modules/micromark-util-decode-string/index.js
var characterEscapeOrReference =
  /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi
// Decode markdown character escapes (`\*`) and references (`&amp;`, `&#65;`).
function decodeString(value2) {
  return value2.replace(characterEscapeOrReference, decode)
}
// Replacer: `$1` is an escaped ASCII punctuation char, `$2` a reference body.
function decode($0, $1, $2) {
  if ($1) {
    // Backslash escape: the character stands for itself.
    return $1
  }
  if ($2.charCodeAt(0) === 35) {
    // Numeric reference: `#…` (decimal) or `#x…`/`#X…` (hexadecimal).
    const second = $2.charCodeAt(1)
    const isHex = second === 120 || second === 88
    const digits = $2.slice(isHex ? 2 : 1)
    return decodeNumericCharacterReference(digits, isHex ? 16 : 10)
  }
  // Named entity; keep the original text when unknown.
  return decodeEntity($2) || $0
}
// node_modules/unist-util-stringify-position/index.js
var own2 = {}.hasOwnProperty
// Stringify a unist node, position, or point as `line:column[-line:column]`,
// defaulting missing line/column values to 1.
function stringifyPosition(value2) {
  if (!value2 || typeof value2 !== "object") {
    return ""
  }
  // A node: use its `position` field.
  if (own2.call(value2, "position") || own2.call(value2, "type")) {
    return position(value2.position)
  }
  // A position: a `start`/`end` pair of points.
  if (own2.call(value2, "start") || own2.call(value2, "end")) {
    return position(value2)
  }
  // A single point.
  if (own2.call(value2, "line") || own2.call(value2, "column")) {
    return point(value2)
  }
  return ""
}
function point(point2) {
  const line = index(point2 && point2.line)
  const column = index(point2 && point2.column)
  return `${line}:${column}`
}
function position(pos) {
  const start = point(pos && pos.start)
  const end = point(pos && pos.end)
  return `${start}-${end}`
}
function index(value2) {
  if (typeof value2 === "number" && value2) {
    return value2
  }
  return 1
}
// node_modules/mdast-util-from-markdown/lib/index.js
var own3 = {}.hasOwnProperty
// Parse markdown (string or buffer) into an mdast syntax tree.
var fromMarkdown = function (value2, encoding, options) {
  if (typeof encoding !== "string") {
    // Called as (value, options): shift arguments.
    options = encoding
    encoding = void 0
  }
  // preprocess -> tokenize (document) -> postprocess -> compile to mdast.
  return compiler(options)(
    postprocess(
      parse2(options).document().write(preprocess()(value2, encoding, true)),
    ),
  )
}
function compiler(options = {}) {
const config = configure(
{
transforms: [],
canContainEols: [
"emphasis",
"fragment",
"heading",
"paragraph",
"strong",
],
enter: {
autolink: opener2(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener2(heading),
blockQuote: opener2(blockQuote2),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener2(codeFlow),
codeFencedFenceInfo: buffer2,
codeFencedFenceMeta: buffer2,
codeIndented: opener2(codeFlow, buffer2),
codeText: opener2(codeText2, buffer2),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener2(definition2),
definitionDestinationString: buffer2,
definitionLabelString: buffer2,
definitionTitleString: buffer2,
emphasis: opener2(emphasis),
hardBreakEscape: opener2(hardBreak),
hardBreakTrailing: opener2(hardBreak),
htmlFlow: opener2(html, buffer2),
htmlFlowData: onenterdata,
htmlText: opener2(html, buffer2),
htmlTextData: onenterdata,
image: opener2(image),
label: buffer2,
link: opener2(link),
listItem: opener2(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener2(list2, onenterlistordered),
listUnordered: opener2(list2),
paragraph: opener2(paragraph),
reference: onenterreference,
referenceString: buffer2,
resourceDestinationString: buffer2,
resourceTitleString: buffer2,
setextHeading: opener2(heading),
strong: opener2(strong),
thematicBreak: opener2(thematicBreak2),
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer(),
},
},
options.mdastExtensions || [],
)
const data = {}
return compile
function compile(events) {
let tree = {
type: "root",
children: [],
}
const stack = [tree]
const tokenStack = []
const listStack = []
const context = {
stack,
tokenStack,
config,
enter,
exit: exit2,
buffer: buffer2,
resume,
setData,
getData,
}
let index2 = -1
while (++index2 < events.length) {
if (
events[index2][1].type === "listOrdered" ||
events[index2][1].type === "listUnordered"
) {
if (events[index2][0] === "enter") {
listStack.push(index2)
} else {
const tail = listStack.pop()
index2 = prepareList(events, tail, index2)
}
}
}
index2 = -1
while (++index2 < events.length) {
const handler2 = config[events[index2][0]]
if (own3.call(handler2, events[index2][1].type)) {
handler2[events[index2][1].type].call(
Object.assign(
{
sliceSerialize: events[index2][2].sliceSerialize,
},
context,
),
events[index2][1],
)
}
}
if (tokenStack.length > 0) {
throw new Error(
"Cannot close document, a token (`" +
tokenStack[tokenStack.length - 1].type +
"`, " +
stringifyPosition({
start: tokenStack[tokenStack.length - 1].start,
end: tokenStack[tokenStack.length - 1].end,
}) +
") is still open",
)
}
tree.position = {
start: point2(
events.length > 0
? events[0][1].start
: {
line: 1,
column: 1,
offset: 0,
},
),
end: point2(
events.length > 0
? events[events.length - 2][1].end
: {
line: 1,
column: 1,
offset: 0,
},
),
}
index2 = -1
while (++index2 < config.transforms.length) {
tree = config.transforms[index2](tree) || tree
}
return tree
}
function prepareList(events, start, length) {
let index2 = start - 1
let containerBalance = -1
let listSpread = false
let listItem2
let lineIndex
let firstBlankLineIndex
let atMarker
while (++index2 <= length) {
const event = events[index2]
if (
event[1].type === "listUnordered" ||
event[1].type === "listOrdered" ||
event[1].type === "blockQuote"
) {
if (event[0] === "enter") {
containerBalance++
} else {
containerBalance--
}
atMarker = void 0
} else if (event[1].type === "lineEndingBlank") {
if (event[0] === "enter") {
if (
listItem2 &&
!atMarker &&
!containerBalance &&
!firstBlankLineIndex
) {
firstBlankLineIndex = index2
}
atMarker = void 0
}
} else if (
event[1].type === "linePrefix" ||
event[1].type === "listItemValue" ||
event[1].type === "listItemMarker" ||
event[1].type === "listItemPrefix" ||
event[1].type === "listItemPrefixWhitespace"
) {
} else {
atMarker = void 0
}
if (
(!containerBalance &&
event[0] === "enter" &&
event[1].type === "listItemPrefix") ||
(containerBalance === -1 &&
event[0] === "exit" &&
(event[1].type === "listUnordered" ||
event[1].type === "listOrdered"))
) {
if (listItem2) {
let tailIndex = index2
lineIndex = void 0
while (tailIndex--) {
const tailEvent = events[tailIndex]
if (
tailEvent[1].type === "lineEnding" ||
tailEvent[1].type === "lineEndingBlank"
) {
if (tailEvent[0] === "exit") continue
if (lineIndex) {
events[lineIndex][1].type = "lineEndingBlank"
listSpread = true
}
tailEvent[1].type = "lineEnding"
lineIndex = tailIndex
} else if (
tailEvent[1].type === "linePrefix" ||
tailEvent[1].type === "blockQuotePrefix" ||
tailEvent[1].type === "blockQuotePrefixWhitespace" ||
tailEvent[1].type === "blockQuoteMarker" ||
tailEvent[1].type === "listItemIndent"
) {
} else {
break
}
}
if (
firstBlankLineIndex &&
(!lineIndex || firstBlankLineIndex < lineIndex)
) {
listItem2._spread = true
}
listItem2.end = Object.assign(
{},
lineIndex ? events[lineIndex][1].start : event[1].end,
)
events.splice(lineIndex || index2, 0, ["exit", listItem2, event[2]])
index2++
length++
}
if (event[1].type === "listItemPrefix") {
listItem2 = {
type: "listItem",
_spread: false,
start: Object.assign({}, event[1].start),
}
events.splice(index2, 0, ["enter", listItem2, event[2]])
index2++
length++
firstBlankLineIndex = void 0
atMarker = true
}
}
}
events[start][1]._spread = listSpread
return length
}
function setData(key, value2) {
data[key] = value2
}
function getData(key) {
return data[key]
}
function point2(d) {
return {
line: d.line,
column: d.column,
offset: d.offset,
}
}
function opener2(create2, and) {
return open
function open(token) {
enter.call(this, create2(token), token)
if (and) and.call(this, token)
}
}
function buffer2() {
this.stack.push({
type: "fragment",
children: [],
})
}
function enter(node, token) {
const parent = this.stack[this.stack.length - 1]
parent.children.push(node)
this.stack.push(node)
this.tokenStack.push(token)
node.position = {
start: point2(token.start),
}
return node
}
function closer(and) {
return close2
function close2(token) {
if (and) and.call(this, token)
exit2.call(this, token)
}
}
function exit2(token) {
const node = this.stack.pop()
const open = this.tokenStack.pop()
if (!open) {
throw new Error(
"Cannot close `" +
token.type +
"` (" +
stringifyPosition({
start: token.start,
end: token.end,
}) +
"): it\u2019s not open",
)
} else if (open.type !== token.type) {
throw new Error(
"Cannot close `" +
token.type +
"` (" +
stringifyPosition({
start: token.start,
end: token.end,
}) +
"): a different token (`" +
open.type +
"`, " +
stringifyPosition({
start: open.start,
end: open.end,
}) +
") is open",
)
}
node.position.end = point2(token.end)
return node
}
function resume() {
return toString(this.stack.pop())
}
function onenterlistordered() {
setData("expectingFirstListItemValue", true)
}
function onenterlistitemvalue(token) {
if (getData("expectingFirstListItemValue")) {
const ancestor = this.stack[this.stack.length - 2]
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10)
setData("expectingFirstListItemValue")
}
}
function onexitcodefencedfenceinfo() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.lang = data2
}
function onexitcodefencedfencemeta() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.meta = data2
}
function onexitcodefencedfence() {
if (getData("flowCodeInside")) return
this.buffer()
setData("flowCodeInside", true)
}
function onexitcodefenced() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "")
setData("flowCodeInside")
}
function onexitcodeindented() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data2.replace(/(\r?\n|\r)$/g, "")
}
function onexitdefinitionlabelstring(token) {
const label = this.resume()
const node = this.stack[this.stack.length - 1]
node.label = label
node.identifier = normalizeIdentifier(
this.sliceSerialize(token),
).toLowerCase()
}
function onexitdefinitiontitlestring() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.title = data2
}
function onexitdefinitiondestinationstring() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.url = data2
}
function onexitatxheadingsequence(token) {
const node = this.stack[this.stack.length - 1]
if (!node.depth) {
const depth = this.sliceSerialize(token).length
node.depth = depth
}
}
function onexitsetextheadingtext() {
setData("setextHeadingSlurpLineEnding", true)
}
function onexitsetextheadinglinesequence(token) {
const node = this.stack[this.stack.length - 1]
node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 1 : 2
}
function onexitsetextheading() {
setData("setextHeadingSlurpLineEnding")
}
function onenterdata(token) {
const parent = this.stack[this.stack.length - 1]
let tail = parent.children[parent.children.length - 1]
if (!tail || tail.type !== "text") {
tail = text3()
tail.position = {
start: point2(token.start),
}
parent.children.push(tail)
}
this.stack.push(tail)
}
function onexitdata(token) {
const tail = this.stack.pop()
tail.value += this.sliceSerialize(token)
tail.position.end = point2(token.end)
}
function onexitlineending(token) {
const context = this.stack[this.stack.length - 1]
if (getData("atHardBreak")) {
const tail = context.children[context.children.length - 1]
tail.position.end = point2(token.end)
setData("atHardBreak")
return
}
if (
!getData("setextHeadingSlurpLineEnding") &&
config.canContainEols.includes(context.type)
) {
onenterdata.call(this, token)
onexitdata.call(this, token)
}
}
function onexithardbreak() {
setData("atHardBreak", true)
}
function onexithtmlflow() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data2
}
function onexithtmltext() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data2
}
function onexitcodetext() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.value = data2
}
function onexitlink() {
const context = this.stack[this.stack.length - 1]
if (getData("inReference")) {
context.type += "Reference"
context.referenceType = getData("referenceType") || "shortcut"
delete context.url
delete context.title
} else {
delete context.identifier
delete context.label
}
setData("referenceType")
}
function onexitimage() {
const context = this.stack[this.stack.length - 1]
if (getData("inReference")) {
context.type += "Reference"
context.referenceType = getData("referenceType") || "shortcut"
delete context.url
delete context.title
} else {
delete context.identifier
delete context.label
}
setData("referenceType")
}
function onexitlabeltext(token) {
const ancestor = this.stack[this.stack.length - 2]
const string3 = this.sliceSerialize(token)
ancestor.label = decodeString(string3)
ancestor.identifier = normalizeIdentifier(string3).toLowerCase()
}
function onexitlabel() {
const fragment = this.stack[this.stack.length - 1]
const value2 = this.resume()
const node = this.stack[this.stack.length - 1]
setData("inReference", true)
if (node.type === "link") {
node.children = fragment.children
} else {
node.alt = value2
}
}
function onexitresourcedestinationstring() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.url = data2
}
function onexitresourcetitlestring() {
const data2 = this.resume()
const node = this.stack[this.stack.length - 1]
node.title = data2
}
function onexitresource() {
setData("inReference")
}
function onenterreference() {
setData("referenceType", "collapsed")
}
function onexitreferencestring(token) {
const label = this.resume()
const node = this.stack[this.stack.length - 1]
node.label = label
node.identifier = normalizeIdentifier(
this.sliceSerialize(token),
).toLowerCase()
setData("referenceType", "full")
}
function onexitcharacterreferencemarker(token) {
setData("characterReferenceType", token.type)
}
function onexitcharacterreferencevalue(token) {
const data2 = this.sliceSerialize(token)
const type = getData("characterReferenceType")
let value2
if (type) {
value2 = decodeNumericCharacterReference(
data2,
type === "characterReferenceMarkerNumeric" ? 10 : 16,
)
setData("characterReferenceType")
} else {
value2 = decodeEntity(data2)
}
const tail = this.stack.pop()
tail.value += value2
tail.position.end = point2(token.end)
}
function onexitautolinkprotocol(token) {
onexitdata.call(this, token)
const node = this.stack[this.stack.length - 1]
node.url = this.sliceSerialize(token)
}
function onexitautolinkemail(token) {
onexitdata.call(this, token)
const node = this.stack[this.stack.length - 1]
node.url = "mailto:" + this.sliceSerialize(token)
}
function blockQuote2() {
return {
type: "blockquote",
children: [],
}
}
function codeFlow() {
return {
type: "code",
lang: null,
meta: null,
value: "",
}
}
function codeText2() {
return {
type: "inlineCode",
value: "",
}
}
function definition2() {
return {
type: "definition",
identifier: "",
label: null,
title: null,
url: "",
}
}
function emphasis() {
return {
type: "emphasis",
children: [],
}
}
function heading() {
return {
type: "heading",
depth: void 0,
children: [],
}
}
function hardBreak() {
return {
type: "break",
}
}
function html() {
return {
type: "html",
value: "",
}
}
function image() {
return {
type: "image",
title: null,
url: "",
alt: null,
}
}
function link() {
return {
type: "link",
title: null,
url: "",
children: [],
}
}
function list2(token) {
return {
type: "list",
ordered: token.type === "listOrdered",
start: null,
spread: token._spread,
children: [],
}
}
function listItem(token) {
return {
type: "listItem",
spread: token._spread,
checked: null,
children: [],
}
}
function paragraph() {
return {
type: "paragraph",
children: [],
}
}
function strong() {
return {
type: "strong",
children: [],
}
}
function text3() {
return {
type: "text",
value: "",
}
}
function thematicBreak2() {
return {
type: "thematicBreak",
}
}
}
// Merge a (possibly nested) list of mdast extensions into `combined`.
function configure(combined, extensions) {
  for (const value2 of extensions) {
    if (Array.isArray(value2)) {
      // Arrays of extensions may nest arbitrarily: recurse.
      configure(combined, value2)
    } else {
      extension(combined, value2)
    }
  }
  return combined
}
// Merge one extension object field-by-field: list-valued fields are
// concatenated, handler maps are shallow-merged.
function extension(combined, extension2) {
  for (const key in extension2) {
    if (!own3.call(extension2, key)) continue
    const isList = key === "canContainEols" || key === "transforms"
    const existing = own3.call(combined, key) ? combined[key] : void 0
    const left = existing || (combined[key] = isList ? [] : {})
    const right = extension2[key]
    if (!right) continue
    if (isList) {
      combined[key] = [...left, ...right]
    } else {
      Object.assign(left, right)
    }
  }
}
// node_modules/remark-parse/lib/index.js
/**
 * Remark plugin that adds a markdown parser. The parser merges processor
 * settings, plugin options, and any registered micromark/mdast extensions.
 */
function remarkParse(options) {
  const parser = (doc) => {
    const settings = this.data("settings")
    const configuration = Object.assign({}, settings, options, {
      extensions: this.data("micromarkExtensions") || [],
      mdastExtensions: this.data("fromMarkdownExtensions") || [],
    })
    return fromMarkdown(doc, configuration)
  }
  Object.assign(this, { Parser: parser })
}
// node_modules/remark-parse/index.js
var remark_parse_default = remarkParse
// node_modules/bail/index.js
// Rethrow `error` if given; a no-op for falsy values.
function bail(error) {
  if (error) throw error
}
// node_modules/unified/lib/index.js
var import_is_buffer2 = __toModule(require_is_buffer())
var import_extend = __toModule(require_extend())
// node_modules/is-plain-obj/index.js
// Whether `value2` is a plain object: `{...}` or `Object.create(null)`,
// but not arrays, class instances, dates, etc.
function isPlainObject(value2) {
  const tag = Object.prototype.toString.call(value2)
  if (tag !== "[object Object]") {
    return false
  }
  const proto = Object.getPrototypeOf(value2)
  return proto === null || proto === Object.prototype
}
// node_modules/trough/index.js
/**
 * Create a middleware pipeline: `use` registers middleware functions and
 * `run` threads values through them in order, finishing with a callback.
 */
function trough() {
  const fns = []
  const pipeline = { run, use }
  return pipeline
  // Run the pipeline. All arguments but the last are input values; the last
  // must be a callback invoked with `(error, ...values)` when done.
  function run(...values) {
    let middlewareIndex = -1
    const callback = values.pop()
    if (typeof callback !== "function") {
      throw new TypeError("Expected function as last argument, not " + callback)
    }
    next(null, ...values)
    // Advance to the next middleware with the (possibly updated) values.
    function next(error, ...output) {
      const fn = fns[++middlewareIndex]
      let index2 = -1
      if (error) {
        callback(error)
        return
      }
      // Values the previous middleware did not replace (null/undefined)
      // retain their previous value.
      while (++index2 < values.length) {
        if (output[index2] === null || output[index2] === void 0) {
          output[index2] = values[index2]
        }
      }
      values = output
      if (fn) {
        wrap(fn, next)(...output)
      } else {
        callback(null, ...output)
      }
    }
  }
  // Register a middleware function; returns the pipeline for chaining.
  // (The `middelware` misspelling is preserved from the original library.)
  function use(middelware) {
    if (typeof middelware !== "function") {
      throw new TypeError(
        "Expected `middelware` to be a function, not " + middelware,
      )
    }
    fns.push(middelware)
    return pipeline
  }
}
/**
 * Wrap `middleware` so that sync return values, thrown errors, returned
 * promises, returned errors, and node-style callbacks are all funnelled
 * into `callback(error, ...output)` exactly once.
 */
function wrap(middleware, callback) {
  let called
  return wrapped
  function wrapped(...parameters) {
    // If the middleware declares more parameters than were passed, it
    // expects a `done` callback as its final argument.
    const fnExpectsCallback = middleware.length > parameters.length
    let result
    if (fnExpectsCallback) {
      parameters.push(done)
    }
    try {
      result = middleware(...parameters)
    } catch (error) {
      const exception = error
      // A throw after `done` already fired cannot be reported through the
      // callback again, so rethrow it.
      if (fnExpectsCallback && called) {
        throw exception
      }
      return done(exception)
    }
    // Only interpret the return value when no callback was handed over.
    if (!fnExpectsCallback) {
      if (result instanceof Promise) {
        result.then(then, done)
      } else if (result instanceof Error) {
        done(result)
      } else {
        then(result)
      }
    }
  }
  // Report the outcome once; later calls are ignored.
  function done(error, ...output) {
    if (!called) {
      called = true
      callback(error, ...output)
    }
  }
  // Success path for sync values and resolved promises.
  function then(value2) {
    done(null, value2)
  }
}
// node_modules/vfile/lib/index.js
var import_is_buffer = __toModule(require_is_buffer())
// node_modules/vfile-message/index.js
/**
 * Message (warning/error) associated with a file, carrying a position and
 * an optional `source:ruleId` origin.
 */
var VFileMessage = class extends Error {
  constructor(reason, place, origin) {
    super()
    const parts = [null, null]
    let position2 = {
      start: { line: null, column: null },
      end: { line: null, column: null },
    }
    // Allow `place` to be skipped: `(reason, origin)`.
    if (typeof place === "string") {
      origin = place
      place = null
    }
    // Split `origin` into `source:ruleId` (ruleId only when no colon).
    if (typeof origin === "string") {
      const index2 = origin.indexOf(":")
      if (index2 === -1) {
        parts[1] = origin
      } else {
        parts[0] = origin.slice(0, index2)
        parts[1] = origin.slice(index2 + 1)
      }
    }
    if (place) {
      if ("type" in place || "position" in place) {
        // Node: use its position, if any.
        if (place.position) {
          position2 = place.position
        }
      } else if ("start" in place || "end" in place) {
        // Position.
        position2 = place
      } else if ("line" in place || "column" in place) {
        // Point.
        position2.start = place
      }
    }
    this.name = stringifyPosition(place) || "1:1"
    this.message = typeof reason === "object" ? reason.message : reason
    this.stack = typeof reason === "object" ? reason.stack : ""
    this.reason = this.message
    this.line = position2.start.line
    this.column = position2.start.column
    this.source = parts[0]
    this.ruleId = parts[1]
    this.position = position2
    this.file
    this.fatal
    this.url
    this.note
  }
}
// Prototype-level defaults; the constructor overwrites these per instance.
VFileMessage.prototype.file = ""
VFileMessage.prototype.name = ""
VFileMessage.prototype.reason = ""
VFileMessage.prototype.message = ""
VFileMessage.prototype.stack = ""
VFileMessage.prototype.fatal = null
VFileMessage.prototype.column = null
VFileMessage.prototype.line = null
VFileMessage.prototype.source = null
VFileMessage.prototype.ruleId = null
VFileMessage.prototype.position = null
// node_modules/vfile/lib/minpath.browser.js
var path = { basename, dirname, extname, join, sep: "/" }
/**
 * POSIX `path.basename`: the final segment of `path2`, optionally with a
 * matching trailing `ext` removed. Mirrors Node's backward-scan algorithm.
 */
function basename(path2, ext) {
  if (ext !== void 0 && typeof ext !== "string") {
    throw new TypeError('"ext" argument must be a string')
  }
  assertPath(path2)
  let start = 0
  let end = -1
  let index2 = path2.length
  let seenNonSlash
  // Fast path: no usable extension — just find the last non-slash run.
  if (ext === void 0 || ext.length === 0 || ext.length > path2.length) {
    while (index2--) {
      if (path2.charCodeAt(index2) === 47) {
        // `/` after the segment was found: everything after it is the name.
        if (seenNonSlash) {
          start = index2 + 1
          break
        }
      } else if (end < 0) {
        // First non-slash from the end: the name ends here.
        seenNonSlash = true
        end = index2 + 1
      }
    }
    return end < 0 ? "" : path2.slice(start, end)
  }
  if (ext === path2) {
    return ""
  }
  let firstNonSlashEnd = -1
  let extIndex = ext.length - 1
  // Scan backwards, matching `ext` from its end while locating the segment.
  while (index2--) {
    if (path2.charCodeAt(index2) === 47) {
      if (seenNonSlash) {
        start = index2 + 1
        break
      }
    } else {
      if (firstNonSlashEnd < 0) {
        seenNonSlash = true
        firstNonSlashEnd = index2 + 1
      }
      if (extIndex > -1) {
        if (path2.charCodeAt(index2) === ext.charCodeAt(extIndex--)) {
          if (extIndex < 0) {
            // Whole extension matched: the basename ends before it.
            end = index2
          }
        } else {
          // Mismatch: stop matching; keep the full segment end.
          extIndex = -1
          end = firstNonSlashEnd
        }
      }
    }
  }
  if (start === end) {
    // The whole segment is the extension: keep the segment instead.
    end = firstNonSlashEnd
  } else if (end < 0) {
    end = path2.length
  }
  return path2.slice(start, end)
}
/**
 * POSIX `path.dirname`: everything before the last `/` that is followed by
 * a non-slash character; `.` when there is no directory part.
 */
function dirname(path2) {
  assertPath(path2)
  if (path2.length === 0) {
    return "."
  }
  let end = -1
  let index2 = path2.length
  let seenNonSlash = false
  // Scan backwards (never inspecting position 0) for the slash that ends
  // the directory part, skipping any trailing slashes first.
  while (--index2) {
    if (path2.charCodeAt(index2) === 47) {
      if (seenNonSlash) {
        end = index2
        break
      }
    } else {
      seenNonSlash = true
    }
  }
  if (end < 0) {
    // No separator found: root for absolute paths, `.` otherwise.
    return path2.charCodeAt(0) === 47 ? "/" : "."
  }
  if (end === 1 && path2.charCodeAt(0) === 47) {
    // Preserve a leading double slash (`//a` -> `//`).
    return "//"
  }
  return path2.slice(0, end)
}
/**
 * POSIX `path.extname`: the extension (from the last `.`) of the final
 * segment, or `""` for dotfiles, `..`, and names without a dot.
 */
function extname(path2) {
  assertPath(path2)
  let index2 = path2.length
  let end = -1
  let startPart = 0
  let startDot = -1
  // Distinguishes a leading dot (dotfile) from a real extension:
  // 0 = nothing but dots seen left of the dot so far, 1 = multiple dots,
  // -1 = a non-dot character appeared left of the dot.
  let preDotState = 0
  let unmatchedSlash
  while (index2--) {
    const code = path2.charCodeAt(index2)
    if (code === 47) {
      // Start of the final segment reached.
      if (unmatchedSlash) {
        startPart = index2 + 1
        break
      }
      continue
    }
    if (end < 0) {
      // First non-slash from the end: the segment ends here.
      unmatchedSlash = true
      end = index2 + 1
    }
    if (code === 46) {
      // Remember the right-most dot (scanned first).
      if (startDot < 0) {
        startDot = index2
      } else if (preDotState !== 1) {
        preDotState = 1
      }
    } else if (startDot > -1) {
      preDotState = -1
    }
  }
  if (
    startDot < 0 ||
    end < 0 ||
    // Dotfile-like names (`.bashrc`) and `..` yield no extension.
    preDotState === 0 ||
    (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1)
  ) {
    return ""
  }
  return path2.slice(startDot, end)
}
/**
 * POSIX `path.join`: concatenate non-empty segments with `/` and normalize
 * the result. Returns `.` when nothing was joined.
 */
function join(...segments) {
  let joined
  for (const segment of segments) {
    // Every segment is validated, including empty ones.
    assertPath(segment)
    if (segment) {
      joined = joined === void 0 ? segment : joined + "/" + segment
    }
  }
  return joined === void 0 ? "." : normalize(joined)
}
/**
 * POSIX `path.normalize`: collapse `.`/`..`/duplicate slashes while keeping
 * a leading slash (absolute) and a trailing slash when present.
 */
function normalize(path2) {
  assertPath(path2)
  const absolute = path2.charCodeAt(0) === 47
  const trailingSlash = path2.charCodeAt(path2.length - 1) === 47
  // `..` may only climb upwards in relative paths.
  let value2 = normalizeString(path2, !absolute)
  if (value2.length === 0 && !absolute) {
    value2 = "."
  }
  if (value2.length > 0 && trailingSlash) {
    value2 += "/"
  }
  return absolute ? "/" + value2 : value2
}
/**
 * Resolve `.` and `..` segments and collapse duplicate slashes. When
 * `allowAboveRoot` is set (relative paths), `..` segments that cannot be
 * popped are kept.
 */
function normalizeString(path2, allowAboveRoot) {
  let result = ""
  let lastSegmentLength = 0
  let lastSlash = -1
  // Consecutive-dot count of the current segment; -1 once the segment
  // contains anything other than dots.
  let dots = 0
  let index2 = -1
  let code
  let lastSlashIndex
  while (++index2 <= path2.length) {
    if (index2 < path2.length) {
      code = path2.charCodeAt(index2)
    } else if (code === 47) {
      break
    } else {
      // Virtual trailing slash so the final segment is processed too.
      code = 47
    }
    if (code === 47) {
      if (lastSlash === index2 - 1 || dots === 1) {
        // Empty segment (`//`) or `.`: drop it.
      } else if (lastSlash !== index2 - 1 && dots === 2) {
        // `..`: pop the previous segment unless `result` already ends in
        // an unpoppable `..`.
        if (
          result.length < 2 ||
          lastSegmentLength !== 2 ||
          result.charCodeAt(result.length - 1) !== 46 ||
          result.charCodeAt(result.length - 2) !== 46
        ) {
          if (result.length > 2) {
            lastSlashIndex = result.lastIndexOf("/")
            if (lastSlashIndex !== result.length - 1) {
              if (lastSlashIndex < 0) {
                result = ""
                lastSegmentLength = 0
              } else {
                result = result.slice(0, lastSlashIndex)
                lastSegmentLength = result.length - 1 - result.lastIndexOf("/")
              }
              lastSlash = index2
              dots = 0
              continue
            }
          } else if (result.length > 0) {
            result = ""
            lastSegmentLength = 0
            lastSlash = index2
            dots = 0
            continue
          }
        }
        if (allowAboveRoot) {
          // Nothing to pop: keep the `..` for relative climbing.
          result = result.length > 0 ? result + "/.." : ".."
          lastSegmentLength = 2
        }
      } else {
        // Ordinary segment: append it.
        if (result.length > 0) {
          result += "/" + path2.slice(lastSlash + 1, index2)
        } else {
          result = path2.slice(lastSlash + 1, index2)
        }
        lastSegmentLength = index2 - lastSlash - 1
      }
      lastSlash = index2
      dots = 0
    } else if (code === 46 && dots > -1) {
      dots++
    } else {
      dots = -1
    }
  }
  return result
}
// Mirror Node's error for non-string paths.
function assertPath(path2) {
  if (typeof path2 === "string") {
    return
  }
  throw new TypeError(
    "Path must be a string. Received " + JSON.stringify(path2),
  )
}
// node_modules/vfile/lib/minproc.browser.js
// Browser stand-in for `process`: the working directory is always `/`.
var proc = { cwd }
function cwd() {
  return "/"
}
// node_modules/vfile/lib/minurl.shared.js
/**
 * Loose check for a WHATWG `URL`-like object. Returns the raw result of
 * the truthiness chain (callers only use it as a boolean).
 */
function isUrl(fileURLOrPath) {
  if (fileURLOrPath === null || typeof fileURLOrPath !== "object") {
    return false
  }
  return fileURLOrPath.href && fileURLOrPath.origin
}
// node_modules/vfile/lib/minurl.browser.js
/**
 * Turn a `file:` URL (or string thereof) into a POSIX path. Throws
 * TypeErrors with Node-style `code` properties for invalid input.
 */
function urlToPath(path2) {
  // Build a TypeError carrying a Node-style error code.
  const fail = (message, code) => {
    const error = new TypeError(message)
    error.code = code
    return error
  }
  if (typeof path2 === "string") {
    path2 = new URL(path2)
  } else if (!isUrl(path2)) {
    throw fail(
      'The "path" argument must be of type string or an instance of URL. Received `' +
        path2 +
        "`",
      "ERR_INVALID_ARG_TYPE",
    )
  }
  if (path2.protocol !== "file:") {
    throw fail("The URL must be of scheme file", "ERR_INVALID_URL_SCHEME")
  }
  return getPathFromURLPosix(path2)
}
/**
 * Extract the POSIX path from a `file:` URL. Rejects URLs with a host and
 * percent-encoded slashes (which would smuggle extra separators).
 */
function getPathFromURLPosix(url) {
  if (url.hostname !== "") {
    const error = new TypeError(
      'File URL host must be "localhost" or empty on darwin',
    )
    error.code = "ERR_INVALID_FILE_URL_HOST"
    throw error
  }
  const pathname = url.pathname
  // Scan for `%2F` / `%2f` (an encoded `/`).
  for (let index2 = 0; index2 < pathname.length; index2++) {
    if (
      pathname.charCodeAt(index2) === 37 &&
      pathname.charCodeAt(index2 + 1) === 50
    ) {
      const third = pathname.charCodeAt(index2 + 2)
      if (third === 70 || third === 102) {
        const error = new TypeError(
          "File URL path must not include encoded / characters",
        )
        error.code = "ERR_INVALID_FILE_URL_PATH"
        throw error
      }
    }
  }
  return decodeURIComponent(pathname)
}
// node_modules/vfile/lib/index.js
var order = ["history", "path", "basename", "stem", "extname", "dirname"]
/**
 * Virtual file: a path history plus contents (`value`), metadata (`data`),
 * and collected lint `messages`. Accepts a string/buffer value, a URL, or
 * an options object.
 */
var VFile = class {
  constructor(value2) {
    let options
    if (!value2) {
      options = {}
    } else if (
      typeof value2 === "string" ||
      (0, import_is_buffer.default)(value2)
    ) {
      options = { value: value2 }
    } else if (isUrl(value2)) {
      options = { path: value2 }
    } else {
      options = value2
    }
    this.data = {}
    this.messages = []
    this.history = []
    this.cwd = proc.cwd()
    this.value
    this.stored
    this.result
    this.map
    // Apply path-ish options in dependency order (see `order`); e.g.
    // `stem` can only be assigned once `path` exists.
    let index2 = -1
    while (++index2 < order.length) {
      const prop2 = order[index2]
      if (prop2 in options && options[prop2] !== void 0) {
        this[prop2] = prop2 === "history" ? [...options[prop2]] : options[prop2]
      }
    }
    // Any remaining options are copied over as-is.
    let prop
    for (prop in options) {
      if (!order.includes(prop)) this[prop] = options[prop]
    }
  }
  // Current path: the most recent entry in `history`.
  get path() {
    return this.history[this.history.length - 1]
  }
  set path(path2) {
    if (isUrl(path2)) {
      path2 = urlToPath(path2)
    }
    assertNonEmpty(path2, "path")
    // Only record actual changes (renames) in the history.
    if (this.path !== path2) {
      this.history.push(path2)
    }
  }
  // Directory part of `path`, or undefined without a path.
  get dirname() {
    return typeof this.path === "string" ? path.dirname(this.path) : void 0
  }
  set dirname(dirname2) {
    assertPath2(this.basename, "dirname")
    this.path = path.join(dirname2 || "", this.basename)
  }
  // Final path segment including extension, or undefined without a path.
  get basename() {
    return typeof this.path === "string" ? path.basename(this.path) : void 0
  }
  set basename(basename2) {
    assertNonEmpty(basename2, "basename")
    assertPart(basename2, "basename")
    this.path = path.join(this.dirname || "", basename2)
  }
  // Extension (with dot), or undefined without a path.
  get extname() {
    return typeof this.path === "string" ? path.extname(this.path) : void 0
  }
  set extname(extname2) {
    assertPart(extname2, "extname")
    assertPath2(this.dirname, "extname")
    if (extname2) {
      if (extname2.charCodeAt(0) !== 46) {
        throw new Error("`extname` must start with `.`")
      }
      if (extname2.includes(".", 1)) {
        throw new Error("`extname` cannot contain multiple dots")
      }
    }
    this.path = path.join(this.dirname, this.stem + (extname2 || ""))
  }
  // Basename without extension, or undefined without a path.
  get stem() {
    return typeof this.path === "string"
      ? path.basename(this.path, this.extname)
      : void 0
  }
  set stem(stem) {
    assertNonEmpty(stem, "stem")
    assertPart(stem, "stem")
    this.path = path.join(this.dirname || "", stem + (this.extname || ""))
  }
  // Serialize the contents; `encoding` only matters for buffer values.
  toString(encoding) {
    return (this.value || "").toString(encoding)
  }
  // Attach a non-fatal message (warning) to the file.
  message(reason, place, origin) {
    const message = new VFileMessage(reason, place, origin)
    if (this.path) {
      message.name = this.path + ":" + message.name
      message.file = this.path
    }
    message.fatal = false
    this.messages.push(message)
    return message
  }
  // Like `message`, but `fatal: null` marks it as informational.
  info(reason, place, origin) {
    const message = this.message(reason, place, origin)
    message.fatal = null
    return message
  }
  // Like `message`, but the message is fatal and thrown.
  fail(reason, place, origin) {
    const message = this.message(reason, place, origin)
    message.fatal = true
    throw message
  }
}
// A single path component must not itself contain the separator.
function assertPart(part, name) {
  if (!part) return
  if (part.includes(path.sep)) {
    throw new Error(
      "`" + name + "` cannot be a path: did not expect `" + path.sep + "`",
    )
  }
}
// Guard against clearing a required field with an empty/falsy value.
function assertNonEmpty(part, name) {
  if (part) return
  throw new Error("`" + name + "` cannot be empty")
}
// Derived fields (`dirname`, `extname`, …) require an existing path.
function assertPath2(path2, name) {
  if (path2) return
  throw new Error("Setting `" + name + "` requires `path` to be set too")
}
// node_modules/unified/lib/index.js
// Frozen singleton processor; calling it yields a fresh unfrozen copy.
var unified = base().freeze()
var own4 = {}.hasOwnProperty
function base() {
const transformers = trough()
const attachers = []
let namespace = {}
let frozen
let freezeIndex = -1
processor.data = data
processor.Parser = void 0
processor.Compiler = void 0
processor.freeze = freeze
processor.attachers = attachers
processor.use = use
processor.parse = parse3
processor.stringify = stringify
processor.run = run
processor.runSync = runSync
processor.process = process
processor.processSync = processSync
return processor
function processor() {
const destination = base()
let index2 = -1
while (++index2 < attachers.length) {
destination.use(...attachers[index2])
}
destination.data((0, import_extend.default)(true, {}, namespace))
return destination
}
function data(key, value2) {
if (typeof key === "string") {
if (arguments.length === 2) {
assertUnfrozen("data", frozen)
namespace[key] = value2
return processor
}
return (own4.call(namespace, key) && namespace[key]) || null
}
if (key) {
assertUnfrozen("data", frozen)
namespace = key
return processor
}
return namespace
}
function freeze() {
if (frozen) {
return processor
}
while (++freezeIndex < attachers.length) {
const [attacher, ...options] = attachers[freezeIndex]
if (options[0] === false) {
continue
}
if (options[0] === true) {
options[1] = void 0
}
const transformer = attacher.call(processor, ...options)
if (typeof transformer === "function") {
transformers.use(transformer)
}
}
frozen = true
freezeIndex = Number.POSITIVE_INFINITY
return processor
}
// Registers plugins on this (unfrozen) processor. Accepts nothing
// (no-op), a plugin function with options, a preset object
// (`{plugins, settings}`), or an array mixing plugins/presets.
// Returns the processor for chaining.
function use(value2, ...options) {
  let settings
  assertUnfrozen("use", frozen)
  if (value2 === null || value2 === void 0) {
    // Empty call — valid, registers nothing.
  } else if (typeof value2 === "function") {
    addPlugin(value2, ...options)
  } else if (typeof value2 === "object") {
    if (Array.isArray(value2)) {
      addList(value2)
    } else {
      addPreset(value2)
    }
  } else {
    throw new TypeError("Expected usable value, not `" + value2 + "`")
  }
  if (settings) {
    // Presets may carry parser/compiler settings; merge them into the
    // shared namespace so the parser/compiler can read them later.
    namespace.settings = Object.assign(namespace.settings || {}, settings)
  }
  return processor
  // Dispatches one list entry: a plugin function, a
  // [plugin, ...options] tuple, or a nested preset object.
  function add(value3) {
    if (typeof value3 === "function") {
      addPlugin(value3)
    } else if (typeof value3 === "object") {
      if (Array.isArray(value3)) {
        const [plugin, ...options2] = value3
        addPlugin(plugin, ...options2)
      } else {
        addPreset(value3)
      }
    } else {
      throw new TypeError("Expected usable value, not `" + value3 + "`")
    }
  }
  // Registers a preset: its plugin list plus any settings it carries.
  function addPreset(result) {
    addList(result.plugins)
    if (result.settings) {
      settings = Object.assign(settings || {}, result.settings)
    }
  }
  // Registers each entry of a plugin list; null/undefined is a no-op.
  function addList(plugins) {
    let index2 = -1
    if (plugins === null || plugins === void 0) {
      // Nothing to add.
    } else if (Array.isArray(plugins)) {
      while (++index2 < plugins.length) {
        const thing = plugins[index2]
        add(thing)
      }
    } else {
      throw new TypeError("Expected a list of plugins, not `" + plugins + "`")
    }
  }
  // Stores a plugin with its options. Re-adding an existing plugin
  // replaces its options (plain-object options are deep-merged).
  function addPlugin(plugin, value3) {
    let index2 = -1
    let entry
    while (++index2 < attachers.length) {
      if (attachers[index2][0] === plugin) {
        entry = attachers[index2]
        break
      }
    }
    if (entry) {
      if (isPlainObject(entry[1]) && isPlainObject(value3)) {
        value3 = (0, import_extend.default)(true, entry[1], value3)
      }
      entry[1] = value3
    } else {
      // `arguments` preserves any extra options beyond `value3`.
      attachers.push([...arguments])
    }
  }
}
// Freezes the processor, wraps `doc` in a VFile, and parses it into a
// syntax tree with the configured Parser (class or plain function).
function parse3(doc) {
  processor.freeze()
  const file = vfile(doc)
  const { Parser } = processor
  assertParser("parse", Parser)
  return newable(Parser, "parse")
    ? new Parser(String(file), file).parse()
    : Parser(String(file), file)
}
// Freezes the processor and compiles `node` back to output with the
// configured Compiler (class or plain function), attached to `doc`.
function stringify(node, doc) {
  processor.freeze()
  const file = vfile(doc)
  const { Compiler } = processor
  assertCompiler("stringify", Compiler)
  assertNode(node)
  return newable(Compiler, "compile")
    ? new Compiler(node, file).compile()
    : Compiler(node, file)
}
// Runs all registered transformers over `node`. Dual API: with a
// callback, invokes callback(error, tree, file); without one, returns
// a Promise resolving to the transformed tree.
function run(node, doc, callback) {
  assertNode(node)
  processor.freeze()
  if (!callback && typeof doc === "function") {
    // run(node, callback) form: shift arguments.
    callback = doc
    doc = void 0
  }
  if (!callback) {
    return new Promise(executor)
  }
  executor(null, callback)
  // In callback mode `resolve` is null and `reject` holds the callback,
  // so errors flow to the right place in both modes.
  function executor(resolve, reject) {
    transformers.run(node, vfile(doc), done)
    function done(error, tree, file) {
      // Transformers may return nothing; fall back to the input node.
      tree = tree || node
      if (error) {
        reject(error)
      } else if (resolve) {
        resolve(tree)
      } else {
        callback(null, tree, file)
      }
    }
  }
}
// Synchronous variant of `run`: throws if any transformer is actually
// asynchronous (detected via the `complete` flag), or on error.
function runSync(node, file) {
  let result
  let complete
  const done = (error, tree) => {
    bail(error)
    result = tree
    complete = true
  }
  processor.run(node, file, done)
  assertDone("runSync", "run", complete)
  return result
}
// Full pipeline: parse `doc`, run transformers, then stringify the
// result back onto the file. With a callback, calls
// callback(error, file); without one, returns a Promise of the file.
function process(doc, callback) {
  processor.freeze()
  assertParser("process", processor.Parser)
  assertCompiler("process", processor.Compiler)
  if (!callback) {
    return new Promise(executor)
  }
  executor(null, callback)
  // In callback mode `resolve` is null and `reject` is the callback.
  function executor(resolve, reject) {
    const file = vfile(doc)
    processor.run(processor.parse(file), file, (error, tree, file2) => {
      if (error || !tree || !file2) {
        done(error)
      } else {
        const result = processor.stringify(tree, file2)
        if (result === void 0 || result === null) {
          // Compiler produced nothing; leave the file untouched.
        } else if (looksLikeAVFileValue(result)) {
          // Strings/buffers become the file's serialized value.
          file2.value = result
        } else {
          // Non-text compile results are attached as `result` instead.
          file2.result = result
        }
        done(error, file2)
      }
    })
    function done(error, file2) {
      if (error || !file2) {
        reject(error)
      } else if (resolve) {
        resolve(file2)
      } else {
        callback(null, file2)
      }
    }
  }
}
// Synchronous variant of `process`: returns the processed file, or
// throws if processing errored or turned out to be asynchronous.
function processSync(doc) {
  processor.freeze()
  assertParser("processSync", processor.Parser)
  assertCompiler("processSync", processor.Compiler)
  let complete
  const file = vfile(doc)
  processor.process(file, (error) => {
    complete = true
    bail(error)
  })
  assertDone("processSync", "process", complete)
  return file
}
}
// Heuristic: should `value2` be called with `new`? True for functions
// whose prototype carries own keys or the given method `name`.
function newable(value2, name) {
  const proto = typeof value2 === "function" && value2.prototype
  return proto && (keys(proto) || name in proto)
}
// True when `value2` has at least one own enumerable property.
function keys(value2) {
  for (const key in value2) {
    if (own4.call(value2, key)) {
      return true
    }
  }
  return false
}
// Throws unless `value2` is a usable Parser function/class.
function assertParser(name, value2) {
  if (typeof value2 === "function") {
    return
  }
  throw new TypeError(`Cannot \`${name}\` without \`Parser\``)
}
// Throws unless `value2` is a usable Compiler function/class.
function assertCompiler(name, value2) {
  if (typeof value2 === "function") {
    return
  }
  throw new TypeError(`Cannot \`${name}\` without \`Compiler\``)
}
// Guards mutating operations: throws once the processor is frozen.
function assertUnfrozen(name, frozen) {
  if (!frozen) {
    return
  }
  throw new Error(
    `Cannot call \`${name}\` on a frozen processor.\nCreate a new processor first, by calling it: use \`processor()\` instead of \`processor\`.`,
  )
}
// Throws unless `node` satisfies the minimal unist node contract:
// a plain object with a string `type`.
function assertNode(node) {
  if (isPlainObject(node) && typeof node.type === "string") {
    return
  }
  throw new TypeError("Expected node, got `" + node + "`")
}
// Throws when a *Sync API detected that its callback never ran
// synchronously — i.e. the underlying work was actually async.
function assertDone(name, asyncName, complete) {
  if (complete) {
    return
  }
  throw new Error(`\`${name}\` finished async. Use \`${asyncName}\` instead`)
}
// Coerces `value2` to a VFile, passing existing VFile-like values
// through untouched.
function vfile(value2) {
  if (looksLikeAVFile(value2)) {
    return value2
  }
  return new VFile(value2)
}
// Duck-types a VFile: any object exposing both `message` and
// `messages` members (own or inherited).
function looksLikeAVFile(value2) {
  if (!value2 || typeof value2 !== "object") {
    return false
  }
  return "message" in value2 && "messages" in value2
}
// A VFile "value" is serialized content: a string or a Buffer.
function looksLikeAVFileValue(value2) {
  if (typeof value2 === "string") {
    return true
  }
  return (0, import_is_buffer2.default)(value2)
}
// node_modules/annotatedtext-remark/out/index.js
// Default annotatedtext-remark options: delegate node handling to
// annotatedtext's defaults, and map interpreted markup to the same
// number of newlines so source line offsets stay aligned.
var defaults2 = {
  children: (node) => defaults.children(node),
  annotatetextnode: (node, text3) => defaults.annotatetextnode(node, text3),
  interpretmarkup(text3 = "") {
    const newlineCount = (text3.match(/\n/g) || []).length
    return "\n".repeat(newlineCount)
  },
  remarkoptions: {},
}
// Builds annotated text from Markdown, with YAML/TOML frontmatter
// support, using a remark-based unified parser.
function build2(text3, options = defaults2) {
  let processor = unified()
  processor = processor.use(remark_parse_default, options.remarkoptions)
  processor = processor.use(remarkFrontmatter, ["yaml", "toml"])
  return build(text3, processor.parse, options)
}
// lib/prepareMarkdown.mjs
// Converts Markdown into the JSON-serialized annotated-text payload
// expected by the LanguageTool API.
var prepareMarkdown = (text3) => {
  const annotated = build2(text3)
  return JSON.stringify(annotated)
}
var prepareMarkdown_default = prepareMarkdown
/*!
* Determine if an object is a Buffer
*
* @author Feross Aboukhadijeh
* @license MIT
*/
================================================
FILE: src/utils/stripStyles.js
================================================
/**
 * Strips terminal styles from string
 * (colors, weight etc.)
 *
 * @param {string} text any string
 * @returns {string} the text with ANSI/CSI escape sequences removed
 */
const stripStyles = (text) => {
  // eslint-disable-next-line no-control-regex
  const ansiPattern = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g
  return text.replace(ansiPattern, "")
}
module.exports = stripStyles
================================================
FILE: src/utils/stripStyles.test.js
================================================
const kleur = require("kleur")
const stripStyles = require("./stripStyles")

describe("Strips styles from console string", () => {
  it("strips colors", () => {
    expect(stripStyles(kleur.red("foo"))).toEqual("foo")
  })

  it("strips background colors", () => {
    expect(stripStyles(kleur.bgRed("foo"))).toEqual("foo")
  })

  it("strips font style", () => {
    // Stack several style modifiers to make sure every one is removed.
    const styled = kleur
      .bold()
      .italic()
      .underline()
      .strikethrough()
      .dim("foo")
    expect(stripStyles(styled)).toEqual("foo")
  })
})
================================================
FILE: src/utils/unzipFile.js
================================================
const decompress = require("decompress")
const decompressUnzip = require("decompress-unzip")

/**
 * Extracts a zip archive into the given folder.
 *
 * @param {string} pathToFile path to the zip archive
 * @param {string} outputFolder destination directory
 * @returns {Promise} resolves when extraction completes
 */
const unzipFile = (pathToFile, outputFolder) =>
  decompress(pathToFile, outputFolder, {
    plugins: [decompressUnzip()],
  })

module.exports = unzipFile
================================================
FILE: src/validators/languages.js
================================================
const languages = require("../../data/languages.json")
const languageOptions = [
"config",
"auto",
...languages.map((language) => language.longCode),
]
const isLanguage = (value) => {
return languageOptions.includes(value)
}
module.exports = {
languageOptions,
isLanguage,
}
================================================
FILE: src/validators/rules.js
================================================
const rules = require("../../data/rules.json")
const ruleOptions = rules.map((rule) => rule.id.toLowerCase())
const isRule = (value) => {
return ruleOptions.includes(value)
}
module.exports = {
ruleOptions,
isRule,
}
================================================
FILE: tsconfig.json
================================================
{
"include": ["src/index.js"],
"compilerOptions": {
"allowJs": true,
"declaration": true,
"emitDeclarationOnly": true,
"outDir": "src/types"
}
}