Added the files for this package.
This commit is contained in:
parent
b6e2272c4e
commit
6c65f91d73
28 changed files with 10913 additions and 1 deletions
13
.editorconfig
Normal file
13
.editorconfig
Normal file
|
@@ -0,0 +1,13 @@
|
||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
indent_style = tab
|
||||||
|
indent_size = 2
|
||||||
|
charset = utf-8
|
||||||
|
insert_final_newline = true
|
||||||
|
|
||||||
|
[*.json]
|
||||||
|
indent_style = space
|
||||||
|
|
||||||
|
[*.yml]
|
||||||
|
indent_style = space
|
8
.eslintignore
Normal file
8
.eslintignore
Normal file
|
@@ -0,0 +1,8 @@
|
||||||
|
node_modules
|
||||||
|
/built
|
||||||
|
/coverage
|
||||||
|
/.eslintrc.js
|
||||||
|
/jest.config.ts
|
||||||
|
parser.js
|
||||||
|
/test
|
||||||
|
/test-d
|
63
.eslintrc.js
Normal file
63
.eslintrc.js
Normal file
|
@@ -0,0 +1,63 @@
|
||||||
|
module.exports = {
|
||||||
|
root: true,
|
||||||
|
parser: '@typescript-eslint/parser',
|
||||||
|
parserOptions: {
|
||||||
|
tsconfigRootDir: __dirname,
|
||||||
|
project: ['./tsconfig.json'],
|
||||||
|
},
|
||||||
|
plugins: [
|
||||||
|
'@typescript-eslint',
|
||||||
|
],
|
||||||
|
extends: [
|
||||||
|
'eslint:recommended',
|
||||||
|
'plugin:@typescript-eslint/recommended',
|
||||||
|
],
|
||||||
|
rules: {
|
||||||
|
'indent': ['error', 'tab', {
|
||||||
|
'SwitchCase': 1,
|
||||||
|
'MemberExpression': 'off',
|
||||||
|
'flatTernaryExpressions': true,
|
||||||
|
'ArrayExpression': 'first',
|
||||||
|
'ObjectExpression': 'first',
|
||||||
|
}],
|
||||||
|
'eol-last': ['error', 'always'],
|
||||||
|
'semi': ['error', 'always'],
|
||||||
|
'quotes': ['error', 'single'],
|
||||||
|
'comma-dangle': ['error', 'always-multiline'],
|
||||||
|
'keyword-spacing': ['error', {
|
||||||
|
'before': true,
|
||||||
|
'after': true,
|
||||||
|
}],
|
||||||
|
'key-spacing': ['error', {
|
||||||
|
'beforeColon': false,
|
||||||
|
'afterColon': true,
|
||||||
|
}],
|
||||||
|
'space-infix-ops': ['error'],
|
||||||
|
'space-before-blocks': ['error', 'always'],
|
||||||
|
'object-curly-spacing': ['error', 'always'],
|
||||||
|
'nonblock-statement-body-position': ['error', 'beside'],
|
||||||
|
'eqeqeq': ['error', 'always', { 'null': 'ignore' }],
|
||||||
|
'no-multiple-empty-lines': ['error', { 'max': 1 }],
|
||||||
|
'no-multi-spaces': ['error'],
|
||||||
|
'no-var': ['error'],
|
||||||
|
'prefer-arrow-callback': ['error'],
|
||||||
|
'no-throw-literal': ['error'],
|
||||||
|
'no-param-reassign': ['warn'],
|
||||||
|
'no-constant-condition': ['warn', {
|
||||||
|
checkLoops: false,
|
||||||
|
}],
|
||||||
|
'no-empty-pattern': ['warn'],
|
||||||
|
'@typescript-eslint/no-unnecessary-condition': ['warn', {
|
||||||
|
allowConstantLoopConditions: true,
|
||||||
|
}],
|
||||||
|
'@typescript-eslint/no-inferrable-types': ['warn'],
|
||||||
|
'@typescript-eslint/no-non-null-assertion': ['warn'],
|
||||||
|
'@typescript-eslint/explicit-function-return-type': ['warn'],
|
||||||
|
'@typescript-eslint/no-misused-promises': ['error', {
|
||||||
|
'checksVoidReturn': false,
|
||||||
|
}],
|
||||||
|
'@typescript-eslint/no-unused-vars': ['error', {
|
||||||
|
"argsIgnorePattern": "^_",
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
};
|
13
.gitignore
vendored
Normal file
13
.gitignore
vendored
Normal file
|
@@ -0,0 +1,13 @@
|
||||||
|
# npm
|
||||||
|
node_modules
|
||||||
|
|
||||||
|
# editor
|
||||||
|
.vscode
|
||||||
|
|
||||||
|
# app dir
|
||||||
|
built
|
||||||
|
temp
|
||||||
|
|
||||||
|
coverage
|
||||||
|
|
||||||
|
src/internal/parser.js
|
16
CHANGELOG.md
Normal file
16
CHANGELOG.md
Normal file
|
@@ -0,0 +1,16 @@
|
||||||
|
<!--
|
||||||
|
## 0.x.x (unreleased)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
### Improvements
|
||||||
|
|
||||||
|
### Changes
|
||||||
|
|
||||||
|
### Bugfixes
|
||||||
|
|
||||||
|
-->
|
||||||
|
|
||||||
|
## 0.24.0
|
||||||
|
### Features
|
||||||
|
- Supports Unicode 15.0 emoji
|
128
CODE_OF_CONDUCT.md
Normal file
128
CODE_OF_CONDUCT.md
Normal file
|
@@ -0,0 +1,128 @@
|
||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity
|
||||||
|
and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
* Demonstrating empathy and kindness toward other people
|
||||||
|
* Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
* Giving and gracefully accepting constructive feedback
|
||||||
|
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
* Focusing on what is best not just for us as individuals, but for the
|
||||||
|
overall community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery, and sexual attention or
|
||||||
|
advances of any kind
|
||||||
|
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or email
|
||||||
|
address, without their explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
abuse@dragonschildstudios.com.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series
|
||||||
|
of actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or
|
||||||
|
permanent ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
the community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.0, available at
|
||||||
|
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||||
|
enforcement ladder](https://github.com/mozilla/diversity).
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
https://www.contributor-covenant.org/faq. Translations are available at
|
||||||
|
https://www.contributor-covenant.org/translations.
|
24
CONTRIBUTING.md
Normal file
24
CONTRIBUTING.md
Normal file
|
@@ -0,0 +1,24 @@
|
||||||
|
# Contribution guide
|
||||||
|
:v: Thanks for your contributions :v:
|
||||||
|
|
||||||
|
## Issues
|
||||||
|
Before creating an issue, please check the following:
|
||||||
|
- To avoid duplication, please search for similar issues before creating a new issue.
|
||||||
|
- Do not use Issues to ask questions.
|
||||||
|
- Issues should only be used for feature requests, suggestions, and problem reports.
|
||||||
|
- Please ask questions in the [Discord](https://discord.gg/y9KtEx7KFx).
|
||||||
|
|
||||||
|
## Creating a PR
|
||||||
|
Thank you for your PR! Before creating a PR, please check the following:
|
||||||
|
- If possible, prefix the title with a keyword that identifies the type of this PR, as shown below.
|
||||||
|
- fix / refactor / feat / enhance / perf / chore etc.
|
||||||
|
- Also, make sure that the granularity of this PR is appropriate. Please do not include more than one type of change or interest in a single PR.
|
||||||
|
- If there is an Issue which will be resolved by this PR, please include a reference to the Issue in the text.
|
||||||
|
- Please add the summary of the changes to [`CHANGELOG.md`](/CHANGELOG.md). However, this is not necessary for changes that do not affect the users, such as refactoring.
|
||||||
|
- Check if there are any documents that need to be created or updated due to this change.
|
||||||
|
- If you have added a feature or fixed a bug, please add a test case if possible.
|
||||||
|
- Please make sure that tests and Lint are passed in advance.
|
||||||
|
- You can run it with `npm run test` and `npm run lint`.
|
||||||
|
- Run `npm run api` to update the API report and commit it if there are any diffs.
|
||||||
|
|
||||||
|
Thanks for your cooperation 🤗
|
65
README.md
65
README.md
|
@@ -1,2 +1,65 @@
|
||||||
# mfm.js
|
# vfm.js
|
||||||
|
A VFM parser implementation written in TypeScript.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
```
|
||||||
|
npm i vfm-js
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
Please see [docs](./docs/index.md) for the detail.
|
||||||
|
|
||||||
|
TypeScript:
|
||||||
|
```ts
|
||||||
|
import * as vfm from 'vfm-js';
|
||||||
|
|
||||||
|
const inputText =
|
||||||
|
`<center>
|
||||||
|
Hello $[tada everynyan! 🎉]
|
||||||
|
|
||||||
|
I'm @bygul, A bot of valkyriecoms!
|
||||||
|
|
||||||
|
https://toastielab.dev/toastie_t0ast/bygul
|
||||||
|
</center>`;
|
||||||
|
|
||||||
|
// Generate a VFM tree from the full VFM text.
|
||||||
|
const vfmTree = vfm.parse(inputText);
|
||||||
|
|
||||||
|
// Generate a VFM tree from the simple VFM text.
|
||||||
|
const simpleVfmTree = vfm.parseSimple('I like the hot soup :soup:');
|
||||||
|
|
||||||
|
// Reverse to a VFM text from the VFM tree.
|
||||||
|
const text = vfm.toString(vfmTree);
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
## Develop
|
||||||
|
### 1. Clone
|
||||||
|
```
|
||||||
|
git clone https://toastielab.dev/Valkyriecoms/vfm.js.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Install packages
|
||||||
|
```
|
||||||
|
cd vfm.js
|
||||||
|
npm i
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Build
|
||||||
|
```
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
### Use the interactive CLI parser
|
||||||
|
full parser:
|
||||||
|
```
|
||||||
|
npm run parse
|
||||||
|
```
|
||||||
|
|
||||||
|
simple parser:
|
||||||
|
```
|
||||||
|
npm run parse-simple
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
This software is released under the [MIT License](LICENSE).
|
||||||
|
|
364
api-extractor.json
Normal file
364
api-extractor.json
Normal file
|
@@ -0,0 +1,364 @@
|
||||||
|
/**
|
||||||
|
* Config file for API Extractor. For more info, please visit: https://api-extractor.com
|
||||||
|
*/
|
||||||
|
{
|
||||||
|
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optionally specifies another JSON config file that this file extends from. This provides a way for
|
||||||
|
* standard settings to be shared across multiple projects.
|
||||||
|
*
|
||||||
|
* If the path starts with "./" or "../", the path is resolved relative to the folder of the file that contains
|
||||||
|
* the "extends" field. Otherwise, the first path segment is interpreted as an NPM package name, and will be
|
||||||
|
* resolved using NodeJS require().
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: none
|
||||||
|
* DEFAULT VALUE: ""
|
||||||
|
*/
|
||||||
|
// "extends": "./shared/api-extractor-base.json"
|
||||||
|
// "extends": "my-package/include/api-extractor-base.json"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determines the "<projectFolder>" token that can be used with other config file settings. The project folder
|
||||||
|
* typically contains the tsconfig.json and package.json config files, but the path is user-defined.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting.
|
||||||
|
*
|
||||||
|
* The default value for "projectFolder" is the token "<lookup>", which means the folder is determined by traversing
|
||||||
|
* parent folders, starting from the folder containing api-extractor.json, and stopping at the first folder
|
||||||
|
* that contains a tsconfig.json file. If a tsconfig.json file cannot be found in this way, then an error
|
||||||
|
* will be reported.
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <lookup>
|
||||||
|
* DEFAULT VALUE: "<lookup>"
|
||||||
|
*/
|
||||||
|
// "projectFolder": "..",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* (REQUIRED) Specifies the .d.ts file to be used as the starting point for analysis. API Extractor
|
||||||
|
* analyzes the symbols exported by this module.
|
||||||
|
*
|
||||||
|
* The file extension must be ".d.ts" and not ".ts".
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
*/
|
||||||
|
"mainEntryPointFilePath": "<projectFolder>/built/index.d.ts",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A list of NPM package names whose exports should be treated as part of this package.
|
||||||
|
*
|
||||||
|
* For example, suppose that Webpack is used to generate a distributed bundle for the project "library1",
|
||||||
|
* and another NPM package "library2" is embedded in this bundle. Some types from library2 may become part
|
||||||
|
* of the exported API for library1, but by default API Extractor would generate a .d.ts rollup that explicitly
|
||||||
|
* imports library2. To avoid this, we can specify:
|
||||||
|
*
|
||||||
|
* "bundledPackages": [ "library2" ],
|
||||||
|
*
|
||||||
|
* This would direct API Extractor to embed those types directly in the .d.ts rollup, as if they had been
|
||||||
|
* local files for library1.
|
||||||
|
*/
|
||||||
|
"bundledPackages": [],
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determines how the TypeScript compiler engine will be invoked by API Extractor.
|
||||||
|
*/
|
||||||
|
"compiler": {
|
||||||
|
/**
|
||||||
|
* Specifies the path to the tsconfig.json file to be used by API Extractor when analyzing the project.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* Note: This setting will be ignored if "overrideTsconfig" is used.
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<projectFolder>/tsconfig.json"
|
||||||
|
*/
|
||||||
|
// "tsconfigFilePath": "<projectFolder>/tsconfig.json",
|
||||||
|
/**
|
||||||
|
* Provides a compiler configuration that will be used instead of reading the tsconfig.json file from disk.
|
||||||
|
* The object must conform to the TypeScript tsconfig schema:
|
||||||
|
*
|
||||||
|
* http://json.schemastore.org/tsconfig
|
||||||
|
*
|
||||||
|
* If omitted, then the tsconfig.json file will be read from the "projectFolder".
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: no overrideTsconfig section
|
||||||
|
*/
|
||||||
|
// "overrideTsconfig": {
|
||||||
|
// . . .
|
||||||
|
// }
|
||||||
|
/**
|
||||||
|
* This option causes the compiler to be invoked with the --skipLibCheck option. This option is not recommended
|
||||||
|
* and may cause API Extractor to produce incomplete or incorrect declarations, but it may be required when
|
||||||
|
* dependencies contain declarations that are incompatible with the TypeScript engine that API Extractor uses
|
||||||
|
* for its analysis. Where possible, the underlying issue should be fixed rather than relying on skipLibCheck.
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: false
|
||||||
|
*/
|
||||||
|
// "skipLibCheck": true,
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures how the API report file (*.api.md) will be generated.
|
||||||
|
*/
|
||||||
|
"apiReport": {
|
||||||
|
/**
|
||||||
|
* (REQUIRED) Whether to generate an API report.
|
||||||
|
*/
|
||||||
|
"enabled": true
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The filename for the API report files. It will be combined with "reportFolder" or "reportTempFolder" to produce
|
||||||
|
* a full file path.
|
||||||
|
*
|
||||||
|
* The file extension should be ".api.md", and the string should not contain a path separator such as "\" or "/".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<unscopedPackageName>.api.md"
|
||||||
|
*/
|
||||||
|
// "reportFileName": "<unscopedPackageName>.api.md",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the folder where the API report file is written. The file name portion is determined by
|
||||||
|
* the "reportFileName" setting.
|
||||||
|
*
|
||||||
|
* The API report file is normally tracked by Git. Changes to it can be used to trigger a branch policy,
|
||||||
|
* e.g. for an API review.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<projectFolder>/etc/"
|
||||||
|
*/
|
||||||
|
// "reportFolder": "<projectFolder>/etc/",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the folder where the temporary report file is written. The file name portion is determined by
|
||||||
|
* the "reportFileName" setting.
|
||||||
|
*
|
||||||
|
* After the temporary file is written to disk, it is compared with the file in the "reportFolder".
|
||||||
|
* If they are different, a production build will fail.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<projectFolder>/temp/"
|
||||||
|
*/
|
||||||
|
// "reportTempFolder": "<projectFolder>/temp/"
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures how the doc model file (*.api.json) will be generated.
|
||||||
|
*/
|
||||||
|
"docModel": {
|
||||||
|
/**
|
||||||
|
* (REQUIRED) Whether to generate a doc model file.
|
||||||
|
*/
|
||||||
|
"enabled": true
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The output path for the doc model file. The file extension should be ".api.json".
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<projectFolder>/temp/<unscopedPackageName>.api.json"
|
||||||
|
*/
|
||||||
|
// "apiJsonFilePath": "<projectFolder>/temp/<unscopedPackageName>.api.json"
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures how the .d.ts rollup file will be generated.
|
||||||
|
*/
|
||||||
|
"dtsRollup": {
|
||||||
|
/**
|
||||||
|
* (REQUIRED) Whether to generate the .d.ts rollup file.
|
||||||
|
*/
|
||||||
|
"enabled": false
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the output path for a .d.ts rollup file to be generated without any trimming.
|
||||||
|
* This file will include all declarations that are exported by the main entry point.
|
||||||
|
*
|
||||||
|
* If the path is an empty string, then this file will not be written.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<projectFolder>/dist/<unscopedPackageName>.d.ts"
|
||||||
|
*/
|
||||||
|
// "untrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>.d.ts",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "beta" release.
|
||||||
|
* This file will include only declarations that are marked as "@public" or "@beta".
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: ""
|
||||||
|
*/
|
||||||
|
// "betaTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-beta.d.ts",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies the output path for a .d.ts rollup file to be generated with trimming for a "public" release.
|
||||||
|
* This file will include only declarations that are marked as "@public".
|
||||||
|
*
|
||||||
|
* If the path is an empty string, then this file will not be written.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: ""
|
||||||
|
*/
|
||||||
|
// "publicTrimmedFilePath": "<projectFolder>/dist/<unscopedPackageName>-public.d.ts",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When a declaration is trimmed, by default it will be replaced by a code comment such as
|
||||||
|
* "Excluded from this release type: exampleMember". Set "omitTrimmingComments" to true to remove the
|
||||||
|
* declaration completely.
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: false
|
||||||
|
*/
|
||||||
|
// "omitTrimmingComments": true
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures how the tsdoc-metadata.json file will be generated.
|
||||||
|
*/
|
||||||
|
"tsdocMetadata": {
|
||||||
|
/**
|
||||||
|
* Whether to generate the tsdoc-metadata.json file.
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: true
|
||||||
|
*/
|
||||||
|
// "enabled": true,
|
||||||
|
/**
|
||||||
|
* Specifies where the TSDoc metadata file should be written.
|
||||||
|
*
|
||||||
|
* The path is resolved relative to the folder of the config file that contains the setting; to change this,
|
||||||
|
* prepend a folder token such as "<projectFolder>".
|
||||||
|
*
|
||||||
|
* The default value is "<lookup>", which causes the path to be automatically inferred from the "tsdocMetadata",
|
||||||
|
* "typings" or "main" fields of the project's package.json. If none of these fields are set, the lookup
|
||||||
|
* falls back to "tsdoc-metadata.json" in the package folder.
|
||||||
|
*
|
||||||
|
* SUPPORTED TOKENS: <projectFolder>, <packageName>, <unscopedPackageName>
|
||||||
|
* DEFAULT VALUE: "<lookup>"
|
||||||
|
*/
|
||||||
|
// "tsdocMetadataFilePath": "<projectFolder>/dist/tsdoc-metadata.json"
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specifies what type of newlines API Extractor should use when writing output files. By default, the output files
|
||||||
|
* will be written with Windows-style newlines. To use POSIX-style newlines, specify "lf" instead.
|
||||||
|
* To use the OS's default newline kind, specify "os".
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: "crlf"
|
||||||
|
*/
|
||||||
|
// "newlineKind": "crlf",
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures how API Extractor reports error and warning messages produced during analysis.
|
||||||
|
*
|
||||||
|
* There are three sources of messages: compiler messages, API Extractor messages, and TSDoc messages.
|
||||||
|
*/
|
||||||
|
"messages": {
|
||||||
|
/**
|
||||||
|
* Configures handling of diagnostic messages reported by the TypeScript compiler engine while analyzing
|
||||||
|
* the input .d.ts files.
|
||||||
|
*
|
||||||
|
* TypeScript message identifiers start with "TS" followed by an integer. For example: "TS2551"
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
|
||||||
|
*/
|
||||||
|
"compilerMessageReporting": {
|
||||||
|
/**
|
||||||
|
* Configures the default routing for messages that don't match an explicit rule in this table.
|
||||||
|
*/
|
||||||
|
"default": {
|
||||||
|
/**
|
||||||
|
* Specifies whether the message should be written to the the tool's output log. Note that
|
||||||
|
* the "addToApiReportFile" property may supersede this option.
|
||||||
|
*
|
||||||
|
* Possible values: "error", "warning", "none"
|
||||||
|
*
|
||||||
|
* Errors cause the build to fail and return a nonzero exit code. Warnings cause a production build fail
|
||||||
|
* and return a nonzero exit code. For a non-production build (e.g. when "api-extractor run" includes
|
||||||
|
* the "--local" option), the warning is displayed but the build will not fail.
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: "warning"
|
||||||
|
*/
|
||||||
|
"logLevel": "warning"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When addToApiReportFile is true: If API Extractor is configured to write an API report file (.api.md),
|
||||||
|
* then the message will be written inside that file; otherwise, the message is instead logged according to
|
||||||
|
* the "logLevel" option.
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: false
|
||||||
|
*/
|
||||||
|
// "addToApiReportFile": false
|
||||||
|
}
|
||||||
|
|
||||||
|
// "TS2551": {
|
||||||
|
// "logLevel": "warning",
|
||||||
|
// "addToApiReportFile": true
|
||||||
|
// },
|
||||||
|
//
|
||||||
|
// . . .
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures handling of messages reported by API Extractor during its analysis.
|
||||||
|
*
|
||||||
|
* API Extractor message identifiers start with "ae-". For example: "ae-extra-release-tag"
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: See api-extractor-defaults.json for the complete table of extractorMessageReporting mappings
|
||||||
|
*/
|
||||||
|
"extractorMessageReporting": {
|
||||||
|
"default": {
|
||||||
|
"logLevel": "none"
|
||||||
|
// "addToApiReportFile": false
|
||||||
|
}
|
||||||
|
|
||||||
|
// "ae-extra-release-tag": {
|
||||||
|
// "logLevel": "warning",
|
||||||
|
// "addToApiReportFile": true
|
||||||
|
// },
|
||||||
|
//
|
||||||
|
// . . .
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures handling of messages reported by the TSDoc parser when analyzing code comments.
|
||||||
|
*
|
||||||
|
* TSDoc message identifiers start with "tsdoc-". For example: "tsdoc-link-tag-unescaped-text"
|
||||||
|
*
|
||||||
|
* DEFAULT VALUE: A single "default" entry with logLevel=warning.
|
||||||
|
*/
|
||||||
|
"tsdocMessageReporting": {
|
||||||
|
"default": {
|
||||||
|
"logLevel": "warning"
|
||||||
|
// "addToApiReportFile": false
|
||||||
|
}
|
||||||
|
|
||||||
|
// "tsdoc-link-tag-unescaped-text": {
|
||||||
|
// "logLevel": "warning",
|
||||||
|
// "addToApiReportFile": true
|
||||||
|
// },
|
||||||
|
//
|
||||||
|
// . . .
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
281
etc/vfm-js.api.md
Normal file
281
etc/vfm-js.api.md
Normal file
|
@@ -0,0 +1,281 @@
|
||||||
|
## API Report File for "vfm-js"
|
||||||
|
|
||||||
|
> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).
|
||||||
|
|
||||||
|
```ts
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const BOLD: (children: VfmInline[]) => NodeType<'bold'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const CENTER: (children: VfmInline[]) => NodeType<'center'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const CODE_BLOCK: (code: string, lang: string | null) => NodeType<'blockCode'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const EMOJI_CODE: (name: string) => NodeType<'emojiCode'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export function extract(nodes: VfmNode[], predicate: (node: VfmNode) => boolean): VfmNode[];
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const FN: (name: string, args: VfmFn['props']['args'], children: VfmFn['children']) => NodeType<'fn'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const HASHTAG: (value: string) => NodeType<'hashtag'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const INLINE_CODE: (code: string) => NodeType<'inlineCode'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export function inspect(node: VfmNode, action: (node: VfmNode) => void): void;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export function inspect(nodes: VfmNode[], action: (node: VfmNode) => void): void;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const ITALIC: (children: VfmInline[]) => NodeType<'italic'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const LINK: (silent: boolean, url: string, children: VfmInline[]) => NodeType<'link'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const MATH_BLOCK: (formula: string) => NodeType<'mathBlock'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const MATH_INLINE: (formula: string) => NodeType<'mathInline'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const MENTION: (username: string, host: string | null, acct: string) => NodeType<'mention'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const N_URL: (value: string, brackets?: boolean) => NodeType<'url'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type NodeType<T extends VfmNode['type']> = T extends 'quote' ? VfmQuote : T extends 'search' ? VfmSearch : T extends 'blockCode' ? VfmCodeBlock : T extends 'mathBlock' ? VfmMathBlock : T extends 'center' ? VfmCenter : T extends 'unicodeEmoji' ? VfmUnicodeEmoji : T extends 'emojiCode' ? VfmEmojiCode : T extends 'bold' ? VfmBold : T extends 'small' ? VfmSmall : T extends 'italic' ? VfmItalic : T extends 'strike' ? VfmStrike : T extends 'inlineCode' ? VfmInlineCode : T extends 'mathInline' ? VfmMathInline : T extends 'mention' ? VfmMention : T extends 'hashtag' ? VfmHashtag : T extends 'url' ? VfmUrl : T extends 'link' ? VfmLink : T extends 'fn' ? VfmFn : T extends 'plain' ? VfmPlain : T extends 'text' ? VfmText : never;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export function parse(input: string, opts?: Partial<{
|
||||||
|
nestLimit: number;
|
||||||
|
}>): VfmNode[];
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export function parseSimple(input: string): VfmSimpleNode[];
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const PLAIN: (text: string) => NodeType<'plain'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const QUOTE: (children: VfmNode[]) => NodeType<'quote'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const SEARCH: (query: string, content: string) => NodeType<'search'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const SMALL: (children: VfmInline[]) => NodeType<'small'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const STRIKE: (children: VfmInline[]) => NodeType<'strike'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const TEXT: (value: string) => NodeType<'text'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
function toString_2(tree: VfmNode[]): string;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
function toString_2(node: VfmNode): string;
|
||||||
|
export { toString_2 as toString }
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export const UNI_EMOJI: (value: string) => NodeType<'unicodeEmoji'>;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmBlock = VfmQuote | VfmSearch | VfmCodeBlock | VfmMathBlock | VfmCenter;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmBold = {
|
||||||
|
type: 'bold';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmCenter = {
|
||||||
|
type: 'center';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmCodeBlock = {
|
||||||
|
type: 'blockCode';
|
||||||
|
props: {
|
||||||
|
code: string;
|
||||||
|
lang: string | null;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmEmojiCode = {
|
||||||
|
type: 'emojiCode';
|
||||||
|
props: {
|
||||||
|
name: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmFn = {
|
||||||
|
type: 'fn';
|
||||||
|
props: {
|
||||||
|
name: string;
|
||||||
|
args: Record<string, string | true>;
|
||||||
|
};
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmHashtag = {
|
||||||
|
type: 'hashtag';
|
||||||
|
props: {
|
||||||
|
hashtag: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmInline = VfmUnicodeEmoji | VfmEmojiCode | VfmBold | VfmSmall | VfmItalic | VfmStrike | VfmInlineCode | VfmMathInline | VfmMention | VfmHashtag | VfmUrl | VfmLink | VfmFn | VfmPlain | VfmText;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmInlineCode = {
|
||||||
|
type: 'inlineCode';
|
||||||
|
props: {
|
||||||
|
code: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmItalic = {
|
||||||
|
type: 'italic';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmLink = {
|
||||||
|
type: 'link';
|
||||||
|
props: {
|
||||||
|
silent: boolean;
|
||||||
|
url: string;
|
||||||
|
};
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmMathBlock = {
|
||||||
|
type: 'mathBlock';
|
||||||
|
props: {
|
||||||
|
formula: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmMathInline = {
|
||||||
|
type: 'mathInline';
|
||||||
|
props: {
|
||||||
|
formula: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmMention = {
|
||||||
|
type: 'mention';
|
||||||
|
props: {
|
||||||
|
username: string;
|
||||||
|
host: string | null;
|
||||||
|
acct: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmNode = VfmBlock | VfmInline;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmPlain = {
|
||||||
|
type: 'plain';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmText[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmQuote = {
|
||||||
|
type: 'quote';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmNode[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmSearch = {
|
||||||
|
type: 'search';
|
||||||
|
props: {
|
||||||
|
query: string;
|
||||||
|
content: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmSimpleNode = VfmUnicodeEmoji | VfmEmojiCode | VfmText | VfmPlain;
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmSmall = {
|
||||||
|
type: 'small';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmStrike = {
|
||||||
|
type: 'strike';
|
||||||
|
props?: Record<string, unknown>;
|
||||||
|
children: VfmInline[];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmText = {
|
||||||
|
type: 'text';
|
||||||
|
props: {
|
||||||
|
text: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmUnicodeEmoji = {
|
||||||
|
type: 'unicodeEmoji';
|
||||||
|
props: {
|
||||||
|
emoji: string;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// @public (undocumented)
|
||||||
|
export type VfmUrl = {
|
||||||
|
type: 'url';
|
||||||
|
props: {
|
||||||
|
url: string;
|
||||||
|
brackets?: boolean;
|
||||||
|
};
|
||||||
|
children?: [];
|
||||||
|
};
|
||||||
|
|
||||||
|
// (No @packageDocumentation comment for this package)
|
||||||
|
|
||||||
|
```
|
197
jest.config.ts
Normal file
197
jest.config.ts
Normal file
|
@ -0,0 +1,197 @@
|
||||||
|
/*
|
||||||
|
* For a detailed explanation regarding each configuration property and type check, visit:
|
||||||
|
* https://jestjs.io/docs/en/configuration.html
|
||||||
|
*/
|
||||||
|
|
||||||
|
export default {
|
||||||
|
// All imported modules in your tests should be mocked automatically
|
||||||
|
// automock: false,
|
||||||
|
|
||||||
|
// Stop running tests after `n` failures
|
||||||
|
// bail: 0,
|
||||||
|
|
||||||
|
// The directory where Jest should store its cached dependency information
|
||||||
|
// cacheDirectory: "C:\\Users\\ai\\AppData\\Local\\Temp\\jest",
|
||||||
|
|
||||||
|
// Automatically clear mock calls and instances between every test
|
||||||
|
// clearMocks: false,
|
||||||
|
|
||||||
|
// Indicates whether the coverage information should be collected while executing the test
|
||||||
|
// collectCoverage: false,
|
||||||
|
|
||||||
|
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||||
|
collectCoverageFrom: ['src/**/*.ts', '!src/cli/**/*.ts'],
|
||||||
|
|
||||||
|
// The directory where Jest should output its coverage files
|
||||||
|
coverageDirectory: "coverage",
|
||||||
|
|
||||||
|
// An array of regexp pattern strings used to skip coverage collection
|
||||||
|
// coveragePathIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// Indicates which provider should be used to instrument code for coverage
|
||||||
|
coverageProvider: "v8",
|
||||||
|
|
||||||
|
// A list of reporter names that Jest uses when writing coverage reports
|
||||||
|
// coverageReporters: [
|
||||||
|
// "json",
|
||||||
|
// "text",
|
||||||
|
// "lcov",
|
||||||
|
// "clover"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An object that configures minimum threshold enforcement for coverage results
|
||||||
|
// coverageThreshold: undefined,
|
||||||
|
|
||||||
|
// A path to a custom dependency extractor
|
||||||
|
// dependencyExtractor: undefined,
|
||||||
|
|
||||||
|
// Make calling deprecated APIs throw helpful error messages
|
||||||
|
// errorOnDeprecated: false,
|
||||||
|
|
||||||
|
// Force coverage collection from ignored files using an array of glob patterns
|
||||||
|
// forceCoverageMatch: [],
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once before all test suites
|
||||||
|
// globalSetup: undefined,
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once after all test suites
|
||||||
|
// globalTeardown: undefined,
|
||||||
|
|
||||||
|
// A set of global variables that need to be available in all test environments
|
||||||
|
// globals: {},
|
||||||
|
|
||||||
|
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||||
|
// maxWorkers: "50%",
|
||||||
|
|
||||||
|
// An array of directory names to be searched recursively up from the requiring module's location
|
||||||
|
// moduleDirectories: [
|
||||||
|
// "node_modules"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of file extensions your modules use
|
||||||
|
// moduleFileExtensions: [
|
||||||
|
// "js",
|
||||||
|
// "json",
|
||||||
|
// "jsx",
|
||||||
|
// "ts",
|
||||||
|
// "tsx",
|
||||||
|
// "node"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||||
|
// moduleNameMapper: {},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||||
|
// modulePathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Activates notifications for test results
|
||||||
|
// notify: false,
|
||||||
|
|
||||||
|
// An enum that specifies notification mode. Requires { notify: true }
|
||||||
|
// notifyMode: "failure-change",
|
||||||
|
|
||||||
|
// A preset that is used as a base for Jest's configuration
|
||||||
|
// preset: undefined,
|
||||||
|
|
||||||
|
// Run tests from one or more projects
|
||||||
|
// projects: undefined,
|
||||||
|
|
||||||
|
// Use this configuration option to add custom reporters to Jest
|
||||||
|
// reporters: undefined,
|
||||||
|
|
||||||
|
// Automatically reset mock state between every test
|
||||||
|
// resetMocks: false,
|
||||||
|
|
||||||
|
// Reset the module registry before running each individual test
|
||||||
|
// resetModules: false,
|
||||||
|
|
||||||
|
// A path to a custom resolver
|
||||||
|
// resolver: undefined,
|
||||||
|
|
||||||
|
// Automatically restore mock state between every test
|
||||||
|
// restoreMocks: false,
|
||||||
|
|
||||||
|
// The root directory that Jest should scan for tests and modules within
|
||||||
|
// rootDir: undefined,
|
||||||
|
|
||||||
|
// A list of paths to directories that Jest should use to search for files in
|
||||||
|
roots: [
|
||||||
|
"<rootDir>"
|
||||||
|
],
|
||||||
|
|
||||||
|
// Allows you to use a custom runner instead of Jest's default test runner
|
||||||
|
// runner: "jest-runner",
|
||||||
|
|
||||||
|
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||||
|
// setupFiles: [],
|
||||||
|
|
||||||
|
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||||
|
// setupFilesAfterEnv: [],
|
||||||
|
|
||||||
|
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||||
|
// slowTestThreshold: 5,
|
||||||
|
|
||||||
|
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||||
|
// snapshotSerializers: [],
|
||||||
|
|
||||||
|
// The test environment that will be used for testing
|
||||||
|
testEnvironment: "node",
|
||||||
|
|
||||||
|
// Options that will be passed to the testEnvironment
|
||||||
|
// testEnvironmentOptions: {},
|
||||||
|
|
||||||
|
// Adds a location field to test results
|
||||||
|
// testLocationInResults: false,
|
||||||
|
|
||||||
|
// The glob patterns Jest uses to detect test files
|
||||||
|
testMatch: [
|
||||||
|
"**/__tests__/**/*.[jt]s?(x)",
|
||||||
|
"**/?(*.)+(spec|test).[tj]s?(x)",
|
||||||
|
"<rootDir>/test/**/*"
|
||||||
|
],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||||
|
// testPathIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||||
|
// testRegex: [],
|
||||||
|
|
||||||
|
// This option allows the use of a custom results processor
|
||||||
|
// testResultsProcessor: undefined,
|
||||||
|
|
||||||
|
// This option allows use of a custom test runner
|
||||||
|
// testRunner: "jasmine2",
|
||||||
|
|
||||||
|
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
||||||
|
// testURL: "http://localhost",
|
||||||
|
|
||||||
|
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
||||||
|
// timers: "real",
|
||||||
|
|
||||||
|
// A map from regular expressions to paths to transformers
|
||||||
|
transform: {
|
||||||
|
"^.+\\.(ts|tsx)$": "ts-jest"
|
||||||
|
},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||||
|
// transformIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\",
|
||||||
|
// "\\.pnp\\.[^\\\\]+$"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||||
|
// unmockedModulePathPatterns: undefined,
|
||||||
|
|
||||||
|
// Indicates whether each individual test should be reported during the run
|
||||||
|
// verbose: undefined,
|
||||||
|
|
||||||
|
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||||
|
// watchPathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Whether to use watchman for file crawling
|
||||||
|
// watchman: true,
|
||||||
|
};
|
6179
package-lock.json
generated
Normal file
6179
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
45
package.json
Normal file
45
package.json
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
{
|
||||||
|
"name": "vfm-js",
|
||||||
|
"version": "0.24.0",
|
||||||
|
"description": "An VFM parser implementation with TypeScript",
|
||||||
|
"main": "./built/index.js",
|
||||||
|
"types": "./built/index.d.ts",
|
||||||
|
"scripts": {
|
||||||
|
"build": "npm run tsc",
|
||||||
|
"tsc": "tsc",
|
||||||
|
"tsd": "tsd",
|
||||||
|
"parse": "node ./built/cli/parse",
|
||||||
|
"parse-simple": "node ./built/cli/parseSimple",
|
||||||
|
"api": "npx api-extractor run --local --verbose",
|
||||||
|
"api-prod": "npx api-extractor run --verbose",
|
||||||
|
"lint": "eslint . --ext .js,.jsx,.ts,.tsx",
|
||||||
|
"jest": "jest --coverage",
|
||||||
|
"test": "npm run jest && npm run tsd"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://toastielab.dev/Valkyriecoms/vfm.js.git"
|
||||||
|
},
|
||||||
|
"author": "Toastie <toastie@toastiet0ast.com>",
|
||||||
|
"license": "MIT",
|
||||||
|
"devDependencies": {
|
||||||
|
"@microsoft/api-extractor": "7.38.5",
|
||||||
|
"@types/jest": "29.5.11",
|
||||||
|
"@types/node": "20.10.5",
|
||||||
|
"@typescript-eslint/eslint-plugin": "6.14.0",
|
||||||
|
"@typescript-eslint/parser": "6.14.0",
|
||||||
|
"eslint": "8.56.0",
|
||||||
|
"jest": "29.7.0",
|
||||||
|
"ts-jest": "29.1.1",
|
||||||
|
"ts-node": "10.9.2",
|
||||||
|
"tsd": "0.30.0",
|
||||||
|
"typescript": "5.3.3"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@twemoji/parser": "15.0.0"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"built",
|
||||||
|
"CHANGELOG.md"
|
||||||
|
]
|
||||||
|
}
|
4
src/@types/twemoji.d.ts
vendored
Normal file
4
src/@types/twemoji.d.ts
vendored
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
declare module '@twemoji/parser/dist/regex' {
|
||||||
|
const regex: RegExp;
|
||||||
|
export default regex;
|
||||||
|
}
|
67
src/api.ts
Normal file
67
src/api.ts
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
import { fullParser, simpleParser } from './internal';
|
||||||
|
import { inspectOne, stringifyNode, stringifyTree } from './internal/util';
|
||||||
|
import { VfmNode, VfmSimpleNode } from './node';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generated a VfmNode tree from the VFM string.
|
||||||
|
*/
|
||||||
|
export function parse(input: string, opts: Partial<{ nestLimit: number; }> = {}): VfmNode[] {
|
||||||
|
const nodes = fullParser(input, {
|
||||||
|
nestLimit: opts.nestLimit,
|
||||||
|
});
|
||||||
|
return nodes;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a VfmSimpleNode tree from the VFM string.
|
||||||
|
*/
|
||||||
|
export function parseSimple(input: string): VfmSimpleNode[] {
|
||||||
|
const nodes = simpleParser(input);
|
||||||
|
return nodes;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a VFM string from the VfmNode tree.
|
||||||
|
*/
|
||||||
|
export function toString(tree: VfmNode[]): string
|
||||||
|
export function toString(node: VfmNode): string
|
||||||
|
export function toString(node: VfmNode | VfmNode[]): string {
|
||||||
|
if (Array.isArray(node)) {
|
||||||
|
return stringifyTree(node);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return stringifyNode(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Inspects the VfmNode tree.
|
||||||
|
*/
|
||||||
|
export function inspect(node: VfmNode, action: (node: VfmNode) => void): void
|
||||||
|
export function inspect(nodes: VfmNode[], action: (node: VfmNode) => void): void
|
||||||
|
export function inspect(node: (VfmNode | VfmNode[]), action: (node: VfmNode) => void): void {
|
||||||
|
if (Array.isArray(node)) {
|
||||||
|
for (const n of node) {
|
||||||
|
inspectOne(n, action);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
inspectOne(node, action);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Inspects the VfmNode tree and returns as an array the nodes that match the conditions
|
||||||
|
* of the predicate function.
|
||||||
|
*/
|
||||||
|
export function extract(nodes: VfmNode[], predicate: (node: VfmNode) => boolean): VfmNode[] {
|
||||||
|
const dest = [] as VfmNode[];
|
||||||
|
|
||||||
|
inspect(nodes, (node) => {
|
||||||
|
if (predicate(node)) {
|
||||||
|
dest.push(node);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return dest;
|
||||||
|
}
|
22
src/cli/misc/inputLine.ts
Normal file
22
src/cli/misc/inputLine.ts
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
import readLine from 'readline';
|
||||||
|
|
||||||
|
export class InputCanceledError extends Error {
|
||||||
|
constructor(message?: string) {
|
||||||
|
super(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function(message: string): Promise<string> {
|
||||||
|
return new Promise<string>((resolve, reject) => {
|
||||||
|
const rl = readLine.createInterface(process.stdin, process.stdout);
|
||||||
|
rl.question(message, (ans) => {
|
||||||
|
rl.close();
|
||||||
|
resolve(ans);
|
||||||
|
});
|
||||||
|
rl.on('SIGINT', () => {
|
||||||
|
console.log('');
|
||||||
|
rl.close();
|
||||||
|
reject(new InputCanceledError('SIGINT interrupted'));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
47
src/cli/parse.ts
Normal file
47
src/cli/parse.ts
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
import { performance } from 'perf_hooks';
|
||||||
|
import inputLine, { InputCanceledError } from './misc/inputLine';
|
||||||
|
import { parse } from '..';
|
||||||
|
|
||||||
|
async function entryPoint(): Promise<void> {
|
||||||
|
console.log('intaractive parser');
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
let input: string;
|
||||||
|
try {
|
||||||
|
input = await inputLine('> ');
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
if (err instanceof InputCanceledError) {
|
||||||
|
console.log('bye.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
// replace special chars
|
||||||
|
input = input
|
||||||
|
.replace(/\\n/g, '\n')
|
||||||
|
.replace(/\\t/g, '\t')
|
||||||
|
.replace(/\\u00a0/g, '\u00a0');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const parseTimeStart = performance.now();
|
||||||
|
const result = parse(input);
|
||||||
|
const parseTimeEnd = performance.now();
|
||||||
|
console.log(JSON.stringify(result));
|
||||||
|
const parseTime = (parseTimeEnd - parseTimeStart).toFixed(3);
|
||||||
|
console.log(`parsing time: ${parseTime}ms`);
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
console.log('parsing error:');
|
||||||
|
console.log(err);
|
||||||
|
}
|
||||||
|
console.log();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
entryPoint()
|
||||||
|
.catch(err => {
|
||||||
|
console.log(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
47
src/cli/parseSimple.ts
Normal file
47
src/cli/parseSimple.ts
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
import { performance } from 'perf_hooks';
|
||||||
|
import inputLine, { InputCanceledError } from './misc/inputLine';
|
||||||
|
import { parseSimple } from '..';
|
||||||
|
|
||||||
|
async function entryPoint(): Promise<void> {
|
||||||
|
console.log('intaractive simple parser');
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
let input: string;
|
||||||
|
try {
|
||||||
|
input = await inputLine('> ');
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
if (err instanceof InputCanceledError) {
|
||||||
|
console.log('bye.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
// replace special chars
|
||||||
|
input = input
|
||||||
|
.replace(/\\n/g, '\n')
|
||||||
|
.replace(/\\t/g, '\t')
|
||||||
|
.replace(/\\u00a0/g, '\u00a0');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const parseTimeStart = performance.now();
|
||||||
|
const result = parseSimple(input);
|
||||||
|
const parseTimeEnd = performance.now();
|
||||||
|
console.log(JSON.stringify(result));
|
||||||
|
const parseTime = (parseTimeEnd - parseTimeStart).toFixed(3);
|
||||||
|
console.log(`parsing time: ${parseTime}ms`);
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
console.log('parsing error:');
|
||||||
|
console.log(err);
|
||||||
|
}
|
||||||
|
console.log();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
entryPoint()
|
||||||
|
.catch(err => {
|
||||||
|
console.log(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
67
src/index.ts
Normal file
67
src/index.ts
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
export {
|
||||||
|
parse,
|
||||||
|
parseSimple,
|
||||||
|
toString,
|
||||||
|
inspect,
|
||||||
|
extract,
|
||||||
|
} from './api';
|
||||||
|
|
||||||
|
export {
|
||||||
|
NodeType,
|
||||||
|
VfmNode,
|
||||||
|
VfmSimpleNode,
|
||||||
|
VfmBlock,
|
||||||
|
VfmInline,
|
||||||
|
} from './node';
|
||||||
|
|
||||||
|
export {
|
||||||
|
// block
|
||||||
|
VfmQuote,
|
||||||
|
VfmSearch,
|
||||||
|
VfmCodeBlock,
|
||||||
|
VfmMathBlock,
|
||||||
|
VfmCenter,
|
||||||
|
|
||||||
|
// inline
|
||||||
|
VfmUnicodeEmoji,
|
||||||
|
VfmEmojiCode,
|
||||||
|
VfmBold,
|
||||||
|
VfmSmall,
|
||||||
|
VfmItalic,
|
||||||
|
VfmStrike,
|
||||||
|
VfmInlineCode,
|
||||||
|
VfmMathInline,
|
||||||
|
VfmMention,
|
||||||
|
VfmHashtag,
|
||||||
|
VfmUrl,
|
||||||
|
VfmLink,
|
||||||
|
VfmFn,
|
||||||
|
VfmPlain,
|
||||||
|
VfmText,
|
||||||
|
} from './node';
|
||||||
|
|
||||||
|
export {
|
||||||
|
// block
|
||||||
|
QUOTE,
|
||||||
|
SEARCH,
|
||||||
|
CODE_BLOCK,
|
||||||
|
MATH_BLOCK,
|
||||||
|
CENTER,
|
||||||
|
|
||||||
|
// inline
|
||||||
|
UNI_EMOJI,
|
||||||
|
EMOJI_CODE,
|
||||||
|
BOLD,
|
||||||
|
SMALL,
|
||||||
|
ITALIC,
|
||||||
|
STRIKE,
|
||||||
|
INLINE_CODE,
|
||||||
|
MATH_INLINE,
|
||||||
|
MENTION,
|
||||||
|
HASHTAG,
|
||||||
|
N_URL,
|
||||||
|
LINK,
|
||||||
|
FN,
|
||||||
|
PLAIN,
|
||||||
|
TEXT,
|
||||||
|
} from './node';
|
273
src/internal/core/index.ts
Normal file
273
src/internal/core/index.ts
Normal file
|
@ -0,0 +1,273 @@
|
||||||
|
//
|
||||||
|
// Parsimmon-like stateful parser combinators
|
||||||
|
//
|
||||||
|
|
||||||
|
export type Success<T> = {
|
||||||
|
success: true;
|
||||||
|
value: T;
|
||||||
|
index: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type Failure = { success: false };
|
||||||
|
|
||||||
|
export type Result<T> = Success<T> | Failure;
|
||||||
|
|
||||||
|
interface State {
|
||||||
|
trace?: boolean,
|
||||||
|
linkLabel?: boolean,
|
||||||
|
nestLimit: number,
|
||||||
|
depth: number,
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ParserHandler<T> = (input: string, index: number, state: State) => Result<T>
|
||||||
|
|
||||||
|
export function success<T>(index: number, value: T): Success<T> {
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
value: value,
|
||||||
|
index: index,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function failure(): Failure {
|
||||||
|
return { success: false };
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Parser<T> {
|
||||||
|
public name?: string;
|
||||||
|
public handler: ParserHandler<T>;
|
||||||
|
|
||||||
|
constructor(handler: ParserHandler<T>, name?: string) {
|
||||||
|
this.handler = (input, index, state) : Failure | Success<T> => {
|
||||||
|
if (state.trace && this.name != null) {
|
||||||
|
const pos = `${index}`;
|
||||||
|
console.log(`${pos.padEnd(6, ' ')}enter ${this.name}`);
|
||||||
|
const result = handler(input, index, state);
|
||||||
|
if (result.success) {
|
||||||
|
const pos = `${index}:${result.index}`;
|
||||||
|
console.log(`${pos.padEnd(6, ' ')}match ${this.name}`);
|
||||||
|
} else {
|
||||||
|
const pos = `${index}`;
|
||||||
|
console.log(`${pos.padEnd(6, ' ')}fail ${this.name}`);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
return handler(input, index, state);
|
||||||
|
};
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
map<U>(fn: (value: T) => U): Parser<U> {
|
||||||
|
return new Parser((input, index, state) => {
|
||||||
|
const result = this.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
return success(result.index, fn(result.value));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
text(): Parser<string> {
|
||||||
|
return new Parser((input, index, state) => {
|
||||||
|
const result = this.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
const text = input.slice(index, result.index);
|
||||||
|
return success(result.index, text);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
many(min: number): Parser<T[]> {
|
||||||
|
return new Parser((input, index, state) => {
|
||||||
|
let result;
|
||||||
|
let latestIndex = index;
|
||||||
|
const accum: T[] = [];
|
||||||
|
while (latestIndex < input.length) {
|
||||||
|
result = this.handler(input, latestIndex, state);
|
||||||
|
if (!result.success) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
latestIndex = result.index;
|
||||||
|
accum.push(result.value);
|
||||||
|
}
|
||||||
|
if (accum.length < min) {
|
||||||
|
return failure();
|
||||||
|
}
|
||||||
|
return success(latestIndex, accum);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
sep(separator: Parser<unknown>, min: number): Parser<T[]> {
|
||||||
|
if (min < 1) {
|
||||||
|
throw new Error('"min" must be a value greater than or equal to 1.');
|
||||||
|
}
|
||||||
|
return seq(
|
||||||
|
this,
|
||||||
|
seq(
|
||||||
|
separator,
|
||||||
|
this,
|
||||||
|
).select(1).many(min - 1),
|
||||||
|
).map(result => [result[0], ...result[1]]);
|
||||||
|
}
|
||||||
|
|
||||||
|
select<K extends keyof T>(key: K): Parser<T[K]> {
|
||||||
|
return this.map(v => v[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
option(): Parser<T | null> {
|
||||||
|
return alt([
|
||||||
|
this,
|
||||||
|
succeeded(null),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function str<T extends string>(value: T): Parser<T> {
|
||||||
|
return new Parser((input, index, _state) => {
|
||||||
|
if ((input.length - index) < value.length) {
|
||||||
|
return failure();
|
||||||
|
}
|
||||||
|
if (input.substr(index, value.length) !== value) {
|
||||||
|
return failure();
|
||||||
|
}
|
||||||
|
return success(index + value.length, value);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function regexp<T extends RegExp>(pattern: T): Parser<string> {
|
||||||
|
const re = RegExp(`^(?:${pattern.source})`, pattern.flags);
|
||||||
|
return new Parser((input, index, _state) => {
|
||||||
|
const text = input.slice(index);
|
||||||
|
const result = re.exec(text);
|
||||||
|
if (result == null) {
|
||||||
|
return failure();
|
||||||
|
}
|
||||||
|
return success(index + result[0].length, result[0]);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extracts the value type produced by a parser, e.g. ParsedType<Parser<string>> = string.
type ParsedType<T extends Parser<unknown>> = T extends Parser<infer U> ? U : never;
|
||||||
|
|
||||||
|
// Maps a tuple of parsers to the tuple of their result types,
// e.g. [Parser<A>, Parser<B>] -> [A, B]. Non-parser members fall back to `unknown`.
export type SeqParseResult<T extends unknown[]> =
	T extends [] ? []
	: T extends [infer F, ...infer R]
		? (
			F extends Parser<unknown> ? [ParsedType<F>, ...SeqParseResult<R>] : [unknown, ...SeqParseResult<R>]
		)
		: unknown[];
|
||||||
|
|
||||||
|
export function seq<Parsers extends Parser<unknown>[]>(...parsers: Parsers): Parser<SeqParseResult<Parsers>> {
|
||||||
|
return new Parser((input, index, state) => {
|
||||||
|
let result;
|
||||||
|
let latestIndex = index;
|
||||||
|
const accum = [];
|
||||||
|
for (let i = 0; i < parsers.length; i++) {
|
||||||
|
result = parsers[i].handler(input, latestIndex, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
latestIndex = result.index;
|
||||||
|
accum.push(result.value);
|
||||||
|
}
|
||||||
|
return success(latestIndex, accum as SeqParseResult<Parsers>);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function alt<Parsers extends Parser<unknown>[]>(parsers: Parsers): Parser<ParsedType<Parsers[number]>> {
|
||||||
|
return new Parser<ParsedType<Parsers[number]>>((input, index, state): Result<ParsedType<Parsers[number]>> => {
|
||||||
|
for (let i = 0; i < parsers.length; i++) {
|
||||||
|
const parser: Parsers[number] = parsers[i];
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (result.success) {
|
||||||
|
return result as Result<ParsedType<Parsers[number]>>;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return failure();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function succeeded<T>(value: T): Parser<T> {
|
||||||
|
return new Parser((_input, index, _state) => {
|
||||||
|
return success(index, value);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function notMatch(parser: Parser<unknown>): Parser<null> {
|
||||||
|
return new Parser((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
return !result.success
|
||||||
|
? success(index, null)
|
||||||
|
: failure();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Line-break terminals. CRLF is tried first in `newline` so that a
// "\r\n" pair is consumed as one break rather than as CR then LF.
export const cr = str('\r');
export const lf = str('\n');
export const crlf = str('\r\n');
export const newline = alt([crlf, cr, lf]);
|
||||||
|
|
||||||
|
export const char = new Parser((input, index, _state) => {
|
||||||
|
if ((input.length - index) < 1) {
|
||||||
|
return failure();
|
||||||
|
}
|
||||||
|
const value = input.charAt(index);
|
||||||
|
return success(index + 1, value);
|
||||||
|
});
|
||||||
|
|
||||||
|
export const lineBegin = new Parser((input, index, state) => {
|
||||||
|
if (index === 0) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
if (cr.handler(input, index - 1, state).success) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
if (lf.handler(input, index - 1, state).success) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
return failure();
|
||||||
|
});
|
||||||
|
|
||||||
|
export const lineEnd = new Parser((input, index, state) => {
|
||||||
|
if (index === input.length) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
if (cr.handler(input, index, state).success) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
if (lf.handler(input, index, state).success) {
|
||||||
|
return success(index, null);
|
||||||
|
}
|
||||||
|
return failure();
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
 * Defers construction of a parser until first use, enabling recursive
 * grammars.
 *
 * The placeholder handler replaces itself with the handler of the parser
 * built by `fn` on the first invocation, so `fn` runs only once and later
 * calls dispatch straight to the real handler.
 */
export function lazy<T>(fn: () => Parser<T>): Parser<T> {
	const parser: Parser<T> = new Parser((input, index, state) => {
		parser.handler = fn().handler;
		return parser.handler(input, index, state);
	});
	return parser;
}
|
||||||
|
|
||||||
|
//type Syntax<T> = (rules: Record<string, Parser<T>>) => Parser<T>;
|
||||||
|
//type SyntaxReturn<T> = T extends (rules: Record<string, Parser<any>>) => infer R ? R : never;
|
||||||
|
//export function createLanguage2<T extends Record<string, Syntax<any>>>(syntaxes: T): { [K in keyof T]: SyntaxReturn<T[K]> } {
|
||||||
|
|
||||||
|
// Maps a record of result types to a record of parsers producing those types.
type ParserTable<T> = { [K in keyof T]: Parser<T[K]> };
|
||||||
|
|
||||||
|
// TODO: I want to make the function type declaration look nice.
|
||||||
|
export function createLanguage<T>(syntaxes: { [K in keyof T]: (r: ParserTable<T>) => Parser<T[K]> }): ParserTable<T> {
|
||||||
|
// @ts-expect-error initializing object so type error here
|
||||||
|
const rules: ParserTable<T> = {};
|
||||||
|
for (const key of Object.keys(syntaxes) as (keyof T & string)[]) {
|
||||||
|
rules[key] = lazy(() => {
|
||||||
|
const parser = syntaxes[key](rules);
|
||||||
|
if (parser == null) {
|
||||||
|
throw new Error('syntax must return a parser.');
|
||||||
|
}
|
||||||
|
parser.name = key;
|
||||||
|
return parser;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return rules;
|
||||||
|
}
|
27
src/internal/index.ts
Normal file
27
src/internal/index.ts
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
import * as V from '..';
|
||||||
|
import { language } from './parser';
|
||||||
|
import { mergeText } from './util';
|
||||||
|
|
||||||
|
// Options accepted by fullParser.
export type FullParserOpts = {
	// Maximum nesting depth for recursive constructs (default applied in fullParser: 20).
	nestLimit?: number;
};
|
||||||
|
|
||||||
|
export function fullParser(input: string, opts: FullParserOpts): V.VfmNode[] {
|
||||||
|
const result = language.fullParser.handler(input, 0, {
|
||||||
|
nestLimit: (opts.nestLimit != null) ? opts.nestLimit : 20,
|
||||||
|
depth: 0,
|
||||||
|
linkLabel: false,
|
||||||
|
trace: false,
|
||||||
|
});
|
||||||
|
if (!result.success) throw new Error('Unexpected parse error');
|
||||||
|
return mergeText(result.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function simpleParser(input: string): V.VfmSimpleNode[] {
|
||||||
|
const result = language.simpleParser.handler(input, 0, {
|
||||||
|
depth: 0,
|
||||||
|
nestLimit: 1 / 0, // reliable infinite
|
||||||
|
});
|
||||||
|
if (!result.success) throw new Error('Unexpected parse error');
|
||||||
|
return mergeText(result.value);
|
||||||
|
}
|
795
src/internal/parser.ts
Normal file
795
src/internal/parser.ts
Normal file
|
@ -0,0 +1,795 @@
|
||||||
|
import * as V from '..';
|
||||||
|
import * as P from './core';
|
||||||
|
import { mergeText } from './util';
|
||||||
|
import { SeqParseResult } from './core';
|
||||||
|
|
||||||
|
// NOTE:
|
||||||
|
// Even though you have added a file in the tsd test, if there is no type definition file in "@twemoji/parser/dist/lib/regex", an error will occur.
|
||||||
|
// Ignore this error.
|
||||||
|
/* eslint @typescript-eslint/ban-ts-comment: 1 */
|
||||||
|
// @ts-ignore
|
||||||
|
import twemojiRegex from '@twemoji/parser/dist/lib/regex';
|
||||||
|
|
||||||
|
type ArgPair = { k: string, v: string | true };
|
||||||
|
type Args = Record<string, string | true>;
|
||||||
|
|
||||||
|
// ASCII space, ideographic space (U+3000), or tab.
const space = P.regexp(/[\u0020\u3000\t]/);
// A single ASCII letter or digit (case-insensitive).
const alphaAndNum = P.regexp(/[a-z0-9]/i);
// Any line break; CRLF is tried before CR/LF so "\r\n" is one break.
const newLine = P.alt([P.crlf, P.cr, P.lf]);
|
||||||
|
|
||||||
|
/**
 * Like P.seq, but with a text fallback: if a later step fails after some
 * input was already consumed, succeeds with the consumed slice as a plain
 * string instead of failing outright. Failure on the very first step
 * (nothing consumed) remains a failure.
 */
function seqOrText<Parsers extends P.Parser<unknown>[]>(...parsers: Parsers): P.Parser<SeqParseResult<Parsers> | string> {
	return new P.Parser<SeqParseResult<Parsers> | string>((input, index, state) => {
		// TODO: typesafe implementation
		const accum: unknown[] = [];
		let latestIndex = index;
		for (let i = 0 ; i < parsers.length; i++) {
			const result = parsers[i].handler(input, latestIndex, state);
			if (!result.success) {
				if (latestIndex === index) {
					// nothing consumed yet: plain failure
					return P.failure();
				} else {
					// partial match: degrade to the raw text consumed so far
					return P.success(latestIndex, input.slice(index, latestIndex));
				}
			}
			accum.push(result.value);
			latestIndex = result.index;
		}
		return P.success(latestIndex, accum as SeqParseResult<Parsers>);
	});
}
|
||||||
|
|
||||||
|
// Zero-width guard: succeeds only while NOT parsing inside a link label
// (used to forbid links/mentions/hashtags nested in a link label).
const notLinkLabel = new P.Parser((_input, index, state) => {
	return (!state.linkLabel)
		? P.success(index, null)
		: P.failure();
});

// Zero-width guard: succeeds only while the current nesting depth is
// still below the configured limit.
const nestable = new P.Parser((_input, index, state) => {
	return (state.depth < state.nestLimit)
		? P.success(index, null)
		: P.failure();
});
|
||||||
|
|
||||||
|
/**
 * Wraps `parser` with nest-depth accounting: below the depth limit the
 * given parser runs; at or past the limit the fallback runs instead
 * (default fallback: a single character).
 *
 * `state.depth` is incremented for the duration of the inner parse and
 * decremented afterwards, whether the parse succeeded or not.
 */
function nest<T>(parser: P.Parser<T>, fallback?: P.Parser<string>): P.Parser<T | string> {
	// nesting limited? -> No: specified parser, Yes: fallback parser (default = P.char)
	const inner = P.alt([
		P.seq(nestable, parser).select(1),
		(fallback != null) ? fallback : P.char,
	]);
	return new P.Parser<T | string>((input, index, state) => {
		state.depth++;
		const result = inner.handler(input, index, state);
		state.depth--;
		return result;
	});
}
|
||||||
|
|
||||||
|
// Result-type table for the grammar: maps each rule name in `language` to
// the node type it produces. Rules that can degrade to raw text on a
// partial match include `| string` in their result type.
interface TypeTable {
	fullParser: (V.VfmNode | string)[],
	simpleParser: (V.VfmSimpleNode | string)[],
	full: V.VfmNode | string,
	simple: V.VfmSimpleNode | string,
	inline: V.VfmInline | string,
	quote: V.NodeType<'quote'>,
	codeBlock: V.NodeType<'blockCode'>,
	mathBlock: V.NodeType<'mathBlock'>,
	centerTag: V.NodeType<'center'>,
	big: V.NodeType<'fn'> | string,
	boldAsta: V.NodeType<'bold'> | string,
	boldTag: V.NodeType<'bold'> | string,
	boldUnder: V.NodeType<'bold'>,
	smallTag: V.NodeType<'small'> | string,
	italicTag: V.NodeType<'italic'> | string,
	italicAsta: V.NodeType<'italic'>,
	italicUnder: V.NodeType<'italic'>,
	strikeTag: V.NodeType<'strike'> | string,
	strikeWave: V.NodeType<'strike'> | string,
	unicodeEmoji: V.NodeType<'unicodeEmoji'>,
	plainTag: V.NodeType<'plain'>,
	fn: V.NodeType<'fn'> | string,
	inlineCode: V.NodeType<'inlineCode'>,
	mathInline: V.NodeType<'mathInline'>,
	mention: V.NodeType<'mention'> | string,
	hashtag: V.NodeType<'hashtag'>,
	emojiCode: V.NodeType<'emojiCode'>,
	link: V.NodeType<'link'>,
	url: V.NodeType<'url'> | string,
	urlAlt: V.NodeType<'url'>,
	search: V.NodeType<'search'>,
	text: string,
}
|
||||||
|
|
||||||
|
export const language = P.createLanguage<TypeTable>({
|
||||||
|
fullParser: r => {
|
||||||
|
return r.full.many(0);
|
||||||
|
},
|
||||||
|
|
||||||
|
simpleParser: r => {
|
||||||
|
return r.simple.many(0);
|
||||||
|
},
|
||||||
|
|
||||||
|
full: r => {
|
||||||
|
return P.alt([
|
||||||
|
// Regexp
|
||||||
|
r.unicodeEmoji,
|
||||||
|
// "<center>" block
|
||||||
|
r.centerTag,
|
||||||
|
// "<small>"
|
||||||
|
r.smallTag,
|
||||||
|
// "<plain>"
|
||||||
|
r.plainTag,
|
||||||
|
// "<b>"
|
||||||
|
r.boldTag,
|
||||||
|
// "<i>"
|
||||||
|
r.italicTag,
|
||||||
|
// "<s>"
|
||||||
|
r.strikeTag,
|
||||||
|
// "<http"
|
||||||
|
r.urlAlt,
|
||||||
|
// "***"
|
||||||
|
r.big,
|
||||||
|
// "**"
|
||||||
|
r.boldAsta,
|
||||||
|
// "*"
|
||||||
|
r.italicAsta,
|
||||||
|
// "__"
|
||||||
|
r.boldUnder,
|
||||||
|
// "_"
|
||||||
|
r.italicUnder,
|
||||||
|
// "```" block
|
||||||
|
r.codeBlock,
|
||||||
|
// "`"
|
||||||
|
r.inlineCode,
|
||||||
|
// ">" block
|
||||||
|
r.quote,
|
||||||
|
// "\\[" block
|
||||||
|
r.mathBlock,
|
||||||
|
// "\\("
|
||||||
|
r.mathInline,
|
||||||
|
// "~~"
|
||||||
|
r.strikeWave,
|
||||||
|
// "$[""
|
||||||
|
r.fn,
|
||||||
|
// "@"
|
||||||
|
r.mention,
|
||||||
|
// "#"
|
||||||
|
r.hashtag,
|
||||||
|
// ":"
|
||||||
|
r.emojiCode,
|
||||||
|
// "?[" or "["
|
||||||
|
r.link,
|
||||||
|
// http
|
||||||
|
r.url,
|
||||||
|
// block
|
||||||
|
r.search,
|
||||||
|
r.text,
|
||||||
|
]);
|
||||||
|
},
|
||||||
|
|
||||||
|
simple: r => {
|
||||||
|
return P.alt([
|
||||||
|
r.unicodeEmoji, // Regexp
|
||||||
|
r.emojiCode, // ":"
|
||||||
|
r.plainTag, // "<plain>" // to NOT parse emojiCode inside `<plain>`
|
||||||
|
r.text,
|
||||||
|
]);
|
||||||
|
},
|
||||||
|
|
||||||
|
inline: r => {
|
||||||
|
return P.alt([
|
||||||
|
// Regexp
|
||||||
|
r.unicodeEmoji,
|
||||||
|
// "<small>"
|
||||||
|
r.smallTag,
|
||||||
|
// "<plain>"
|
||||||
|
r.plainTag,
|
||||||
|
// "<b>"
|
||||||
|
r.boldTag,
|
||||||
|
// "<i>"
|
||||||
|
r.italicTag,
|
||||||
|
// "<s>"
|
||||||
|
r.strikeTag,
|
||||||
|
// <http
|
||||||
|
r.urlAlt,
|
||||||
|
// "***"
|
||||||
|
r.big,
|
||||||
|
// "**"
|
||||||
|
r.boldAsta,
|
||||||
|
// "*"
|
||||||
|
r.italicAsta,
|
||||||
|
// "__"
|
||||||
|
r.boldUnder,
|
||||||
|
// "_"
|
||||||
|
r.italicUnder,
|
||||||
|
// "`"
|
||||||
|
r.inlineCode,
|
||||||
|
// "\\("
|
||||||
|
r.mathInline,
|
||||||
|
// "~~"
|
||||||
|
r.strikeWave,
|
||||||
|
// "$[""
|
||||||
|
r.fn,
|
||||||
|
// "@"
|
||||||
|
r.mention,
|
||||||
|
// "#"
|
||||||
|
r.hashtag,
|
||||||
|
// ":"
|
||||||
|
r.emojiCode,
|
||||||
|
// "?[" or "["
|
||||||
|
r.link,
|
||||||
|
// http
|
||||||
|
r.url,
|
||||||
|
r.text,
|
||||||
|
]);
|
||||||
|
},
|
||||||
|
|
||||||
|
quote: r => {
|
||||||
|
const lines: P.Parser<string[]> = P.seq(
|
||||||
|
P.str('>'),
|
||||||
|
space.option(),
|
||||||
|
P.seq(P.notMatch(newLine), P.char).select(1).many(0).text(),
|
||||||
|
).select(2).sep(newLine, 1);
|
||||||
|
const parser = P.seq(
|
||||||
|
newLine.option(),
|
||||||
|
newLine.option(),
|
||||||
|
P.lineBegin,
|
||||||
|
lines,
|
||||||
|
newLine.option(),
|
||||||
|
newLine.option(),
|
||||||
|
).select(3);
|
||||||
|
return new P.Parser((input, index, state) => {
|
||||||
|
let result;
|
||||||
|
// parse quote
|
||||||
|
result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
const contents = result.value;
|
||||||
|
const quoteIndex = result.index;
|
||||||
|
// disallow empty content if single line
|
||||||
|
if (contents.length === 1 && contents[0].length === 0) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
// parse inner content
|
||||||
|
const contentParser = nest(r.fullParser).many(0);
|
||||||
|
result = contentParser.handler(contents.join('\n'), 0, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
return P.success(quoteIndex, V.QUOTE(mergeText(result.value)));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
codeBlock: () => {
|
||||||
|
const mark = P.str('```');
|
||||||
|
return P.seq(
|
||||||
|
newLine.option(),
|
||||||
|
P.lineBegin,
|
||||||
|
mark,
|
||||||
|
P.seq(P.notMatch(newLine), P.char).select(1).many(0),
|
||||||
|
newLine,
|
||||||
|
P.seq(P.notMatch(P.seq(newLine, mark, P.lineEnd)), P.char).select(1).many(1),
|
||||||
|
newLine,
|
||||||
|
mark,
|
||||||
|
P.lineEnd,
|
||||||
|
newLine.option(),
|
||||||
|
).map(result => {
|
||||||
|
const lang = result[3].join('').trim();
|
||||||
|
const code = result[5].join('');
|
||||||
|
return V.CODE_BLOCK(code, (lang.length > 0 ? lang : null));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
mathBlock: () => {
|
||||||
|
const open = P.str('\\[');
|
||||||
|
const close = P.str('\\]');
|
||||||
|
return P.seq(
|
||||||
|
newLine.option(),
|
||||||
|
P.lineBegin,
|
||||||
|
open,
|
||||||
|
newLine.option(),
|
||||||
|
P.seq(P.notMatch(P.seq(newLine.option(), close)), P.char).select(1).many(1),
|
||||||
|
newLine.option(),
|
||||||
|
close,
|
||||||
|
P.lineEnd,
|
||||||
|
newLine.option(),
|
||||||
|
).map(result => {
|
||||||
|
const formula = result[4].join('');
|
||||||
|
return V.MATH_BLOCK(formula);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
centerTag: r => {
|
||||||
|
const open = P.str('<center>');
|
||||||
|
const close = P.str('</center>');
|
||||||
|
return P.seq(
|
||||||
|
newLine.option(),
|
||||||
|
P.lineBegin,
|
||||||
|
open,
|
||||||
|
newLine.option(),
|
||||||
|
P.seq(P.notMatch(P.seq(newLine.option(), close)), nest(r.inline)).select(1).many(1),
|
||||||
|
newLine.option(),
|
||||||
|
close,
|
||||||
|
P.lineEnd,
|
||||||
|
newLine.option(),
|
||||||
|
).map(result => {
|
||||||
|
return V.CENTER(mergeText(result[4]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
big: r => {
|
||||||
|
const mark = P.str('***');
|
||||||
|
return seqOrText(
|
||||||
|
mark,
|
||||||
|
P.seq(P.notMatch(mark), nest(r.inline)).select(1).many(1),
|
||||||
|
mark,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.FN('tada', {}, mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
boldAsta: r => {
|
||||||
|
const mark = P.str('**');
|
||||||
|
return seqOrText(
|
||||||
|
mark,
|
||||||
|
P.seq(P.notMatch(mark), nest(r.inline)).select(1).many(1),
|
||||||
|
mark,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.BOLD(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
boldTag: r => {
|
||||||
|
const open = P.str('<b>');
|
||||||
|
const close = P.str('</b>');
|
||||||
|
return seqOrText(
|
||||||
|
open,
|
||||||
|
P.seq(P.notMatch(close), nest(r.inline)).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.BOLD(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
	// "__text__" bold. Content is restricted to ASCII letters, digits, and
	// spaces (unlike the asterisk/tag forms, which accept any inline).
	boldUnder: () => {
		const mark = P.str('__');
		return P.seq(
			mark,
			P.alt([alphaAndNum, space]).many(1),
			mark,
		).map(result => V.BOLD(mergeText(result[1])));
	},
|
||||||
|
|
||||||
|
smallTag: r => {
|
||||||
|
const open = P.str('<small>');
|
||||||
|
const close = P.str('</small>');
|
||||||
|
return seqOrText(
|
||||||
|
open,
|
||||||
|
P.seq(P.notMatch(close), nest(r.inline)).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.SMALL(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
italicTag: r => {
|
||||||
|
const open = P.str('<i>');
|
||||||
|
const close = P.str('</i>');
|
||||||
|
return seqOrText(
|
||||||
|
open,
|
||||||
|
P.seq(P.notMatch(close), nest(r.inline)).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.ITALIC(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
italicAsta: () => {
|
||||||
|
const mark = P.str('*');
|
||||||
|
const parser = P.seq(
|
||||||
|
mark,
|
||||||
|
P.alt([alphaAndNum, space]).many(1),
|
||||||
|
mark,
|
||||||
|
);
|
||||||
|
return new P.Parser((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
// check before
|
||||||
|
const beforeStr = input.slice(0, index);
|
||||||
|
if (/[a-z0-9]$/i.test(beforeStr)) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
return P.success(result.index, V.ITALIC(mergeText(result.value[1])));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
italicUnder: () => {
|
||||||
|
const mark = P.str('_');
|
||||||
|
const parser = P.seq(
|
||||||
|
mark,
|
||||||
|
P.alt([alphaAndNum, space]).many(1),
|
||||||
|
mark,
|
||||||
|
);
|
||||||
|
return new P.Parser((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
// check before
|
||||||
|
const beforeStr = input.slice(0, index);
|
||||||
|
if (/[a-z0-9]$/i.test(beforeStr)) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
return P.success(result.index, V.ITALIC(mergeText(result.value[1])));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
strikeTag: r => {
|
||||||
|
const open = P.str('<s>');
|
||||||
|
const close = P.str('</s>');
|
||||||
|
return seqOrText(
|
||||||
|
open,
|
||||||
|
P.seq(P.notMatch(close), nest(r.inline)).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.STRIKE(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
strikeWave: r => {
|
||||||
|
const mark = P.str('~~');
|
||||||
|
return seqOrText(
|
||||||
|
mark,
|
||||||
|
P.seq(P.notMatch(P.alt([mark, newLine])), nest(r.inline)).select(1).many(1),
|
||||||
|
mark,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
return V.STRIKE(mergeText(result[1]));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
	// A single Unicode emoji, matched by the twemoji regex. The regex is
	// rebuilt from `source` only, so any flags on the upstream regex are
	// dropped before it is handed to P.regexp.
	unicodeEmoji: () => {
		const emoji = RegExp(twemojiRegex.source);
		return P.regexp(emoji).map(content => V.UNI_EMOJI(content));
	},
|
||||||
|
|
||||||
|
plainTag: () => {
|
||||||
|
const open = P.str('<plain>');
|
||||||
|
const close = P.str('</plain>');
|
||||||
|
return P.seq(
|
||||||
|
open,
|
||||||
|
newLine.option(),
|
||||||
|
P.seq(
|
||||||
|
P.notMatch(P.seq(newLine.option(), close)),
|
||||||
|
P.char,
|
||||||
|
).select(1).many(1).text(),
|
||||||
|
newLine.option(),
|
||||||
|
close,
|
||||||
|
).select(2).map(result => V.PLAIN(result));
|
||||||
|
},
|
||||||
|
|
||||||
|
fn: r => {
|
||||||
|
const fnName = new P.Parser((input, index, state) => {
|
||||||
|
const result = P.regexp(/[a-z0-9_]+/i).handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
return P.success(result.index, result.value);
|
||||||
|
});
|
||||||
|
const arg: P.Parser<ArgPair> = P.seq(
|
||||||
|
P.regexp(/[a-z0-9_]+/i),
|
||||||
|
P.seq(
|
||||||
|
P.str('='),
|
||||||
|
P.regexp(/[a-z0-9_.-]+/i),
|
||||||
|
).select(1).option(),
|
||||||
|
).map(result => {
|
||||||
|
return {
|
||||||
|
k: result[0],
|
||||||
|
v: (result[1] != null) ? result[1] : true,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const args = P.seq(
|
||||||
|
P.str('.'),
|
||||||
|
arg.sep(P.str(','), 1),
|
||||||
|
).select(1).map(pairs => {
|
||||||
|
const result: Args = { };
|
||||||
|
for (const pair of pairs) {
|
||||||
|
result[pair.k] = pair.v;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
const fnClose = P.str(']');
|
||||||
|
return seqOrText(
|
||||||
|
P.str('$['),
|
||||||
|
fnName,
|
||||||
|
args.option(),
|
||||||
|
P.str(' '),
|
||||||
|
P.seq(P.notMatch(fnClose), nest(r.inline)).select(1).many(1),
|
||||||
|
fnClose,
|
||||||
|
).map(result => {
|
||||||
|
if (typeof result === 'string') return result;
|
||||||
|
const name = result[1];
|
||||||
|
const args: Args = result[2] || {};
|
||||||
|
const content = result[4];
|
||||||
|
return V.FN(name, args, mergeText(content));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
inlineCode: () => {
|
||||||
|
const mark = P.str('`');
|
||||||
|
return P.seq(
|
||||||
|
mark,
|
||||||
|
P.seq(
|
||||||
|
P.notMatch(P.alt([mark, P.str('´'), newLine])),
|
||||||
|
P.char,
|
||||||
|
).select(1).many(1),
|
||||||
|
mark,
|
||||||
|
).map(result => V.INLINE_CODE(result[1].join('')));
|
||||||
|
},
|
||||||
|
|
||||||
|
mathInline: () => {
|
||||||
|
const open = P.str('\\(');
|
||||||
|
const close = P.str('\\)');
|
||||||
|
return P.seq(
|
||||||
|
open,
|
||||||
|
P.seq(
|
||||||
|
P.notMatch(P.alt([close, newLine])),
|
||||||
|
P.char,
|
||||||
|
).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).map(result => V.MATH_INLINE(result[1].join('')));
|
||||||
|
},
|
||||||
|
|
||||||
|
mention: () => {
|
||||||
|
const parser = P.seq(
|
||||||
|
notLinkLabel,
|
||||||
|
P.str('@'),
|
||||||
|
P.regexp(/[a-z0-9_-]+/i),
|
||||||
|
P.seq(
|
||||||
|
P.str('@'),
|
||||||
|
P.regexp(/[a-z0-9_.-]+/i),
|
||||||
|
).select(1).option(),
|
||||||
|
);
|
||||||
|
return new P.Parser<V.VfmMention | string>((input, index, state) => {
|
||||||
|
let result;
|
||||||
|
result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
// check before (not mention)
|
||||||
|
const beforeStr = input.slice(0, index);
|
||||||
|
if (/[a-z0-9]$/i.test(beforeStr)) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
let invalidMention = false;
|
||||||
|
const resultIndex = result.index;
|
||||||
|
const username: string = result.value[2];
|
||||||
|
const hostname: string | null = result.value[3];
|
||||||
|
// remove [.-] of tail of hostname
|
||||||
|
let modifiedHost = hostname;
|
||||||
|
if (hostname != null) {
|
||||||
|
result = /[.-]+$/.exec(hostname);
|
||||||
|
if (result != null) {
|
||||||
|
modifiedHost = hostname.slice(0, (-1 * result[0].length));
|
||||||
|
if (modifiedHost.length === 0) {
|
||||||
|
// disallow invalid char only hostname
|
||||||
|
invalidMention = true;
|
||||||
|
modifiedHost = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// remove "-" of tail of username
|
||||||
|
let modifiedName = username;
|
||||||
|
result = /-+$/.exec(username);
|
||||||
|
if (result != null) {
|
||||||
|
if (modifiedHost == null) {
|
||||||
|
modifiedName = username.slice(0, (-1 * result[0].length));
|
||||||
|
} else {
|
||||||
|
// cannnot to remove tail of username if exist hostname
|
||||||
|
invalidMention = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// disallow "-" of head of username
|
||||||
|
if (modifiedName.length === 0 || modifiedName[0] === '-') {
|
||||||
|
invalidMention = true;
|
||||||
|
}
|
||||||
|
// disallow [.-] of head of hostname
|
||||||
|
if (modifiedHost != null && /^[.-]/.test(modifiedHost)) {
|
||||||
|
invalidMention = true;
|
||||||
|
}
|
||||||
|
// generate a text if mention is invalid
|
||||||
|
if (invalidMention) {
|
||||||
|
return P.success(resultIndex, input.slice(index, resultIndex));
|
||||||
|
}
|
||||||
|
const acct = modifiedHost != null ? `@${modifiedName}@${modifiedHost}` : `@${modifiedName}`;
|
||||||
|
return P.success(index + acct.length, V.MENTION(modifiedName, modifiedHost, acct));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
hashtag: () => {
|
||||||
|
const mark = P.str('#');
|
||||||
|
const hashTagChar = P.seq(
|
||||||
|
P.notMatch(P.alt([P.regexp(/[ \u3000\t.,!?'"#:/[\]【】()「」()<>]/), space, newLine])),
|
||||||
|
P.char,
|
||||||
|
).select(1);
|
||||||
|
const innerItem: P.Parser<unknown> = P.lazy(() => P.alt([
|
||||||
|
P.seq(
|
||||||
|
P.str('('), nest(innerItem, hashTagChar).many(0), P.str(')'),
|
||||||
|
),
|
||||||
|
P.seq(
|
||||||
|
P.str('['), nest(innerItem, hashTagChar).many(0), P.str(']'),
|
||||||
|
),
|
||||||
|
P.seq(
|
||||||
|
P.str('「'), nest(innerItem, hashTagChar).many(0), P.str('」'),
|
||||||
|
),
|
||||||
|
P.seq(
|
||||||
|
P.str('('), nest(innerItem, hashTagChar).many(0), P.str(')'),
|
||||||
|
),
|
||||||
|
hashTagChar,
|
||||||
|
]));
|
||||||
|
const parser = P.seq(
|
||||||
|
notLinkLabel,
|
||||||
|
mark,
|
||||||
|
innerItem.many(1).text(),
|
||||||
|
).select(2);
|
||||||
|
return new P.Parser((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
// check before
|
||||||
|
const beforeStr = input.slice(0, index);
|
||||||
|
if (/[a-z0-9]$/i.test(beforeStr)) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
const resultIndex = result.index;
|
||||||
|
const resultValue = result.value;
|
||||||
|
// disallow number only
|
||||||
|
if (/^[0-9]+$/.test(resultValue)) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
return P.success(resultIndex, V.HASHTAG(resultValue));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
emojiCode: () => {
|
||||||
|
const side = P.notMatch(P.regexp(/[a-z0-9]/i));
|
||||||
|
const mark = P.str(':');
|
||||||
|
return P.seq(
|
||||||
|
P.alt([P.lineBegin, side]),
|
||||||
|
mark,
|
||||||
|
P.regexp(/[a-z0-9_+-]+/i),
|
||||||
|
mark,
|
||||||
|
P.alt([P.lineEnd, side]),
|
||||||
|
).select(2).map(name => V.EMOJI_CODE(name));
|
||||||
|
},
|
||||||
|
|
||||||
|
link: r => {
|
||||||
|
const labelInline = new P.Parser((input, index, state) => {
|
||||||
|
state.linkLabel = true;
|
||||||
|
const result = r.inline.handler(input, index, state);
|
||||||
|
state.linkLabel = false;
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
const closeLabel = P.str(']');
|
||||||
|
const parser = P.seq(
|
||||||
|
notLinkLabel,
|
||||||
|
P.alt([P.str('?['), P.str('[')]),
|
||||||
|
P.seq(
|
||||||
|
P.notMatch(P.alt([closeLabel, newLine])),
|
||||||
|
nest(labelInline),
|
||||||
|
).select(1).many(1),
|
||||||
|
closeLabel,
|
||||||
|
P.str('('),
|
||||||
|
P.alt([r.urlAlt, r.url]),
|
||||||
|
P.str(')'),
|
||||||
|
);
|
||||||
|
return new P.Parser<V.VfmLink>((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
|
||||||
|
const [, prefix, label,,, url] = result.value;
|
||||||
|
|
||||||
|
const silent = (prefix === '?[');
|
||||||
|
if (typeof url === 'string') return P.failure();
|
||||||
|
|
||||||
|
return P.success(result.index, V.LINK(silent, url.props.url, mergeText(label)));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
url: () => {
|
||||||
|
const urlChar = P.regexp(/[.,a-z0-9_/:%#@$&?!~=+-]/i);
|
||||||
|
const innerItem: P.Parser<unknown> = P.lazy(() => P.alt([
|
||||||
|
P.seq(
|
||||||
|
P.str('('), nest(innerItem, urlChar).many(0), P.str(')'),
|
||||||
|
),
|
||||||
|
P.seq(
|
||||||
|
P.str('['), nest(innerItem, urlChar).many(0), P.str(']'),
|
||||||
|
),
|
||||||
|
urlChar,
|
||||||
|
]));
|
||||||
|
const parser = P.seq(
|
||||||
|
notLinkLabel,
|
||||||
|
P.regexp(/https?:\/\//),
|
||||||
|
innerItem.many(1).text(),
|
||||||
|
);
|
||||||
|
return new P.Parser<V.VfmUrl | string>((input, index, state) => {
|
||||||
|
let result;
|
||||||
|
result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
const resultIndex = result.index;
|
||||||
|
let modifiedIndex = resultIndex;
|
||||||
|
const schema: string = result.value[1];
|
||||||
|
let content: string = result.value[2];
|
||||||
|
// remove the ".," at the right end
|
||||||
|
result = /[.,]+$/.exec(content);
|
||||||
|
if (result != null) {
|
||||||
|
modifiedIndex -= result[0].length;
|
||||||
|
content = content.slice(0, (-1 * result[0].length));
|
||||||
|
if (content.length === 0) {
|
||||||
|
return P.success(resultIndex, input.slice(index, resultIndex));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return P.success(modifiedIndex, V.N_URL(schema + content, false));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
urlAlt: () => {
|
||||||
|
const open = P.str('<');
|
||||||
|
const close = P.str('>');
|
||||||
|
const parser = P.seq(
|
||||||
|
notLinkLabel,
|
||||||
|
open,
|
||||||
|
P.regexp(/https?:\/\//),
|
||||||
|
P.seq(P.notMatch(P.alt([close, space])), P.char).select(1).many(1),
|
||||||
|
close,
|
||||||
|
).text();
|
||||||
|
return new P.Parser((input, index, state) => {
|
||||||
|
const result = parser.handler(input, index, state);
|
||||||
|
if (!result.success) {
|
||||||
|
return P.failure();
|
||||||
|
}
|
||||||
|
const text = result.value.slice(1, (result.value.length - 1));
|
||||||
|
return P.success(result.index, V.N_URL(text, true));
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
|
// Search syntax: a full line of query text followed by a space and a
// "search"/"検索" button label (optionally bracketed).
search: () => {
	const button = P.alt([
		P.regexp(/\[(検索|search)\]/i),
		P.regexp(/(検索|search)/i),
	]);
	// One query character: anything up to end-of-line or the trailing button.
	const queryChar = P.seq(
		P.notMatch(P.alt([
			newLine,
			P.seq(space, button, P.lineEnd),
		])),
		P.char,
	).select(1);
	return P.seq(
		newLine.option(),
		P.lineBegin,
		queryChar.many(1),
		space,
		button,
		P.lineEnd,
		newLine.option(),
	).map(([, , chars, sp, btn]) => {
		const query = chars.join('');
		// `content` preserves the raw source line: query + separator + button label.
		return V.SEARCH(query, `${query}${sp}${btn}`);
	});
},
|
||||||
|
|
||||||
|
// Fallback matcher: consumes exactly one character as plain text.
text: () => P.char,
|
||||||
|
});
|
171
src/internal/util.ts
Normal file
171
src/internal/util.ts
Normal file
|
@ -0,0 +1,171 @@
|
||||||
|
import { isVfmBlock, VfmInline, VfmNode, VfmText, TEXT } from '../node';
|
||||||
|
|
||||||
|
type ArrayRecursive<T> = T | Array<ArrayRecursive<T>>
|
||||||
|
|
||||||
|
export function mergeText<T extends VfmNode>(nodes: ArrayRecursive<((T extends VfmInline ? VfmInline : VfmNode) | string)>[]): (T | VfmText)[] {
|
||||||
|
const dest: (T | VfmText)[] = [];
|
||||||
|
const storedChars: string[] = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a text node from the stored chars, And push it.
|
||||||
|
*/
|
||||||
|
function generateText(): void {
|
||||||
|
if (storedChars.length > 0) {
|
||||||
|
dest.push(TEXT(storedChars.join('')));
|
||||||
|
storedChars.length = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const flatten = nodes.flat(1) as (string | T)[];
|
||||||
|
for (const node of flatten) {
|
||||||
|
if (typeof node === 'string') {
|
||||||
|
// Store the char.
|
||||||
|
storedChars.push(node);
|
||||||
|
}
|
||||||
|
else if (!Array.isArray(node) && node.type === 'text') {
|
||||||
|
storedChars.push((node as VfmText).props.text);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
generateText();
|
||||||
|
dest.push(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
generateText();
|
||||||
|
|
||||||
|
return dest;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Serializes a single VFM node back into its source-text representation.
 * Throws if the node type is not one of the known kinds.
 */
export function stringifyNode(node: VfmNode): string {
	switch (node.type) {
		// block
		case 'quote': {
			// Prefix every line of the quoted subtree with "> ".
			return stringifyTree(node.children).split('\n').map(line => `> ${line}`).join('\n');
		}
		case 'search': {
			// `content` already holds the raw source line.
			return node.props.content;
		}
		case 'blockCode': {
			return `\`\`\`${ node.props.lang ?? '' }\n${ node.props.code }\n\`\`\``;
		}
		case 'mathBlock': {
			return `\\[\n${ node.props.formula }\n\\]`;
		}
		case 'center': {
			return `<center>\n${ stringifyTree(node.children) }\n</center>`;
		}
		// inline
		case 'emojiCode': {
			return `:${ node.props.name }:`;
		}
		case 'unicodeEmoji': {
			return node.props.emoji;
		}
		case 'bold': {
			return `**${ stringifyTree(node.children) }**`;
		}
		case 'small': {
			return `<small>${ stringifyTree(node.children) }</small>`;
		}
		case 'italic': {
			return `<i>${ stringifyTree(node.children) }</i>`;
		}
		case 'strike': {
			return `~~${ stringifyTree(node.children) }~~`;
		}
		case 'inlineCode': {
			return `\`${ node.props.code }\``;
		}
		case 'mathInline': {
			return `\\(${ node.props.formula }\\)`;
		}
		case 'mention': {
			// `acct` contains the full source form of the mention.
			return node.props.acct;
		}
		case 'hashtag': {
			return `#${ node.props.hashtag }`;
		}
		case 'url': {
			// Preserve whether the URL was written with angle brackets.
			if (node.props.brackets) {
				return `<${ node.props.url }>`;
			}
			else {
				return node.props.url;
			}
		}
		case 'link': {
			const prefix = node.props.silent ? '?' : '';
			return `${ prefix }[${ stringifyTree(node.children) }](${ node.props.url })`;
		}
		case 'fn': {
			// Render args as ".flag,key=value" appended to the function name;
			// a `true` value means a bare flag with no "=value" part.
			const argFields = Object.keys(node.props.args).map(key => {
				const value = node.props.args[key];
				if (value === true) {
					return key;
				}
				else {
					return `${ key }=${ value }`;
				}
			});
			const args = (argFields.length > 0) ? '.' + argFields.join(',') : '';
			return `$[${ node.props.name }${ args } ${ stringifyTree(node.children) }]`;
		}
		case 'plain': {
			return `<plain>\n${ stringifyTree(node.children) }\n</plain>`;
		}
		case 'text': {
			return node.props.text;
		}
	}
	// Exhaustiveness backstop for values outside the VfmNode union.
	throw new Error('unknown vfm node');
}
|
||||||
|
|
||||||
|
// Tracks the kind of node most recently emitted by stringifyTree, so that
// line breaks can be inserted at inline/block boundaries.
enum stringifyState {
	none = 0,
	inline,
	block
}
|
||||||
|
|
||||||
|
export function stringifyTree(nodes: VfmNode[]): string {
|
||||||
|
const dest: VfmNode[] = [];
|
||||||
|
let state: stringifyState = stringifyState.none;
|
||||||
|
|
||||||
|
for (const node of nodes) {
|
||||||
|
// Add line breaks when appropriate.
|
||||||
|
// none -> inline : No
|
||||||
|
// none -> block : No
|
||||||
|
// inline -> inline : No
|
||||||
|
// inline -> block : Yes
|
||||||
|
// block -> inline : Yes
|
||||||
|
// block -> block : Yes
|
||||||
|
|
||||||
|
let pushLf = true;
|
||||||
|
if (isVfmBlock(node)) {
|
||||||
|
if (state === stringifyState.none) {
|
||||||
|
pushLf = false;
|
||||||
|
}
|
||||||
|
state = stringifyState.block;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (state === stringifyState.none || state === stringifyState.inline) {
|
||||||
|
pushLf = false;
|
||||||
|
}
|
||||||
|
state = stringifyState.inline;
|
||||||
|
}
|
||||||
|
if (pushLf) {
|
||||||
|
dest.push(TEXT('\n'));
|
||||||
|
}
|
||||||
|
|
||||||
|
dest.push(node);
|
||||||
|
}
|
||||||
|
|
||||||
|
return dest.map(n => stringifyNode(n)).join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
export function inspectOne(node: VfmNode, action : (node: VfmNode) => void): void {
|
||||||
|
action(node);
|
||||||
|
if (node.children != null) {
|
||||||
|
for (const child of node.children) {
|
||||||
|
inspectOne(child, action);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
213
src/node.ts
Normal file
213
src/node.ts
Normal file
|
@ -0,0 +1,213 @@
|
||||||
|
// Any node of a VFM syntax tree.
export type VfmNode = VfmBlock | VfmInline;

// NOTE(review): appears to be the node set produced by a simplified/plain
// parse mode — confirm against the parser entry points.
export type VfmSimpleNode = VfmUnicodeEmoji | VfmEmojiCode | VfmText | VfmPlain;

// Block-level nodes.
export type VfmBlock = VfmQuote | VfmSearch | VfmCodeBlock | VfmMathBlock | VfmCenter;

// Keep this list in sync with the VfmBlock union above.
const blockTypes: VfmNode['type'][] = [ 'quote', 'search', 'blockCode', 'mathBlock', 'center' ];
// Type guard: true when the node is a block-level node.
export function isVfmBlock(node: VfmNode): node is VfmBlock {
	return blockTypes.includes(node.type);
}
|
||||||
|
|
||||||
|
// Block quote; stringified as "> "-prefixed lines.
export type VfmQuote = {
	type: 'quote';
	props?: Record<string, unknown>;
	children: VfmNode[];
};
export const QUOTE = (children: VfmNode[]): NodeType<'quote'> => { return { type: 'quote', children }; };

// Search block: a query line followed by a search-button label.
export type VfmSearch = {
	type: 'search';
	props: {
		query: string;
		// The raw source line; used to round-trip via stringify.
		content: string;
	};
	children?: [];
};
export const SEARCH = (query: string, content: string): NodeType<'search'> => { return { type: 'search', props: { query, content } }; };

// Fenced code block (```lang ... ```); `lang` is null when unspecified.
export type VfmCodeBlock = {
	type: 'blockCode';
	props: {
		code: string;
		lang: string | null;
	};
	children?: [];
};
export const CODE_BLOCK = (code: string, lang: string | null): NodeType<'blockCode'> => { return { type: 'blockCode', props: { code, lang } }; };

// Display math block (\[ ... \]).
export type VfmMathBlock = {
	type: 'mathBlock';
	props: {
		formula: string;
	};
	children?: [];
};
export const MATH_BLOCK = (formula: string): NodeType<'mathBlock'> => { return { type: 'mathBlock', props: { formula } }; };

// <center> ... </center> block.
export type VfmCenter = {
	type: 'center';
	props?: Record<string, unknown>;
	children: VfmInline[];
};
export const CENTER = (children: VfmInline[]): NodeType<'center'> => { return { type: 'center', children }; };
|
||||||
|
|
||||||
|
// Inline-level nodes (can appear within a line of text).
export type VfmInline = VfmUnicodeEmoji | VfmEmojiCode | VfmBold | VfmSmall | VfmItalic | VfmStrike |
	VfmInlineCode | VfmMathInline | VfmMention | VfmHashtag | VfmUrl | VfmLink | VfmFn | VfmPlain | VfmText;

// A literal unicode emoji character.
export type VfmUnicodeEmoji = {
	type: 'unicodeEmoji';
	props: {
		emoji: string;
	};
	children?: [];
};
export const UNI_EMOJI = (value: string): NodeType<'unicodeEmoji'> => { return { type: 'unicodeEmoji', props: { emoji: value } }; };

// Custom emoji written as `:name:`.
export type VfmEmojiCode = {
	type: 'emojiCode';
	props: {
		name: string;
	};
	children?: [];
};
export const EMOJI_CODE = (name: string): NodeType<'emojiCode'> => { return { type: 'emojiCode', props: { name: name } }; };

// Bold text (`**...**`).
export type VfmBold = {
	type: 'bold';
	props?: Record<string, unknown>;
	children: VfmInline[];
};
export const BOLD = (children: VfmInline[]): NodeType<'bold'> => { return { type: 'bold', children }; };

// Small text (`<small>...</small>`).
export type VfmSmall = {
	type: 'small';
	props?: Record<string, unknown>;
	children: VfmInline[];
};
export const SMALL = (children: VfmInline[]): NodeType<'small'> => { return { type: 'small', children }; };

// Italic text (`<i>...</i>`).
export type VfmItalic = {
	type: 'italic';
	props?: Record<string, unknown>;
	children: VfmInline[];
};
export const ITALIC = (children: VfmInline[]): NodeType<'italic'> => { return { type: 'italic', children }; };

// Struck-through text (`~~...~~`).
export type VfmStrike = {
	type: 'strike';
	props?: Record<string, unknown>;
	children: VfmInline[];
};
export const STRIKE = (children: VfmInline[]): NodeType<'strike'> => { return { type: 'strike', children }; };

// Inline code span (backtick-delimited).
export type VfmInlineCode = {
	type: 'inlineCode';
	props: {
		code: string;
	};
	children?: [];
};
export const INLINE_CODE = (code: string): NodeType<'inlineCode'> => { return { type: 'inlineCode', props: { code } }; };

// Inline math (`\(...\)`).
export type VfmMathInline = {
	type: 'mathInline';
	props: {
		formula: string;
	};
	children?: [];
};
export const MATH_INLINE = (formula: string): NodeType<'mathInline'> => { return { type: 'mathInline', props: { formula } }; };

// A user mention; `acct` holds the full source form (e.g. `@hoge`).
export type VfmMention = {
	type: 'mention';
	props: {
		username: string;
		host: string | null;
		acct: string;
	};
	children?: [];
};
export const MENTION = (username: string, host: string | null, acct: string): NodeType<'mention'> => { return { type: 'mention', props: { username, host, acct } }; };

// Hashtag; stored without the leading `#`.
export type VfmHashtag = {
	type: 'hashtag';
	props: {
		hashtag: string;
	};
	children?: [];
};
export const HASHTAG = (value: string): NodeType<'hashtag'> => { return { type: 'hashtag', props: { hashtag: value } }; };

// Bare URL; `brackets` is set when the source was written as `<https://...>`.
export type VfmUrl = {
	type: 'url';
	props: {
		url: string;
		brackets?: boolean;
	};
	children?: [];
};
|
||||||
|
export const N_URL = (value: string, brackets?: boolean): NodeType<'url'> => {
|
||||||
|
const node: VfmUrl = { type: 'url', props: { url: value } };
|
||||||
|
if (brackets) node.props.brackets = brackets;
|
||||||
|
return node;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Markdown-style link `[label](url)`; `?[label](url)` when silent.
export type VfmLink = {
	type: 'link';
	props: {
		silent: boolean;
		url: string;
	};
	children: VfmInline[];
};
export const LINK = (silent: boolean, url: string, children: VfmInline[]): NodeType<'link'> => { return { type: 'link', props: { silent, url }, children }; };

// Function syntax: `$[name.flag,key=value content]`.
export type VfmFn = {
	type: 'fn';
	props: {
		name: string;
		// `true` marks a bare flag argument; strings are `key=value` pairs.
		args: Record<string, string | true>;
	};
	children: VfmInline[];
};
export const FN = (name: string, args: VfmFn['props']['args'], children: VfmFn['children']): NodeType<'fn'> => { return { type: 'fn', props: { name, args }, children }; };

// Unparsed region `<plain>...</plain>`; children are text nodes only.
export type VfmPlain = {
	type: 'plain';
	props?: Record<string, unknown>;
	children: VfmText[];
};
export const PLAIN = (text: string): NodeType<'plain'> => { return { type: 'plain', children: [TEXT(text)] }; };

// Plain text content.
export type VfmText = {
	type: 'text';
	props: {
		text: string;
	};
	children?: [];
};
export const TEXT = (value: string): NodeType<'text'> => { return { type: 'text', props: { text: value } }; };
|
||||||
|
|
||||||
|
// Maps a node-type tag (e.g. 'url') to its concrete node type, giving the
// factory functions above precise return types.
export type NodeType<T extends VfmNode['type']> =
	T extends 'quote' ? VfmQuote :
	T extends 'search' ? VfmSearch :
	T extends 'blockCode' ? VfmCodeBlock :
	T extends 'mathBlock' ? VfmMathBlock :
	T extends 'center' ? VfmCenter :
	T extends 'unicodeEmoji' ? VfmUnicodeEmoji :
	T extends 'emojiCode' ? VfmEmojiCode :
	T extends 'bold' ? VfmBold :
	T extends 'small' ? VfmSmall :
	T extends 'italic' ? VfmItalic :
	T extends 'strike' ? VfmStrike :
	T extends 'inlineCode' ? VfmInlineCode :
	T extends 'mathInline' ? VfmMathInline :
	T extends 'mention' ? VfmMention :
	T extends 'hashtag' ? VfmHashtag :
	T extends 'url' ? VfmUrl :
	T extends 'link' ? VfmLink :
	T extends 'fn' ? VfmFn :
	T extends 'plain' ? VfmPlain :
	T extends 'text' ? VfmText :
	never;
|
30
test-d/index.ts
Normal file
30
test-d/index.ts
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
/**
|
||||||
|
* Unit testing TypeScript types.
|
||||||
|
* with https://github.com/SamVerschueren/tsd
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { expectType } from 'tsd';
|
||||||
|
import { NodeType, VfmUrl } from '../src';
|
||||||
|
import * as P from '../src/internal/core';
|
||||||
|
|
||||||
|
describe('#NodeType', () => {
|
||||||
|
test('returns node that has sprcified type', () => {
|
||||||
|
const x = null as unknown as NodeType<'url'>;
|
||||||
|
expectType<VfmUrl>(x);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('parser internals', () => {
|
||||||
|
test('seq', () => {
|
||||||
|
const first = null as unknown as P.Parser<'first'>;
|
||||||
|
const second = null as unknown as P.Parser<'second'>;
|
||||||
|
const third = null as unknown as P.Parser<'third' | 'third-second'>;
|
||||||
|
expectType<P.Parser<['first', 'second', 'third' | 'third-second']>>(P.seq(first, second, third));
|
||||||
|
});
|
||||||
|
test('alt', () => {
|
||||||
|
const first = null as unknown as P.Parser<'first'>;
|
||||||
|
const second = null as unknown as P.Parser<'second'>;
|
||||||
|
const third = null as unknown as P.Parser<'third' | 'third-second'>;
|
||||||
|
expectType<P.Parser<'first' | 'second' | 'third' | 'third-second'>>(P.alt([first, second, third]));
|
||||||
|
});
|
||||||
|
});
|
214
test/api.ts
Normal file
214
test/api.ts
Normal file
|
@ -0,0 +1,214 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import * as vfm from '../src/index';
|
||||||
|
import {
|
||||||
|
TEXT, CENTER, FN, UNI_EMOJI, MENTION, EMOJI_CODE, HASHTAG, N_URL, BOLD, SMALL, ITALIC, STRIKE, QUOTE, MATH_BLOCK, SEARCH, CODE_BLOCK, LINK
|
||||||
|
} from '../src/index';
|
||||||
|
|
||||||
|
describe('API', () => {
|
||||||
|
describe('toString', () => {
|
||||||
|
test('basic', () => {
|
||||||
|
const input =
|
||||||
|
`before
|
||||||
|
<center>
|
||||||
|
Hello $[tada everynyan! 🎉]
|
||||||
|
|
||||||
|
I'm @bygul, A bot of valkyrie!
|
||||||
|
|
||||||
|
https://toastielab.dev/toastie_t0ast/ai
|
||||||
|
</center>
|
||||||
|
after`;
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('single node', () => {
|
||||||
|
const input = '$[tada Hello]';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)[0]), '$[tada Hello]');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('quote', () => {
|
||||||
|
const input = `
|
||||||
|
> abc
|
||||||
|
>
|
||||||
|
> 123
|
||||||
|
`;
|
||||||
|
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '> abc\n> \n> 123');
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
test('search', () => {
|
||||||
|
const input = 'VFM 書き方 123 Search';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('block code', () => {
|
||||||
|
const input = '```\nabc\n```';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('math block', () => {
|
||||||
|
const input = '\\[\ny = 2x + 1\n\\]';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('center', () => {
|
||||||
|
const input = '<center>\nabc\n</center>';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
// test('center (single line)', () => {
|
||||||
|
// const input = '<center>abc</center>';
|
||||||
|
// assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
// });
|
||||||
|
|
||||||
|
test('emoji code', () => {
|
||||||
|
const input = ':abc:';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('unicode emoji', () => {
|
||||||
|
const input = '今起きた😇';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('big', () => {
|
||||||
|
const input = '***abc***';
|
||||||
|
const output = '$[tada abc]';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), output);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('bold', () => {
|
||||||
|
const input = '**abc**';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
// test('bold tag', () => {
|
||||||
|
// const input = '<b>abc</b>';
|
||||||
|
// assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
// });
|
||||||
|
|
||||||
|
test('small', () => {
|
||||||
|
const input = '<small>abc</small>';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
// test('italic', () => {
|
||||||
|
// const input = '*abc*';
|
||||||
|
// assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
// });
|
||||||
|
|
||||||
|
test('italic tag', () => {
|
||||||
|
const input = '<i>abc</i>';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('strike', () => {
|
||||||
|
const input = '~~foo~~';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
});
|
||||||
|
|
||||||
|
// test('strike tag', () => {
|
||||||
|
// const input = '<s>foo</s>';
|
||||||
|
// assert.strictEqual(vfm.toString(vfm.parse(input)), input);
|
||||||
|
// });
|
||||||
|
|
||||||
|
test('inline code', () => {
|
||||||
|
const input = 'AiScript: `#abc = 2`';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), 'AiScript: `#abc = 2`');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('math inline', () => {
|
||||||
|
const input = '\\(y = 2x + 3\\)';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '\\(y = 2x + 3\\)');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('hashtag', () => {
|
||||||
|
const input = 'a #misskey b';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), 'a #misskey b');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('link', () => {
|
||||||
|
const input = '[Ai](https://toastielab.dev/toastie_t0ast/ai)';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '[Ai](https://toastielab.dev/toastie_t0ast/ai)');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('silent link', () => {
|
||||||
|
const input = '?[Ai](https://toastielab.dev/toastie_t0ast/ai)';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '?[Ai](https://toastielab.dev/toastie_t0ast/ai)');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('fn', () => {
|
||||||
|
const input = '$[tada Hello]';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '$[tada Hello]');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('fn with arguments', () => {
|
||||||
|
const input = '$[spin.speed=1s,alternate Hello]';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), '$[spin.speed=1s,alternate Hello]');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('plain', () => {
|
||||||
|
const input = 'a\n<plain>\nHello\nworld\n</plain>\nb';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), 'a\n<plain>\nHello\nworld\n</plain>\nb');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('1 line plain', () => {
|
||||||
|
const input = 'a\n<plain>Hello</plain>\nb';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input)), 'a\n<plain>\nHello\n</plain>\nb');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('preserve url brackets', () => {
|
||||||
|
const input1 = 'https://toastielab.dev/toastie_t0ast/ai';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input1)), input1);
|
||||||
|
|
||||||
|
const input2 = '<https://toastielab.dev/toastie_t0ast/ai>';
|
||||||
|
assert.strictEqual(vfm.toString(vfm.parse(input2)), input2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('inspect', () => {
|
||||||
|
test('replace text', () => {
|
||||||
|
const input = 'good morning $[tada everynyan!]';
|
||||||
|
const result = vfm.parse(input);
|
||||||
|
vfm.inspect(result, node => {
|
||||||
|
if (node.type == 'text') {
|
||||||
|
node.props.text = node.props.text.replace(/good morning/g, 'hello');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
assert.strictEqual(vfm.toString(result), 'hello $[tada everynyan!]');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('replace text (one item)', () => {
|
||||||
|
const input = 'good morning $[tada everyone!]';
|
||||||
|
const result = vfm.parse(input);
|
||||||
|
vfm.inspect(result[1], node => {
|
||||||
|
if (node.type == 'text') {
|
||||||
|
node.props.text = node.props.text.replace(/one/g, 'nyan');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
assert.strictEqual(vfm.toString(result), 'good morning $[tada everynyan!]');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('extract', () => {
|
||||||
|
test('basic', () => {
|
||||||
|
const nodes = vfm.parse('@hoge @piyo @bebeyo');
|
||||||
|
const expect = [
|
||||||
|
MENTION('hoge', null, '@hoge'),
|
||||||
|
MENTION('piyo', null, '@piyo'),
|
||||||
|
MENTION('bebeyo', null, '@bebeyo')
|
||||||
|
];
|
||||||
|
assert.deepStrictEqual(vfm.extract(nodes, node => node.type == 'mention'), expect);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('nested', () => {
|
||||||
|
const nodes = vfm.parse('abc:hoge:$[tada 123 @hoge :foo:]:piyo:');
|
||||||
|
const expect = [
|
||||||
|
EMOJI_CODE('hoge'),
|
||||||
|
EMOJI_CODE('foo'),
|
||||||
|
EMOJI_CODE('piyo')
|
||||||
|
];
|
||||||
|
assert.deepStrictEqual(vfm.extract(nodes, node => node.type == 'emojiCode'), expect);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
1515
test/parser.ts
Normal file
1515
test/parser.ts
Normal file
File diff suppressed because it is too large
Load diff
26
tsconfig.json
Normal file
26
tsconfig.json
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES6",
|
||||||
|
"module": "commonjs",
|
||||||
|
"declaration": true,
|
||||||
|
"outDir": "./built/",
|
||||||
|
"removeComments": true,
|
||||||
|
"strict": true,
|
||||||
|
"strictFunctionTypes": true,
|
||||||
|
"strictNullChecks": true,
|
||||||
|
"experimentalDecorators": true,
|
||||||
|
"noImplicitReturns": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
},
|
||||||
|
"typeRoots": [
|
||||||
|
"node_modules/@types",
|
||||||
|
"src/@types",
|
||||||
|
],
|
||||||
|
"include": [
|
||||||
|
"src/**/*",
|
||||||
|
],
|
||||||
|
"exclude": [
|
||||||
|
"node_modules",
|
||||||
|
"test/**/*",
|
||||||
|
]
|
||||||
|
}
|
Loading…
Reference in a new issue