first commit
This commit is contained in:
60
node_modules/parseley/CHANGELOG.md
generated
vendored
Normal file
60
node_modules/parseley/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
# Changelog
|
||||
|
||||
## Version 0.12.1
|
||||
|
||||
* Runtime check for input of `parse` and `parse1` to be a string.
|
||||
|
||||
## Version 0.12.0
|
||||
|
||||
* Support for escape sequences according to specifications ([#97](https://github.com/mxxii/parseley/issues/97)).
|
||||
|
||||
Now follows <https://www.w3.org/TR/selectors-3/#lex> for parsing and <https://w3c.github.io/csswg-drafts/cssom/#common-serializing-idioms> for serializing.
|
||||
|
||||
Possibly breaking changes:
|
||||
|
||||
* parsed strings (attribute values) retained escape sequences previously, now they are unescaped;
|
||||
* strings with `"` character were serialized as single-quoted previously, now all strings serialized as double-quoted, per spec suggestion.
|
||||
|
||||
## Version 0.11.0
|
||||
|
||||
* Targeting Node.js version 14 and ES2020;
|
||||
* Now should be discoverable with [denoify](https://github.com/garronej/denoify).
|
||||
|
||||
## Version 0.10.0
|
||||
|
||||
* Bump dependencies - fix "./core module cannot be found" issue.
|
||||
|
||||
## Version 0.9.1
|
||||
|
||||
* Fix namespace parsing;
|
||||
* Remove terser, use only `rollup-plugin-cleanup` to condition published files.
|
||||
|
||||
## Version 0.9.0
|
||||
|
||||
* Replaced `moo` and `nearley` with my [leac](https://github.com/mxxii/leac) and [peberminta](https://github.com/mxxii/peberminta) packages. Now `parseley` with all dependencies are TypeScript, dual CommonJS/ES module packages;
|
||||
* Package is marked as free of side effects and tersed;
|
||||
* Deno version is provided, with the help of `denoify`.
|
||||
|
||||
## Version 0.8.0
|
||||
|
||||
* Drop Node.js version 10 support. 12.22.x is required;
|
||||
* Fix typos in type definitions.
|
||||
|
||||
## Version 0.7.0
|
||||
|
||||
* Switched to TypeScript;
|
||||
* Added type definitions for AST;
|
||||
* Hybrid package (ESM, CommonJS);
|
||||
* Renamed `sort()` to `normalize()` in order to better reflect what it does;
|
||||
* Replaced `compareArrays()` with `compareSpecificity()` and `compareSelectors()` - more sensible API;
|
||||
* Generated [documentation](https://github.com/mxxii/parseley/tree/main/docs).
|
||||
|
||||
## Version 0.6.0
|
||||
|
||||
Added `sort()` and `compareArrays()` functions.
|
||||
|
||||
## Version 0.5.0
|
||||
|
||||
Initial release.
|
||||
|
||||
Aiming at Node.js version 10 and up.
|
21
node_modules/parseley/LICENSE
generated
vendored
Normal file
21
node_modules/parseley/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021-2022 KillyMXI <killy@mxii.eu.org>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
159
node_modules/parseley/README.md
generated
vendored
Normal file
159
node_modules/parseley/README.md
generated
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
# parseley
|
||||
|
||||

|
||||

|
||||
[](https://github.com/mxxii/parseley/blob/main/LICENSE)
|
||||
[](https://www.npmjs.com/package/parseley)
|
||||
[](https://www.npmjs.com/package/parseley)
|
||||
[](https://deno.land/x/parseley)
|
||||
|
||||
**Par**ser for CSS **sele**ctors.
|
||||
|
||||
----
|
||||
|
||||
|
||||
## Features
|
||||
|
||||
* Convert CSS selector strings into objects that are easy to work with;
|
||||
|
||||
* Serialize back if needed;
|
||||
|
||||
* Get specificity for free.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
Available here: [CHANGELOG.md](https://github.com/mxxii/parseley/blob/main/CHANGELOG.md).
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
### Node
|
||||
|
||||
```shell
|
||||
> npm i parseley
|
||||
```
|
||||
|
||||
```typescript
|
||||
import * as parseley from 'parseley';
|
||||
```
|
||||
|
||||
### Deno
|
||||
|
||||
```typescript
|
||||
import * as parseley from 'https://deno.land/x/parseley@.../parseley.ts';
|
||||
```
|
||||
|
||||
|
||||
## Usage example
|
||||
|
||||
```js
|
||||
import { parse1, serialize, normalize } from 'parseley';
|
||||
import { inspect } from 'node:util';
|
||||
|
||||
const str = 'div#id1 > .class2.class1[attr1]';
|
||||
|
||||
const ast = parse1(str);
|
||||
console.log(inspect(ast, { breakLength: 45, depth: null }));
|
||||
|
||||
const serialized = serialize(ast);
|
||||
console.log(`Serialized: '${serialized}'`);
|
||||
|
||||
normalize(ast);
|
||||
const normalized = serialize(ast);
|
||||
console.log(`Normalized: '${normalized}'`);
|
||||
```
|
||||
|
||||
<details><summary>Example output</summary>
|
||||
|
||||
```text
|
||||
{
|
||||
type: 'compound',
|
||||
list: [
|
||||
{
|
||||
type: 'class',
|
||||
name: 'class2',
|
||||
specificity: [ 0, 1, 0 ]
|
||||
},
|
||||
{
|
||||
type: 'class',
|
||||
name: 'class1',
|
||||
specificity: [ 0, 1, 0 ]
|
||||
},
|
||||
{
|
||||
type: 'attrPresence',
|
||||
name: 'attr1',
|
||||
namespace: null,
|
||||
specificity: [ 0, 1, 0 ]
|
||||
},
|
||||
{
|
||||
type: 'combinator',
|
||||
combinator: '>',
|
||||
left: {
|
||||
type: 'compound',
|
||||
list: [
|
||||
{
|
||||
type: 'tag',
|
||||
name: 'div',
|
||||
namespace: null,
|
||||
specificity: [ 0, 0, 1 ]
|
||||
},
|
||||
{
|
||||
type: 'id',
|
||||
name: 'id1',
|
||||
specificity: [ 1, 0, 0 ]
|
||||
}
|
||||
],
|
||||
specificity: [ 1, 0, 1 ]
|
||||
},
|
||||
specificity: [ 1, 0, 1 ]
|
||||
}
|
||||
],
|
||||
specificity: [ 1, 3, 1 ]
|
||||
}
|
||||
Serialized: 'div#id1>.class2.class1[attr1]'
|
||||
Normalized: 'div#id1>.class1.class2[attr1]'
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
## Documentation
|
||||
|
||||
* [Functions](https://github.com/mxxii/parseley/blob/main/docs/index.md)
|
||||
* [AST types](https://github.com/mxxii/parseley/blob/main/docs/modules/Ast.md)
|
||||
* [Snapshots](https://github.com/mxxii/parseley/blob/main/test/snapshots/snapshots.ts.md)
|
||||
|
||||
|
||||
## Input reference
|
||||
|
||||
<https://www.w3.org/TR/selectors-4/#grammar>
|
||||
|
||||
<https://www.w3.org/TR/css-syntax-3/#token-diagrams>
|
||||
|
||||
Terminology used in this project is more or less consistent to the spec, with some exceptions made for clarity. The term "type" is way too overloaded in particular, the term "tag" is used where appropriate instead.
|
||||
|
||||
Any pseudo elements are left for possible future implementation. I have no immediate need for them and they require some careful consideration.
|
||||
|
||||
|
||||
## Output AST
|
||||
|
||||
Consistency: overall AST shape is always the same. This makes client code simpler, at least for a certain processing tasks.
|
||||
|
||||
For example, always use compound selectors, even when there is only one simple selector inside.
|
||||
|
||||
Comma-separated selectors might not be needed for every use case. So there are two functions - one can parse commas and always returns the top-level list regardless of the comma presence in a particular selector, and the other can't parse commas and returns a compound selector AST directly.
|
||||
|
||||
Complex selectors are represented in the way that makes the left side to be an another condition on the right side element. This was made with the right-to-left processing direction in mind. One consequence of this is that there is no such thing as a "complex selector" node in the AST hierarchy, but there are "combinator" nodes attached to "compound selector" nodes.
|
||||
|
||||
All AST nodes have their specificity computed (except the top-level list of comma-separated selectors where it doesn't really make sense).
|
||||
|
||||
|
||||
## Motivation and inspiration
|
||||
|
||||
| Package | Hits | Misses
|
||||
| ---------- | --------- | ---------
|
||||
| [parsel](https://github.com/leaverou/parsel) | Sensible AST; specificity calculation; cool name | Not friendly to node.js; based on regex
|
||||
| [css-what](https://github.com/fb55/css-what) and [css-select](https://github.com/fb55/css-select) | The idea to process complex selectors in right-to-left order | `css-select` is a solution for a different problem compared to what I needed; `css-what` produces only a list of tokens
|
||||
| [scalpel](https://github.com/gajus/scalpel) | Introduced me to [nearley](https://nearley.js.org/) parsing toolkit (albeit I'm not using it here anymore) | AST it produces is very far from what I can use
|
||||
| [css-selector-parser](https://github.com/mdevils/css-selector-parser) | Configurable and lightweight | Again, AST is far from my needs
|
148
node_modules/parseley/lib/ast.d.ts
generated
vendored
Normal file
148
node_modules/parseley/lib/ast.d.ts
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
/**
|
||||
* Specificity as defined by Selectors spec.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#specificity}
|
||||
*
|
||||
* Three levels: for id, class, tag (type).
|
||||
*
|
||||
* Extra level(s) used in HTML styling don't fit the scope of this package
|
||||
* and no space reserved for them.
|
||||
*/
|
||||
export type Specificity = [number, number, number];
|
||||
/**
|
||||
* The `*` selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#the-universal-selector}
|
||||
*
|
||||
* `parseley` considers tag name and universal selectors to be unrelated entities
|
||||
* for simplicity of processing.
|
||||
*/
|
||||
export type UniversalSelector = {
|
||||
type: 'universal';
|
||||
namespace: string | null;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Tag name (type) selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#type-selectors}
|
||||
*
|
||||
* `parseley` considers tag name and universal selectors to be unrelated entities
|
||||
* for simplicity of processing.
|
||||
*/
|
||||
export type TagSelector = {
|
||||
type: 'tag';
|
||||
name: string;
|
||||
namespace: string | null;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Class selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#class-html}
|
||||
*/
|
||||
export type ClassSelector = {
|
||||
type: 'class';
|
||||
name: string;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Id selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#id-selectors}
|
||||
*/
|
||||
export type IdSelector = {
|
||||
type: 'id';
|
||||
name: string;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Attribute presence selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#attribute-selectors}
|
||||
*
|
||||
* `parseley` considers attribute presence and value selectors to be unrelated entities
|
||||
* for simplicity of processing.
|
||||
*/
|
||||
export type AttributePresenceSelector = {
|
||||
type: 'attrPresence';
|
||||
name: string;
|
||||
namespace: string | null;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Attribute value selector.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#attribute-selectors}
|
||||
*
|
||||
* `parseley` considers attribute presence and value selectors to be unrelated entities
|
||||
* for simplicity of processing.
|
||||
*/
|
||||
export type AttributeValueSelector = {
|
||||
type: 'attrValue';
|
||||
name: string;
|
||||
namespace: string | null;
|
||||
matcher: '=' | '~=' | '|=' | '^=' | '$=' | '*=';
|
||||
value: string;
|
||||
modifier: 'i' | 's' | null;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Represents a selectors combinator with what's on the left side of it.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#combinators}
|
||||
*/
|
||||
export type Combinator = {
|
||||
type: 'combinator';
|
||||
combinator: ' ' | '+' | '>' | '~' | '||';
|
||||
left: CompoundSelector;
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Any thing representing a single condition on an element.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#simple}
|
||||
*
|
||||
* `parseley` deviates from the spec here by adding `Combinator` to the enumeration.
|
||||
* This is done for simplicity of processing.
|
||||
*
|
||||
* Combinator effectively considered an extra condition on a specific element
|
||||
* (*"have this kind of element in relation"*).
|
||||
*/
|
||||
export type SimpleSelector = UniversalSelector | TagSelector | ClassSelector | IdSelector | AttributePresenceSelector | AttributeValueSelector | Combinator;
|
||||
/**
|
||||
* Compound selector - a set of conditions describing a single element.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#compound}
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#complex}
|
||||
*
|
||||
* Important note: due to the way `parseley` represents combinators,
|
||||
* every compound selector is also a complex selector with everything
|
||||
* connected from the left side.
|
||||
* Specificity value also includes any extra weight added by the left side.
|
||||
*
|
||||
* If there is a combinator in the selector - it is guaranteed to be
|
||||
* the last entry in the list of inner selectors.
|
||||
*/
|
||||
export type CompoundSelector = {
|
||||
type: 'compound';
|
||||
list: SimpleSelector[];
|
||||
specificity: Specificity;
|
||||
};
|
||||
/**
|
||||
* Represents a comma-separated list of compound selectors.
|
||||
*
|
||||
* {@link https://www.w3.org/TR/selectors/#selector-list}
|
||||
*
|
||||
* As this kind of selector can combine different ways to match elements,
|
||||
* a single specificity value doesn't make sense for it and therefore absent.
|
||||
*/
|
||||
export type ListSelector = {
|
||||
type: 'list';
|
||||
list: CompoundSelector[];
|
||||
};
|
||||
/**
|
||||
* Any kind of selector supported by `parseley`.
|
||||
*/
|
||||
export type Selector = ListSelector | CompoundSelector | SimpleSelector;
|
311
node_modules/parseley/lib/parseley.cjs
generated
vendored
Normal file
311
node_modules/parseley/lib/parseley.cjs
generated
vendored
Normal file
@@ -0,0 +1,311 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var leac = require('leac');
|
||||
var p = require('peberminta');
|
||||
|
||||
function _interopNamespace(e) {
|
||||
if (e && e.__esModule) return e;
|
||||
var n = Object.create(null);
|
||||
if (e) {
|
||||
Object.keys(e).forEach(function (k) {
|
||||
if (k !== 'default') {
|
||||
var d = Object.getOwnPropertyDescriptor(e, k);
|
||||
Object.defineProperty(n, k, d.get ? d : {
|
||||
enumerable: true,
|
||||
get: function () { return e[k]; }
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
n["default"] = e;
|
||||
return Object.freeze(n);
|
||||
}
|
||||
|
||||
var p__namespace = /*#__PURE__*/_interopNamespace(p);
|
||||
|
||||
var ast = /*#__PURE__*/Object.freeze({
|
||||
__proto__: null
|
||||
});
|
||||
|
||||
const ws = `(?:[ \\t\\r\\n\\f]*)`;
|
||||
const nl = `(?:\\n|\\r\\n|\\r|\\f)`;
|
||||
const nonascii = `[^\\x00-\\x7F]`;
|
||||
const unicode = `(?:\\\\[0-9a-f]{1,6}(?:\\r\\n|[ \\n\\r\\t\\f])?)`;
|
||||
const escape = `(?:\\\\[^\\n\\r\\f0-9a-f])`;
|
||||
const nmstart = `(?:[_a-z]|${nonascii}|${unicode}|${escape})`;
|
||||
const nmchar = `(?:[_a-z0-9-]|${nonascii}|${unicode}|${escape})`;
|
||||
const name = `(?:${nmchar}+)`;
|
||||
const ident = `(?:[-]?${nmstart}${nmchar}*)`;
|
||||
const string1 = `'([^\\n\\r\\f\\\\']|\\\\${nl}|${nonascii}|${unicode}|${escape})*'`;
|
||||
const string2 = `"([^\\n\\r\\f\\\\"]|\\\\${nl}|${nonascii}|${unicode}|${escape})*"`;
|
||||
const lexSelector = leac.createLexer([
|
||||
{ name: 'ws', regex: new RegExp(ws) },
|
||||
{ name: 'hash', regex: new RegExp(`#${name}`, 'i') },
|
||||
{ name: 'ident', regex: new RegExp(ident, 'i') },
|
||||
{ name: 'str1', regex: new RegExp(string1, 'i') },
|
||||
{ name: 'str2', regex: new RegExp(string2, 'i') },
|
||||
{ name: '*' },
|
||||
{ name: '.' },
|
||||
{ name: ',' },
|
||||
{ name: '[' },
|
||||
{ name: ']' },
|
||||
{ name: '=' },
|
||||
{ name: '>' },
|
||||
{ name: '|' },
|
||||
{ name: '+' },
|
||||
{ name: '~' },
|
||||
{ name: '^' },
|
||||
{ name: '$' },
|
||||
]);
|
||||
const lexEscapedString = leac.createLexer([
|
||||
{ name: 'unicode', regex: new RegExp(unicode, 'i') },
|
||||
{ name: 'escape', regex: new RegExp(escape, 'i') },
|
||||
{ name: 'any', regex: new RegExp('[\\s\\S]', 'i') }
|
||||
]);
|
||||
function sumSpec([a0, a1, a2], [b0, b1, b2]) {
|
||||
return [a0 + b0, a1 + b1, a2 + b2];
|
||||
}
|
||||
function sumAllSpec(ss) {
|
||||
return ss.reduce(sumSpec, [0, 0, 0]);
|
||||
}
|
||||
const unicodeEscapedSequence_ = p__namespace.token((t) => t.name === 'unicode' ? String.fromCodePoint(parseInt(t.text.slice(1), 16)) : undefined);
|
||||
const escapedSequence_ = p__namespace.token((t) => t.name === 'escape' ? t.text.slice(1) : undefined);
|
||||
const anyChar_ = p__namespace.token((t) => t.name === 'any' ? t.text : undefined);
|
||||
const escapedString_ = p__namespace.map(p__namespace.many(p__namespace.or(unicodeEscapedSequence_, escapedSequence_, anyChar_)), (cs) => cs.join(''));
|
||||
function unescape(escapedString) {
|
||||
const lexerResult = lexEscapedString(escapedString);
|
||||
const result = escapedString_({ tokens: lexerResult.tokens, options: undefined }, 0);
|
||||
return result.value;
|
||||
}
|
||||
function literal(name) {
|
||||
return p__namespace.token((t) => t.name === name ? true : undefined);
|
||||
}
|
||||
const whitespace_ = p__namespace.token((t) => t.name === 'ws' ? null : undefined);
|
||||
const optionalWhitespace_ = p__namespace.option(whitespace_, null);
|
||||
function optionallySpaced(parser) {
|
||||
return p__namespace.middle(optionalWhitespace_, parser, optionalWhitespace_);
|
||||
}
|
||||
const identifier_ = p__namespace.token((t) => t.name === 'ident' ? unescape(t.text) : undefined);
|
||||
const hashId_ = p__namespace.token((t) => t.name === 'hash' ? unescape(t.text.slice(1)) : undefined);
|
||||
const string_ = p__namespace.token((t) => t.name.startsWith('str') ? unescape(t.text.slice(1, -1)) : undefined);
|
||||
const namespace_ = p__namespace.left(p__namespace.option(identifier_, ''), literal('|'));
|
||||
const qualifiedName_ = p__namespace.eitherOr(p__namespace.ab(namespace_, identifier_, (ns, name) => ({ name: name, namespace: ns })), p__namespace.map(identifier_, (name) => ({ name: name, namespace: null })));
|
||||
const uniSelector_ = p__namespace.eitherOr(p__namespace.ab(namespace_, literal('*'), (ns) => ({ type: 'universal', namespace: ns, specificity: [0, 0, 0] })), p__namespace.map(literal('*'), () => ({ type: 'universal', namespace: null, specificity: [0, 0, 0] })));
|
||||
const tagSelector_ = p__namespace.map(qualifiedName_, ({ name, namespace }) => ({
|
||||
type: 'tag',
|
||||
name: name,
|
||||
namespace: namespace,
|
||||
specificity: [0, 0, 1]
|
||||
}));
|
||||
const classSelector_ = p__namespace.ab(literal('.'), identifier_, (fullstop, name) => ({
|
||||
type: 'class',
|
||||
name: name,
|
||||
specificity: [0, 1, 0]
|
||||
}));
|
||||
const idSelector_ = p__namespace.map(hashId_, (name) => ({
|
||||
type: 'id',
|
||||
name: name,
|
||||
specificity: [1, 0, 0]
|
||||
}));
|
||||
const attrModifier_ = p__namespace.token((t) => {
|
||||
if (t.name === 'ident') {
|
||||
if (t.text === 'i' || t.text === 'I') {
|
||||
return 'i';
|
||||
}
|
||||
if (t.text === 's' || t.text === 'S') {
|
||||
return 's';
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
const attrValue_ = p__namespace.eitherOr(p__namespace.ab(string_, p__namespace.option(p__namespace.right(optionalWhitespace_, attrModifier_), null), (v, mod) => ({ value: v, modifier: mod })), p__namespace.ab(identifier_, p__namespace.option(p__namespace.right(whitespace_, attrModifier_), null), (v, mod) => ({ value: v, modifier: mod })));
|
||||
const attrMatcher_ = p__namespace.choice(p__namespace.map(literal('='), () => '='), p__namespace.ab(literal('~'), literal('='), () => '~='), p__namespace.ab(literal('|'), literal('='), () => '|='), p__namespace.ab(literal('^'), literal('='), () => '^='), p__namespace.ab(literal('$'), literal('='), () => '$='), p__namespace.ab(literal('*'), literal('='), () => '*='));
|
||||
const attrPresenceSelector_ = p__namespace.abc(literal('['), optionallySpaced(qualifiedName_), literal(']'), (lbr, { name, namespace }) => ({
|
||||
type: 'attrPresence',
|
||||
name: name,
|
||||
namespace: namespace,
|
||||
specificity: [0, 1, 0]
|
||||
}));
|
||||
const attrValueSelector_ = p__namespace.middle(literal('['), p__namespace.abc(optionallySpaced(qualifiedName_), attrMatcher_, optionallySpaced(attrValue_), ({ name, namespace }, matcher, { value, modifier }) => ({
|
||||
type: 'attrValue',
|
||||
name: name,
|
||||
namespace: namespace,
|
||||
matcher: matcher,
|
||||
value: value,
|
||||
modifier: modifier,
|
||||
specificity: [0, 1, 0]
|
||||
})), literal(']'));
|
||||
const attrSelector_ = p__namespace.eitherOr(attrPresenceSelector_, attrValueSelector_);
|
||||
const typeSelector_ = p__namespace.eitherOr(uniSelector_, tagSelector_);
|
||||
const subclassSelector_ = p__namespace.choice(idSelector_, classSelector_, attrSelector_);
|
||||
const compoundSelector_ = p__namespace.map(p__namespace.eitherOr(p__namespace.flatten(typeSelector_, p__namespace.many(subclassSelector_)), p__namespace.many1(subclassSelector_)), (ss) => {
|
||||
return {
|
||||
type: 'compound',
|
||||
list: ss,
|
||||
specificity: sumAllSpec(ss.map(s => s.specificity))
|
||||
};
|
||||
});
|
||||
const combinator_ = p__namespace.choice(p__namespace.map(literal('>'), () => '>'), p__namespace.map(literal('+'), () => '+'), p__namespace.map(literal('~'), () => '~'), p__namespace.ab(literal('|'), literal('|'), () => '||'));
|
||||
const combinatorSeparator_ = p__namespace.eitherOr(optionallySpaced(combinator_), p__namespace.map(whitespace_, () => ' '));
|
||||
const complexSelector_ = p__namespace.leftAssoc2(compoundSelector_, p__namespace.map(combinatorSeparator_, (c) => (left, right) => ({
|
||||
type: 'compound',
|
||||
list: [...right.list, { type: 'combinator', combinator: c, left: left, specificity: left.specificity }],
|
||||
specificity: sumSpec(left.specificity, right.specificity)
|
||||
})), compoundSelector_);
|
||||
const listSelector_ = p__namespace.leftAssoc2(p__namespace.map(complexSelector_, (s) => ({ type: 'list', list: [s] })), p__namespace.map(optionallySpaced(literal(',')), () => (acc, next) => ({ type: 'list', list: [...acc.list, next] })), complexSelector_);
|
||||
function parse_(parser, str) {
|
||||
if (!(typeof str === 'string' || str instanceof String)) {
|
||||
throw new Error('Expected a selector string. Actual input is not a string!');
|
||||
}
|
||||
const lexerResult = lexSelector(str);
|
||||
if (!lexerResult.complete) {
|
||||
throw new Error(`The input "${str}" was only partially tokenized, stopped at offset ${lexerResult.offset}!\n` +
|
||||
prettyPrintPosition(str, lexerResult.offset));
|
||||
}
|
||||
const result = optionallySpaced(parser)({ tokens: lexerResult.tokens, options: undefined }, 0);
|
||||
if (!result.matched) {
|
||||
throw new Error(`No match for "${str}" input!`);
|
||||
}
|
||||
if (result.position < lexerResult.tokens.length) {
|
||||
const token = lexerResult.tokens[result.position];
|
||||
throw new Error(`The input "${str}" was only partially parsed, stopped at offset ${token.offset}!\n` +
|
||||
prettyPrintPosition(str, token.offset, token.len));
|
||||
}
|
||||
return result.value;
|
||||
}
|
||||
function prettyPrintPosition(str, offset, len = 1) {
|
||||
return `${str.replace(/(\t)|(\r)|(\n)/g, (m, t, r) => t ? '\u2409' : r ? '\u240d' : '\u240a')}\n${''.padEnd(offset)}${'^'.repeat(len)}`;
|
||||
}
|
||||
function parse(str) {
|
||||
return parse_(listSelector_, str);
|
||||
}
|
||||
function parse1(str) {
|
||||
return parse_(complexSelector_, str);
|
||||
}
|
||||
|
||||
function serialize(selector) {
|
||||
if (!selector.type) {
|
||||
throw new Error('This is not an AST node.');
|
||||
}
|
||||
switch (selector.type) {
|
||||
case 'universal':
|
||||
return _serNs(selector.namespace) + '*';
|
||||
case 'tag':
|
||||
return _serNs(selector.namespace) + _serIdent(selector.name);
|
||||
case 'class':
|
||||
return '.' + _serIdent(selector.name);
|
||||
case 'id':
|
||||
return '#' + _serIdent(selector.name);
|
||||
case 'attrPresence':
|
||||
return `[${_serNs(selector.namespace)}${_serIdent(selector.name)}]`;
|
||||
case 'attrValue':
|
||||
return `[${_serNs(selector.namespace)}${_serIdent(selector.name)}${selector.matcher}"${_serStr(selector.value)}"${(selector.modifier ? selector.modifier : '')}]`;
|
||||
case 'combinator':
|
||||
return serialize(selector.left) + selector.combinator;
|
||||
case 'compound':
|
||||
return selector.list.reduce((acc, node) => {
|
||||
if (node.type === 'combinator') {
|
||||
return serialize(node) + acc;
|
||||
}
|
||||
else {
|
||||
return acc + serialize(node);
|
||||
}
|
||||
}, '');
|
||||
case 'list':
|
||||
return selector.list.map(serialize).join(',');
|
||||
}
|
||||
}
|
||||
function _serNs(ns) {
|
||||
return (ns || ns === '')
|
||||
? _serIdent(ns) + '|'
|
||||
: '';
|
||||
}
|
||||
function _codePoint(char) {
|
||||
return `\\${char.codePointAt(0).toString(16)} `;
|
||||
}
|
||||
function _serIdent(str) {
|
||||
return str.replace(
|
||||
/(^[0-9])|(^-[0-9])|(^-$)|([-0-9a-zA-Z_]|[^\x00-\x7F])|(\x00)|([\x01-\x1f]|\x7f)|([\s\S])/g, (m, d1, d2, hy, safe, nl, ctrl, other) => d1 ? _codePoint(d1) :
|
||||
d2 ? '-' + _codePoint(d2.slice(1)) :
|
||||
hy ? '\\-' :
|
||||
safe ? safe :
|
||||
nl ? '\ufffd' :
|
||||
ctrl ? _codePoint(ctrl) :
|
||||
'\\' + other);
|
||||
}
|
||||
function _serStr(str) {
|
||||
return str.replace(
|
||||
/(")|(\\)|(\x00)|([\x01-\x1f]|\x7f)/g, (m, dq, bs, nl, ctrl) => dq ? '\\"' :
|
||||
bs ? '\\\\' :
|
||||
nl ? '\ufffd' :
|
||||
_codePoint(ctrl));
|
||||
}
|
||||
function normalize(selector) {
|
||||
if (!selector.type) {
|
||||
throw new Error('This is not an AST node.');
|
||||
}
|
||||
switch (selector.type) {
|
||||
case 'compound': {
|
||||
selector.list.forEach(normalize);
|
||||
selector.list.sort((a, b) => _compareArrays(_getSelectorPriority(a), _getSelectorPriority(b)));
|
||||
break;
|
||||
}
|
||||
case 'combinator': {
|
||||
normalize(selector.left);
|
||||
break;
|
||||
}
|
||||
case 'list': {
|
||||
selector.list.forEach(normalize);
|
||||
selector.list.sort((a, b) => (serialize(a) < serialize(b)) ? -1 : 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return selector;
|
||||
}
|
||||
function _getSelectorPriority(selector) {
|
||||
switch (selector.type) {
|
||||
case 'universal':
|
||||
return [1];
|
||||
case 'tag':
|
||||
return [1];
|
||||
case 'id':
|
||||
return [2];
|
||||
case 'class':
|
||||
return [3, selector.name];
|
||||
case 'attrPresence':
|
||||
return [4, serialize(selector)];
|
||||
case 'attrValue':
|
||||
return [5, serialize(selector)];
|
||||
case 'combinator':
|
||||
return [15, serialize(selector)];
|
||||
}
|
||||
}
|
||||
function compareSelectors(a, b) {
|
||||
return _compareArrays(a.specificity, b.specificity);
|
||||
}
|
||||
function compareSpecificity(a, b) {
|
||||
return _compareArrays(a, b);
|
||||
}
|
||||
function _compareArrays(a, b) {
|
||||
if (!Array.isArray(a) || !Array.isArray(b)) {
|
||||
throw new Error('Arguments must be arrays.');
|
||||
}
|
||||
const shorter = (a.length < b.length) ? a.length : b.length;
|
||||
for (let i = 0; i < shorter; i++) {
|
||||
if (a[i] === b[i]) {
|
||||
continue;
|
||||
}
|
||||
return (a[i] < b[i]) ? -1 : 1;
|
||||
}
|
||||
return a.length - b.length;
|
||||
}
|
||||
|
||||
exports.Ast = ast;
|
||||
exports.compareSelectors = compareSelectors;
|
||||
exports.compareSpecificity = compareSpecificity;
|
||||
exports.normalize = normalize;
|
||||
exports.parse = parse;
|
||||
exports.parse1 = parse1;
|
||||
exports.serialize = serialize;
|
39
node_modules/parseley/lib/parseley.d.ts
generated
vendored
Normal file
39
node_modules/parseley/lib/parseley.d.ts
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
import * as Ast from './ast';
|
||||
export { Ast };
|
||||
export { parse, parse1 } from './parser';
|
||||
/**
|
||||
* Convert a selector AST back to a string representation.
|
||||
*
|
||||
* Note: formatting is not preserved in the AST.
|
||||
*
|
||||
* @param selector - A selector AST object.
|
||||
*/
|
||||
export declare function serialize(selector: Ast.Selector): string;
|
||||
/**
|
||||
* Modifies the given AST **in place** to have all internal arrays
|
||||
* in a stable order. Returns the AST.
|
||||
*
|
||||
* Intended for consistent processing and normalized `serialize()` output.
|
||||
*
|
||||
* @param selector - A selector AST object.
|
||||
*/
|
||||
export declare function normalize(selector: Ast.Selector): Ast.Selector;
|
||||
/**
|
||||
* Compare selectors based on their specificity.
|
||||
*
|
||||
* Usable as a comparator for sorting.
|
||||
*
|
||||
* @param a - First selector.
|
||||
* @param b - Second selector.
|
||||
*/
|
||||
export declare function compareSelectors(a: Ast.SimpleSelector | Ast.CompoundSelector, b: Ast.SimpleSelector | Ast.CompoundSelector): number;
|
||||
/**
|
||||
* Compare specificity values without reducing them
|
||||
* as arbitrary base numbers.
|
||||
*
|
||||
* Usable as a comparator for sorting.
|
||||
*
|
||||
* @param a - First specificity value.
|
||||
* @param b - Second specificity value.
|
||||
*/
|
||||
export declare function compareSpecificity(a: Ast.Specificity, b: Ast.Specificity): number;
|
281
node_modules/parseley/lib/parseley.mjs
generated
vendored
Normal file
281
node_modules/parseley/lib/parseley.mjs
generated
vendored
Normal file
@@ -0,0 +1,281 @@
|
||||
import { createLexer } from 'leac';
|
||||
import * as p from 'peberminta';
|
||||
|
||||
var ast = /*#__PURE__*/Object.freeze({
|
||||
__proto__: null
|
||||
});
|
||||
|
||||
const ws = `(?:[ \\t\\r\\n\\f]*)`;
|
||||
const nl = `(?:\\n|\\r\\n|\\r|\\f)`;
|
||||
const nonascii = `[^\\x00-\\x7F]`;
|
||||
const unicode = `(?:\\\\[0-9a-f]{1,6}(?:\\r\\n|[ \\n\\r\\t\\f])?)`;
|
||||
const escape = `(?:\\\\[^\\n\\r\\f0-9a-f])`;
|
||||
const nmstart = `(?:[_a-z]|${nonascii}|${unicode}|${escape})`;
|
||||
const nmchar = `(?:[_a-z0-9-]|${nonascii}|${unicode}|${escape})`;
|
||||
const name = `(?:${nmchar}+)`;
|
||||
const ident = `(?:[-]?${nmstart}${nmchar}*)`;
|
||||
const string1 = `'([^\\n\\r\\f\\\\']|\\\\${nl}|${nonascii}|${unicode}|${escape})*'`;
|
||||
const string2 = `"([^\\n\\r\\f\\\\"]|\\\\${nl}|${nonascii}|${unicode}|${escape})*"`;
|
||||
// Main selector lexer. Rule order matters: multi-character tokens
// (hash, identifiers, strings) must be tried before the single-character
// punctuation tokens below.
const lexSelector = createLexer([
    { name: 'ws', regex: new RegExp(ws) },
    { name: 'hash', regex: new RegExp(`#${name}`, 'i') },
    { name: 'ident', regex: new RegExp(ident, 'i') },
    { name: 'str1', regex: new RegExp(string1, 'i') },
    { name: 'str2', regex: new RegExp(string2, 'i') },
    { name: '*' },
    { name: '.' },
    { name: ',' },
    { name: '[' },
    { name: ']' },
    { name: '=' },
    { name: '>' },
    { name: '|' },
    { name: '+' },
    { name: '~' },
    { name: '^' },
    { name: '$' },
]);
// Secondary lexer used by `unescape` to split an already-matched
// identifier/string body into unicode escapes, simple escapes,
// and plain characters ('any' matches a single character of anything else).
const lexEscapedString = createLexer([
    { name: 'unicode', regex: new RegExp(unicode, 'i') },
    { name: 'escape', regex: new RegExp(escape, 'i') },
    { name: 'any', regex: new RegExp('[\\s\\S]', 'i') }
]);
|
||||
/**
 * Add two specificity triples component-wise.
 *
 * @param first - Specificity triple `[id, class, tag]`.
 * @param second - Specificity triple to add.
 * @returns New triple; inputs are not mutated.
 */
function sumSpec([x0, x1, x2], [y0, y1, y2]) {
    const total = [x0 + y0, x1 + y1, x2 + y2];
    return total;
}
|
||||
/**
 * Sum a list of specificity triples into a single triple.
 * Empty input yields `[0, 0, 0]`.
 */
function sumAllSpec(ss) {
    let total = [0, 0, 0];
    for (const spec of ss) {
        total = sumSpec(total, spec);
    }
    return total;
}
|
||||
// Parsers over the `lexEscapedString` token stream:
// a unicode escape yields the code point it encodes,
const unicodeEscapedSequence_ = p.token((t) => t.name === 'unicode' ? String.fromCodePoint(parseInt(t.text.slice(1), 16)) : undefined);
// a simple escape yields the escaped character itself,
const escapedSequence_ = p.token((t) => t.name === 'escape' ? t.text.slice(1) : undefined);
// any other character passes through unchanged.
const anyChar_ = p.token((t) => t.name === 'any' ? t.text : undefined);
// The fully unescaped value is the concatenation of all pieces.
const escapedString_ = p.map(p.many(p.or(unicodeEscapedSequence_, escapedSequence_, anyChar_)), (cs) => cs.join(''));
|
||||
/**
 * Resolve CSS escape sequences in raw token text
 * (identifier or string body) into plain characters.
 */
function unescape(escapedString) {
    const { tokens } = lexEscapedString(escapedString);
    const parsed = escapedString_({ tokens, options: undefined }, 0);
    return parsed.value;
}
|
||||
/**
 * Build a parser that matches exactly one token by its name.
 */
function literal(name) {
    const matchByName = (t) => (t.name === name ? true : undefined);
    return p.token(matchByName);
}
|
||||
// Whitespace token parser — only its presence matters, value is null.
const whitespace_ = p.token((t) => t.name === 'ws' ? null : undefined);
const optionalWhitespace_ = p.option(whitespace_, null);
// Wrap a parser so it tolerates whitespace on both sides.
function optionallySpaced(parser) {
    return p.middle(optionalWhitespace_, parser, optionalWhitespace_);
}
// Identifier, `#id`, and quoted-string tokens — unescaped on the fly.
const identifier_ = p.token((t) => t.name === 'ident' ? unescape(t.text) : undefined);
const hashId_ = p.token((t) => t.name === 'hash' ? unescape(t.text.slice(1)) : undefined);
const string_ = p.token((t) => t.name.startsWith('str') ? unescape(t.text.slice(1, -1)) : undefined);
// Namespace prefix: optional identifier followed by '|' ('' when omitted).
const namespace_ = p.left(p.option(identifier_, ''), literal('|'));
// `ns|name` or bare `name` (namespace is null when absent).
const qualifiedName_ = p.eitherOr(p.ab(namespace_, identifier_, (ns, name) => ({ name: name, namespace: ns })), p.map(identifier_, (name) => ({ name: name, namespace: null })));
// Universal selector `ns|*` or `*` — contributes zero specificity.
const uniSelector_ = p.eitherOr(p.ab(namespace_, literal('*'), (ns) => ({ type: 'universal', namespace: ns, specificity: [0, 0, 0] })), p.map(literal('*'), () => ({ type: 'universal', namespace: null, specificity: [0, 0, 0] })));
|
||||
// Tag selector — specificity (0,0,1).
const tagSelector_ = p.map(qualifiedName_, ({ name, namespace }) => ({
    type: 'tag',
    name: name,
    namespace: namespace,
    specificity: [0, 0, 1]
}));
// Class selector `.name` — specificity (0,1,0).
const classSelector_ = p.ab(literal('.'), identifier_, (fullstop, name) => ({
    type: 'class',
    name: name,
    specificity: [0, 1, 0]
}));
// Id selector `#name` — specificity (1,0,0).
const idSelector_ = p.map(hashId_, (name) => ({
    type: 'id',
    name: name,
    specificity: [1, 0, 0]
}));
|
||||
// Attribute-value case modifier: `i`/`I` or `s`/`S` identifier tokens.
const attrModifier_ = p.token((t) => {
    if (t.name === 'ident') {
        if (t.text === 'i' || t.text === 'I') {
            return 'i';
        }
        if (t.text === 's' || t.text === 'S') {
            return 's';
        }
    }
    return undefined;
});
// Attribute value: quoted string or bare identifier, each with an optional
// modifier. After a bare identifier, the modifier requires mandatory
// whitespace (otherwise it would have lexed as part of the identifier).
const attrValue_ = p.eitherOr(p.ab(string_, p.option(p.right(optionalWhitespace_, attrModifier_), null), (v, mod) => ({ value: v, modifier: mod })), p.ab(identifier_, p.option(p.right(whitespace_, attrModifier_), null), (v, mod) => ({ value: v, modifier: mod })));
// Attribute matchers: =, ~=, |=, ^=, $=, *= (two-character forms are
// parsed as two consecutive tokens).
const attrMatcher_ = p.choice(p.map(literal('='), () => '='), p.ab(literal('~'), literal('='), () => '~='), p.ab(literal('|'), literal('='), () => '|='), p.ab(literal('^'), literal('='), () => '^='), p.ab(literal('$'), literal('='), () => '$='), p.ab(literal('*'), literal('='), () => '*='));
|
||||
// `[name]` — attribute presence selector; specificity (0,1,0).
const attrPresenceSelector_ = p.abc(literal('['), optionallySpaced(qualifiedName_), literal(']'), (lbr, { name, namespace }) => ({
    type: 'attrPresence',
    name: name,
    namespace: namespace,
    specificity: [0, 1, 0]
}));
// `[name<matcher>value]` — attribute value selector; specificity (0,1,0).
const attrValueSelector_ = p.middle(literal('['), p.abc(optionallySpaced(qualifiedName_), attrMatcher_, optionallySpaced(attrValue_), ({ name, namespace }, matcher, { value, modifier }) => ({
    type: 'attrValue',
    name: name,
    namespace: namespace,
    matcher: matcher,
    value: value,
    modifier: modifier,
    specificity: [0, 1, 0]
})), literal(']'));
// Either attribute form.
const attrSelector_ = p.eitherOr(attrPresenceSelector_, attrValueSelector_);
// Type selector: universal `*` or a tag name.
const typeSelector_ = p.eitherOr(uniSelector_, tagSelector_);
// Subclass selectors: id, class, or attribute.
const subclassSelector_ = p.choice(idSelector_, classSelector_, attrSelector_);
|
||||
// Compound selector: an optional type selector followed by any number of
// subclass selectors, or one-or-more subclass selectors alone. Its
// specificity is the component-wise sum over its parts.
const compoundSelector_ = p.map(p.eitherOr(p.flatten(typeSelector_, p.many(subclassSelector_)), p.many1(subclassSelector_)), (ss) => {
    return {
        type: 'compound',
        list: ss,
        specificity: sumAllSpec(ss.map(s => s.specificity))
    };
});
// Explicit combinators: >, +, ~, and || (parsed as two '|' tokens).
const combinator_ = p.choice(p.map(literal('>'), () => '>'), p.map(literal('+'), () => '+'), p.map(literal('~'), () => '~'), p.ab(literal('|'), literal('|'), () => '||'));
// A combinator separator is an explicit combinator (optionally spaced)
// or bare whitespace, which stands for the descendant combinator ' '.
const combinatorSeparator_ = p.eitherOr(optionallySpaced(combinator_), p.map(whitespace_, () => ' '));
// Left-associative chain of compound selectors: the left-hand side is
// folded into the right compound's list as a `combinator` node.
const complexSelector_ = p.leftAssoc2(compoundSelector_, p.map(combinatorSeparator_, (c) => (left, right) => ({
    type: 'compound',
    list: [...right.list, { type: 'combinator', combinator: c, left: left, specificity: left.specificity }],
    specificity: sumSpec(left.specificity, right.specificity)
})), compoundSelector_);
// Comma-separated selector list, accumulated into a `list` node.
const listSelector_ = p.leftAssoc2(p.map(complexSelector_, (s) => ({ type: 'list', list: [s] })), p.map(optionallySpaced(literal(',')), () => (acc, next) => ({ type: 'list', list: [...acc.list, next] })), complexSelector_);
|
||||
/**
 * Shared driver for `parse` and `parse1`.
 *
 * Tokenizes the input, runs the given token-level parser over the whole
 * token stream, and throws a descriptive error (with a caret pointing at
 * the offending offset) when tokenization or parsing is incomplete.
 *
 * @param parser - Token parser to run (`listSelector_` or `complexSelector_`).
 * @param str - CSS selector string.
 * @returns The parsed AST value.
 * @throws Error when input is not a string, cannot be fully tokenized,
 *   does not match, or is only partially consumed.
 */
function parse_(parser, str) {
    if (!(typeof str === 'string' || str instanceof String)) {
        throw new Error('Expected a selector string. Actual input is not a string!');
    }
    const lexerResult = lexSelector(str);
    if (!lexerResult.complete) {
        throw new Error(`The input "${str}" was only partially tokenized, stopped at offset ${lexerResult.offset}!\n` +
            prettyPrintPosition(str, lexerResult.offset));
    }
    // Tolerate leading/trailing whitespace around the whole selector.
    const result = optionallySpaced(parser)({ tokens: lexerResult.tokens, options: undefined }, 0);
    if (!result.matched) {
        throw new Error(`No match for "${str}" input!`);
    }
    // Leftover tokens mean the selector has trailing unparsable content.
    if (result.position < lexerResult.tokens.length) {
        const token = lexerResult.tokens[result.position];
        throw new Error(`The input "${str}" was only partially parsed, stopped at offset ${token.offset}!\n` +
            prettyPrintPosition(str, token.offset, token.len));
    }
    return result.value;
}
|
||||
/**
 * Render `str` with control characters made visible (tab, CR, LF as
 * Unicode "control picture" glyphs) and a caret line underneath pointing
 * at `offset` for `len` characters.
 */
function prettyPrintPosition(str, offset, len = 1) {
    const visualized = str.replace(/(\t)|(\r)|(\n)/g, (m, t, r) => {
        if (t) { return '\u2409'; }
        if (r) { return '\u240d'; }
        return '\u240a';
    });
    const marker = ''.padEnd(offset) + '^'.repeat(len);
    return `${visualized}\n${marker}`;
}
|
||||
/**
 * Parse a CSS selector string, possibly a comma-separated list.
 * The result is an AST rooted at a `list` node.
 *
 * @param str - CSS selector string (may contain commas).
 */
function parse(str) {
    return parse_(listSelector_, str);
}
|
||||
/**
 * Parse a single complex CSS selector (no comma-separated lists).
 * The result is an AST rooted at a `compound` node.
 *
 * @param str - CSS selector string (no commas).
 */
function parse1(str) {
    return parse_(complexSelector_, str);
}
|
||||
|
||||
/**
 * Serialize an AST (sub)tree back into a selector string.
 *
 * Identifiers and attribute values are escaped on output; attribute
 * values are always double-quoted (CSSOM serialization idiom —
 * see CHANGELOG 0.12.0).
 *
 * @param selector - Any AST node produced by `parse`/`parse1`.
 * @throws Error when the argument has no `type` property.
 */
function serialize(selector) {
    if (!selector.type) {
        throw new Error('This is not an AST node.');
    }
    switch (selector.type) {
        case 'universal':
            return _serNs(selector.namespace) + '*';
        case 'tag':
            return _serNs(selector.namespace) + _serIdent(selector.name);
        case 'class':
            return '.' + _serIdent(selector.name);
        case 'id':
            return '#' + _serIdent(selector.name);
        case 'attrPresence':
            return `[${_serNs(selector.namespace)}${_serIdent(selector.name)}]`;
        case 'attrValue':
            return `[${_serNs(selector.namespace)}${_serIdent(selector.name)}${selector.matcher}"${_serStr(selector.value)}"${(selector.modifier ? selector.modifier : '')}]`;
        case 'combinator':
            // A combinator node carries its left-hand side.
            return serialize(selector.left) + selector.combinator;
        case 'compound':
            // Combinator entries are prepended (they represent what comes
            // before this compound); simple selectors are appended in order.
            return selector.list.reduce((acc, node) => {
                if (node.type === 'combinator') {
                    return serialize(node) + acc;
                }
                else {
                    return acc + serialize(node);
                }
            }, '');
        case 'list':
            return selector.list.map(serialize).join(',');
    }
}
|
||||
/**
 * Serialize a namespace prefix. The empty string is a valid namespace
 * (yields a bare '|'); null/undefined mean "no namespace" and yield ''.
 */
function _serNs(ns) {
    if (ns || ns === '') {
        return `${_serIdent(ns)}|`;
    }
    return '';
}
|
||||
/**
 * Serialize one character as a CSS hex escape
 * (a trailing space terminates the escape sequence).
 */
function _codePoint(char) {
    const hex = char.codePointAt(0).toString(16);
    return `\\${hex} `;
}
|
||||
/**
 * Escape a string for output as a CSS identifier.
 *
 * Alternation order in the regex is significant: a leading digit, a
 * leading '-digit', or a lone '-' must be escaped even though digits,
 * hyphen, letters and underscore are otherwise safe; NUL becomes U+FFFD;
 * other control characters become hex escapes; everything else gets a
 * backslash prefix.
 */
function _serIdent(str) {
    return str.replace(
        /(^[0-9])|(^-[0-9])|(^-$)|([-0-9a-zA-Z_]|[^\x00-\x7F])|(\x00)|([\x01-\x1f]|\x7f)|([\s\S])/g,
        (m, d1, d2, hy, safe, nl, ctrl, other) => {
            if (d1) { return _codePoint(d1); }
            if (d2) { return '-' + _codePoint(d2.slice(1)); }
            if (hy) { return '\\-'; }
            if (safe) { return safe; }
            if (nl) { return '\ufffd'; }
            if (ctrl) { return _codePoint(ctrl); }
            return '\\' + other;
        });
}
|
||||
/**
 * Escape a string for output inside double quotes:
 * '"' and '\' are backslash-escaped, NUL becomes U+FFFD,
 * other control characters become hex escapes.
 */
function _serStr(str) {
    return str.replace(
        /(")|(\\)|(\x00)|([\x01-\x1f]|\x7f)/g,
        (m, dq, bs, nl, ctrl) => {
            if (dq) { return '\\"'; }
            if (bs) { return '\\\\'; }
            if (nl) { return '\ufffd'; }
            return _codePoint(ctrl);
        });
}
|
||||
/**
 * Recursively sort the AST in place into a canonical order, so that
 * equivalent selectors serialize identically.
 *
 * Compound members are ordered by `_getSelectorPriority`; list members
 * are ordered by their serialized form.
 *
 * @param selector - AST node; mutated and returned.
 * @throws Error when the argument has no `type` property.
 */
function normalize(selector) {
    if (!selector.type) {
        throw new Error('This is not an AST node.');
    }
    if (selector.type === 'compound') {
        selector.list.forEach(normalize);
        selector.list.sort((x, y) => _compareArrays(_getSelectorPriority(x), _getSelectorPriority(y)));
    }
    else if (selector.type === 'combinator') {
        normalize(selector.left);
    }
    else if (selector.type === 'list') {
        selector.list.forEach(normalize);
        selector.list.sort((x, y) => (serialize(x) < serialize(y)) ? -1 : 1);
    }
    return selector;
}
|
||||
/**
 * Sort key used by `normalize` to order members of a compound selector:
 * type selectors first, then id, class, attribute presence, attribute
 * value, and combinators last; string-bearing kinds break ties by their
 * name or serialized form.
 */
function _getSelectorPriority(selector) {
    const kind = selector.type;
    if (kind === 'universal' || kind === 'tag') {
        return [1];
    }
    if (kind === 'id') {
        return [2];
    }
    if (kind === 'class') {
        return [3, selector.name];
    }
    if (kind === 'attrPresence') {
        return [4, serialize(selector)];
    }
    if (kind === 'attrValue') {
        return [5, serialize(selector)];
    }
    if (kind === 'combinator') {
        return [15, serialize(selector)];
    }
}
|
||||
/**
 * Compare two selector nodes by their specificity.
 * Usable as a comparator for sorting.
 */
function compareSelectors(a, b) {
    const specA = a.specificity;
    const specB = b.specificity;
    return _compareArrays(specA, specB);
}
|
||||
/**
 * Compare two specificity triples element-wise (no reduction to a single
 * base number). Usable as a comparator for sorting.
 */
function compareSpecificity(a, b) {
    return _compareArrays(a, b);
}
|
||||
/**
 * Lexicographic comparison of two arrays of comparable values.
 *
 * Elements are compared pairwise; on a full common-prefix match the
 * shorter array sorts first (result is the length difference).
 *
 * @throws Error when either argument is not an array.
 */
function _compareArrays(a, b) {
    if (!Array.isArray(a) || !Array.isArray(b)) {
        throw new Error('Arguments must be arrays.');
    }
    const common = Math.min(a.length, b.length);
    for (let i = 0; i < common; i++) {
        if (a[i] !== b[i]) {
            return (a[i] < b[i]) ? -1 : 1;
        }
    }
    return a.length - b.length;
}
|
||||
|
||||
export { ast as Ast, compareSelectors, compareSpecificity, normalize, parse, parse1, serialize };
|
19
node_modules/parseley/lib/parser.d.ts
generated
vendored
Normal file
19
node_modules/parseley/lib/parser.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import * as ast from './ast';
|
||||
/**
 * Parse a CSS selector string.
 *
 * This function supports comma-separated selector lists
 * and always returns an AST starting from a node of type `list`.
 *
 * @param str - CSS selector string (can contain commas).
 * @throws An `Error` when the input is not a string or cannot be fully parsed.
 */
export declare function parse(str: string): ast.ListSelector;
/**
 * Parse a CSS selector string.
 *
 * This function does not support comma-separated selector lists
 * and always returns an AST starting from a node of type `compound`.
 *
 * @param str - CSS selector string (no commas).
 * @throws An `Error` when the input is not a string or cannot be fully parsed.
 */
export declare function parse1(str: string): ast.CompoundSelector;
|
92
node_modules/parseley/package.json
generated
vendored
Normal file
92
node_modules/parseley/package.json
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"name": "parseley",
|
||||
"version": "0.12.1",
|
||||
"description": "CSS selectors parser",
|
||||
"keywords": [
|
||||
"CSS",
|
||||
"selectors",
|
||||
"parser",
|
||||
"AST",
|
||||
"serializer",
|
||||
"specificity"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/mxxii/parseley.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/mxxii/parseley/issues"
|
||||
},
|
||||
"homepage": "https://github.com/mxxii/parseley",
|
||||
"author": "KillyMXI",
|
||||
"funding": "https://ko-fi.com/killymxi",
|
||||
"license": "MIT",
|
||||
"exports": {
|
||||
"import": "./lib/parseley.mjs",
|
||||
"require": "./lib/parseley.cjs"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "./lib/parseley.cjs",
|
||||
"module": "./lib/parseley.mjs",
|
||||
"types": "./lib/parseley.d.ts",
|
||||
"files": [
|
||||
"lib"
|
||||
],
|
||||
"sideEffects": false,
|
||||
"scripts": {
|
||||
"build:deno": "denoify",
|
||||
"build:docs": "typedoc --plugin typedoc-plugin-markdown",
|
||||
"build:rollup": "rollup -c",
|
||||
"build:types": "tsc --declaration --emitDeclarationOnly",
|
||||
"build": "npm run clean && npm run build:rollup && npm run build:types && npm run build:docs && npm run build:deno",
|
||||
"checkAll": "npm run lint && npm test",
|
||||
"clean": "rimraf lib",
|
||||
"example": "node ./example/example.mjs",
|
||||
"lint:eslint": "eslint .",
|
||||
"lint:md": "markdownlint-cli2",
|
||||
"lint": "npm run lint:eslint && npm run lint:md",
|
||||
"prepublishOnly": "npm run build && npm run checkAll",
|
||||
"test": "ava --timeout=20s"
|
||||
},
|
||||
"dependencies": {
|
||||
"leac": "^0.6.0",
|
||||
"peberminta": "^0.9.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "^11.1.0",
|
||||
"@tsconfig/node14": "^1.0.3",
|
||||
"@types/node": "^14.18.42",
|
||||
"@typescript-eslint/eslint-plugin": "^5.59.0",
|
||||
"@typescript-eslint/parser": "^5.59.0",
|
||||
"ava": "^5.2.0",
|
||||
"denoify": "^1.5.3",
|
||||
"eslint": "^8.39.0",
|
||||
"eslint-plugin-jsonc": "^2.7.0",
|
||||
"eslint-plugin-tsdoc": "^0.2.17",
|
||||
"markdownlint-cli2": "^0.7.0",
|
||||
"rimraf": "^5.0.0",
|
||||
"rollup": "^2.79.1",
|
||||
"rollup-plugin-cleanup": "^3.2.1",
|
||||
"ts-node": "^10.9.1",
|
||||
"tslib": "^2.5.0",
|
||||
"typedoc": "~0.23.28",
|
||||
"typedoc-plugin-markdown": "~3.14.0",
|
||||
"typescript": "~4.9.5"
|
||||
},
|
||||
"ava": {
|
||||
"extensions": {
|
||||
"ts": "module"
|
||||
},
|
||||
"files": [
|
||||
"test/**/*"
|
||||
],
|
||||
"nodeArguments": [
|
||||
"--loader=ts-node/esm",
|
||||
"--experimental-specifier-resolution=node"
|
||||
],
|
||||
"verbose": true
|
||||
},
|
||||
"denoify": {
|
||||
"out": "./deno"
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user