first commit
This commit is contained in:
1
node_modules/leac/lib/leac.cjs
generated
vendored
Normal file
1
node_modules/leac/lib/leac.cjs
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
"use strict";Object.defineProperty(exports,"__esModule",{value:!0});const e=/\n/g;function t(t){const o=[...t.matchAll(e)].map((e=>e.index||0));o.unshift(-1);const s=n(o,0,o.length);return e=>r(s,e)}function n(e,t,r){if(r-t==1)return{offset:e[t],index:t+1};const o=Math.ceil((t+r)/2),s=n(e,t,o),l=n(e,o,r);return{offset:s.offset,low:s,high:l}}function r(e,t){return function(e){return Object.prototype.hasOwnProperty.call(e,"index")}(e)?{line:e.index,column:t-e.offset}:r(e.high.offset<t?e.high:e.low,t)}function o(e,t){return{...e,regex:s(e,t)}}function s(e,t){if(0===e.name.length)throw new Error(`Rule #${t} has empty name, which is not allowed.`);if(function(e){return Object.prototype.hasOwnProperty.call(e,"regex")}(e))return function(e){if(e.global)throw new Error(`Regular expression /${e.source}/${e.flags} contains the global flag, which is not allowed.`);return e.sticky?e:new RegExp(e.source,e.flags+"y")}(e.regex);if(function(e){return Object.prototype.hasOwnProperty.call(e,"str")}(e)){if(0===e.str.length)throw new Error(`Rule #${t} ("${e.name}") has empty "str" property, which is not allowed.`);return new RegExp(l(e.str),"y")}return new RegExp(l(e.name),"y")}function l(e){return e.replace(/[-[\]{}()*+!<=:?./\\^$|#\s,]/g,"\\$&")}exports.createLexer=function(e,n="",r={}){const s="string"!=typeof n?n:r,l="string"==typeof n?n:"",c=e.map(o),i=!!s.lineNumbers;return function(e,n=0){const r=i?t(e):()=>({line:0,column:0});let o=n;const s=[];e:for(;o<e.length;){let t=!1;for(const n of c){n.regex.lastIndex=o;const c=n.regex.exec(e);if(c&&c[0].length>0){if(!n.discard){const e=r(o),t="string"==typeof n.replace?c[0].replace(new RegExp(n.regex.source,n.regex.flags),n.replace):c[0];s.push({state:l,name:n.name,text:t,offset:o,len:c[0].length,line:e.line,column:e.column})}if(o=n.regex.lastIndex,t=!0,n.push){const t=n.push(e,o);s.push(...t.tokens),o=t.offset}if(n.pop)break e;break}}if(!t)break}return{tokens:s,offset:o,complete:e.length<=o}}};
|
||||
165
node_modules/leac/lib/leac.d.ts
generated
vendored
Normal file
165
node_modules/leac/lib/leac.d.ts
generated
vendored
Normal file
@@ -0,0 +1,165 @@
|
||||
/** Lexer options (not many so far). */
|
||||
export declare type Options = {
|
||||
/**
|
||||
* Enable line and column numbers computation.
|
||||
*/
|
||||
lineNumbers?: boolean;
|
||||
};
|
||||
/** Result returned by a lexer function. */
|
||||
export declare type LexerResult = {
|
||||
/** Array of tokens. */
|
||||
tokens: Token[];
|
||||
/** Final offset. */
|
||||
offset: number;
|
||||
/**
|
||||
* True if whole input string was processed.
|
||||
*
|
||||
* Check this to see whether some input left untokenized.
|
||||
*/
|
||||
complete: boolean;
|
||||
};
|
||||
/**
|
||||
* Lexer function.
|
||||
*
|
||||
* @param str - A string to tokenize.
|
||||
* @param offset - Initial offset. Used when composing lexers.
|
||||
*/
|
||||
export declare type Lexer = (str: string, offset?: number) => LexerResult;
|
||||
/** Token object, a result of matching an individual lexing rule. */
|
||||
export declare type Token = {
|
||||
/** Name of the lexer containing the rule produced this token. */
|
||||
state: string;
|
||||
/** Name of the rule produced this token. */
|
||||
name: string;
|
||||
/** Text matched by the rule. _(Unless a replace value was used by a RegexRule.)_ */
|
||||
text: string;
|
||||
/** Start index of the match in the input string. */
|
||||
offset: number;
|
||||
/**
|
||||
* The length of the matched substring.
|
||||
*
|
||||
* _(Might be different from the text length in case replace value
|
||||
* was used in a RegexRule.)_
|
||||
*/
|
||||
len: number;
|
||||
/**
|
||||
* Line number in the source string (1-based).
|
||||
*
|
||||
* _(Always zero if not enabled in the lexer options.)_
|
||||
*/
|
||||
line: number;
|
||||
/**
|
||||
* Column number within the line in the source string (1-based).
|
||||
*
|
||||
* _(Always zero if line numbers not enabled in the lexer options.)_
|
||||
*/
|
||||
column: number;
|
||||
};
|
||||
/**
|
||||
* Lexing rule.
|
||||
*
|
||||
* Base rule looks for exact match by it's name.
|
||||
*
|
||||
* If the name and the lookup string have to be different
|
||||
* then specify `str` property as defined in {@link StringRule}.
|
||||
*/
|
||||
export interface Rule {
|
||||
/** The name of the rule, also the name of tokens produced by this rule. */
|
||||
name: string;
|
||||
/**
|
||||
* Matched token won't be added to the output array if this set to `true`.
|
||||
*
|
||||
* (_Think twice before using this._)
|
||||
* */
|
||||
discard?: boolean;
|
||||
/**
|
||||
* Switch to another lexer function after this match,
|
||||
* concatenate it's results and continue from where it stopped.
|
||||
*/
|
||||
push?: Lexer;
|
||||
/**
|
||||
* Stop after this match and return.
|
||||
*
|
||||
* If there is a parent parser - it will continue from this point.
|
||||
*/
|
||||
pop?: boolean;
|
||||
}
|
||||
/**
|
||||
* String rule - looks for exact string match that
|
||||
* can be different from the name of the rule.
|
||||
*/
|
||||
export interface StringRule extends Rule {
|
||||
/**
|
||||
* Specify the exact string to match
|
||||
* if it is different from the name of the rule.
|
||||
*/
|
||||
str: string;
|
||||
}
|
||||
/**
|
||||
* Regex rule - looks for a regular expression match.
|
||||
*/
|
||||
export interface RegexRule extends Rule {
|
||||
/**
|
||||
* Regular expression to match.
|
||||
*
|
||||
* - Can't have the global flag.
|
||||
*
|
||||
* - All regular expressions are used as sticky,
|
||||
* you don't have to specify the sticky flag.
|
||||
*
|
||||
* - Empty matches are considered as non-matches -
|
||||
* no token will be emitted in that case.
|
||||
*/
|
||||
regex: RegExp;
|
||||
/**
|
||||
* Replacement string can include patterns,
|
||||
* the same as [String.prototype.replace()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/replace#specifying_a_string_as_a_parameter).
|
||||
*
|
||||
* This will only affect the text property of an output token, not it's offset or length.
|
||||
*
|
||||
* Note: the regex has to be able to match the matched substring when taken out of context
|
||||
* in order for replace to work - boundary/neighborhood conditions may prevent this.
|
||||
*/
|
||||
replace?: string;
|
||||
}
|
||||
/**
|
||||
* Non-empty array of rules.
|
||||
*
|
||||
* Rules are processed in provided order, first match is taken.
|
||||
*
|
||||
* Rules can have the same name. For example, you can have
|
||||
* separate rules for various keywords and use the same name "keyword".
|
||||
*/
|
||||
export declare type Rules = [
|
||||
(Rule | StringRule | RegexRule),
|
||||
...(Rule | StringRule | RegexRule)[]
|
||||
];
|
||||
/**
|
||||
* Create a lexer function.
|
||||
*
|
||||
* @param rules - Non-empty array of lexing rules.
|
||||
*
|
||||
* Rules are processed in provided order, first match is taken.
|
||||
*
|
||||
* Rules can have the same name - you can have separate rules
|
||||
* for keywords and use the same name "keyword" for example.
|
||||
*
|
||||
* @param state - The name of this lexer. Use when composing lexers.
|
||||
* Empty string by default.
|
||||
*
|
||||
* @param options - Lexer options object.
|
||||
*/
|
||||
export declare function createLexer(rules: Rules, state?: string, options?: Options): Lexer;
|
||||
/**
|
||||
* Create a lexer function.
|
||||
*
|
||||
* @param rules - Non-empty array of lexing rules.
|
||||
*
|
||||
* Rules are processed in provided order, first match is taken.
|
||||
*
|
||||
* Rules can have the same name - you can have separate rules
|
||||
* for keywords and use the same name "keyword" for example.
|
||||
*
|
||||
* @param options - Lexer options object.
|
||||
*/
|
||||
export declare function createLexer(rules: Rules, options?: Options): Lexer;
|
||||
1
node_modules/leac/lib/leac.mjs
generated
vendored
Normal file
1
node_modules/leac/lib/leac.mjs
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
// leac — minified ESM build (generated, vendored: do not edit by hand).
// Map of the minified top-level symbols:
//   e            - /\n/g, used to locate newline offsets for line/column lookup.
//   n(str)       - builds a line/column lookup for `str`: collects newline
//                  indices via matchAll, prepends a -1 sentinel, builds a
//                  binary search tree over them (via t), and returns a function
//                  mapping a string offset to { line, column } (via r).
//   t(arr,lo,hi) - recursive tree builder; leaves are { offset, index },
//                  inner nodes { offset, low, high } keyed on the low offset.
//   r(node,pos)  - tree descent; at a leaf yields { line: index,
//                  column: pos - offset } (1-based thanks to the -1 sentinel).
//   o(rules, state?, options?) - createLexer (exported below): returns a lexer
//                  (str, offset?) => { tokens, offset, complete } that tries
//                  the compiled rules in order at the current offset (sticky
//                  exec); an empty match counts as no match. A match emits a
//                  token unless rule.discard is set, applying rule.replace to
//                  the token text when it is a string; rule.push delegates to
//                  a sub-lexer and appends its tokens, rule.pop stops the
//                  loop, and so does no rule matching. line/column are
//                  computed only when options.lineNumbers is set, otherwise
//                  both stay 0. `state` may be omitted, in which case the
//                  options object takes its argument position.
//   s(rule,i)    - shallow-copies a rule, attaching the compiled regex from l.
//   l(rule,i)    - validates and compiles a rule: throws on an empty name;
//                  a `regex` rule must not be global and is made sticky;
//                  otherwise `str` (or the rule name itself) is regex-escaped
//                  (via c) and compiled as a sticky exact-string pattern.
//   c(str)       - escapes regex metacharacters for the exact-string case.
const e=/\n/g;function n(n){const o=[...n.matchAll(e)].map((e=>e.index||0));o.unshift(-1);const s=t(o,0,o.length);return e=>r(s,e)}function t(e,n,r){if(r-n==1)return{offset:e[n],index:n+1};const o=Math.ceil((n+r)/2),s=t(e,n,o),l=t(e,o,r);return{offset:s.offset,low:s,high:l}}function r(e,n){return function(e){return Object.prototype.hasOwnProperty.call(e,"index")}(e)?{line:e.index,column:n-e.offset}:r(e.high.offset<n?e.high:e.low,n)}function o(e,t="",r={}){const o="string"!=typeof t?t:r,l="string"==typeof t?t:"",c=e.map(s),f=!!o.lineNumbers;return function(e,t=0){const r=f?n(e):()=>({line:0,column:0});let o=t;const s=[];e:for(;o<e.length;){let n=!1;for(const t of c){t.regex.lastIndex=o;const c=t.regex.exec(e);if(c&&c[0].length>0){if(!t.discard){const e=r(o),n="string"==typeof t.replace?c[0].replace(new RegExp(t.regex.source,t.regex.flags),t.replace):c[0];s.push({state:l,name:t.name,text:n,offset:o,len:c[0].length,line:e.line,column:e.column})}if(o=t.regex.lastIndex,n=!0,t.push){const n=t.push(e,o);s.push(...n.tokens),o=n.offset}if(t.pop)break e;break}}if(!n)break}return{tokens:s,offset:o,complete:e.length<=o}}}function s(e,n){return{...e,regex:l(e,n)}}function l(e,n){if(0===e.name.length)throw new Error(`Rule #${n} has empty name, which is not allowed.`);if(function(e){return Object.prototype.hasOwnProperty.call(e,"regex")}(e))return function(e){if(e.global)throw new Error(`Regular expression /${e.source}/${e.flags} contains the global flag, which is not allowed.`);return e.sticky?e:new RegExp(e.source,e.flags+"y")}(e.regex);if(function(e){return Object.prototype.hasOwnProperty.call(e,"str")}(e)){if(0===e.str.length)throw new Error(`Rule #${n} ("${e.name}") has empty "str" property, which is not allowed.`);return new RegExp(c(e.str),"y")}return new RegExp(c(e.name),"y")}function c(e){return e.replace(/[-[\]{}()*+!<=:?./\\^$|#\s,]/g,"\\$&")}export{o as createLexer};
|
||||
Reference in New Issue
Block a user