refactor(managers): Use moo fallback for unknown fragments (#9870)

Sergei Zharinov 2021-05-05 19:29:56 +04:00 committed by GitHub
parent 03e1ade35f
commit b974ba0abb
5 changed files with 27 additions and 49 deletions


@@ -85,23 +85,23 @@ const lexer = moo.states({
         ].join('|')
       ),
     },
-    unknown: { match: /[^]/, lineBreaks: true },
+    unknown: moo.fallback,
   },
   longDoubleQuoted: {
     stringFinish: { match: '"""', pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   doubleQuoted: {
     stringFinish: { match: '"', pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   longSingleQuoted: {
     stringFinish: { match: "'''", pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   singleQuoted: {
     stringFinish: { match: "'", pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
 });
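For context on the change itself: moo's fallback rule matches everything between the other tokens and emits it as a single token (line breaks included), whereas the old /[^]/ rules produced one token per character. A minimal sketch of the behavior, with illustrative rule names that are not taken from this repo:

import moo from 'moo';

const lexer = moo.compile({
  string: /"[^"]*"/,
  // Stands in for the old { match: /[^]/, lineBreaks: true } rule:
  // all input between `string` matches arrives as ONE `unknown` token.
  unknown: moo.fallback,
});

lexer.reset('foo bar\n"baz"');
for (const token of lexer) {
  // Prints: unknown "foo bar\n", then string '"baz"'
  console.log(token.type, JSON.stringify(token.value));
}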


@@ -21,7 +21,7 @@ const lexer = moo.states({
       match: /^#(?:addin|tool|module)\s+"(?:nuget|dotnet):[^"]+"\s*$/,
       value: (s: string) => s.trim().slice(1, -1),
     },
-    unknown: { match: /[^]/, lineBreaks: true },
+    unknown: moo.fallback,
   },
 });


@@ -56,11 +56,10 @@ export enum TokenType {
   TripleDoubleQuotedStart = 'tripleDoubleQuotedStart',
   TripleQuotedFinish = 'tripleQuotedFinish',
-  Char = 'char',
+  Chars = 'chars',
   EscapedChar = 'escapedChar',
   String = 'string',
-  UnknownLexeme = 'unknownChar',
   UnknownFragment = 'unknownFragment',
 }


@@ -35,7 +35,7 @@ describe(getName(), () => {
       TokenType.RightBrace,
       TokenType.RightBrace,
     ],
-    '@': [TokenType.UnknownLexeme],
+    '@': [TokenType.UnknownFragment],
     "'\\''": [
       TokenType.SingleQuotedStart,
       TokenType.EscapedChar,
@@ -54,23 +54,22 @@ describe(getName(), () => {
     ],
     "'x'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "'\n'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "'$x'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "''''''": ['tripleQuotedStart', 'tripleQuotedFinish'],
-    "'''x'''": ['tripleQuotedStart', TokenType.Char, 'tripleQuotedFinish'],
-    "'''\n'''": ['tripleQuotedStart', TokenType.Char, 'tripleQuotedFinish'],
+    "'''x'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
+    "'''\n'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
     "'''\\''''": [
       'tripleQuotedStart',
       TokenType.EscapedChar,
@@ -106,12 +105,12 @@ describe(getName(), () => {
     ],
     '"x"': [
       TokenType.DoubleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.DoubleQuotedFinish,
     ],
     '"\n"': [
       TokenType.DoubleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.DoubleQuotedFinish,
     ],
     // eslint-disable-next-line no-template-curly-in-string
@@ -130,9 +129,7 @@ describe(getName(), () => {
     '"${x()}"': [
       TokenType.DoubleQuotedStart,
       TokenType.IgnoredInterpolationStart,
-      TokenType.UnknownLexeme,
-      TokenType.UnknownLexeme,
-      TokenType.UnknownLexeme,
+      TokenType.UnknownFragment,
       TokenType.RightBrace,
       TokenType.DoubleQuotedFinish,
     ],
@@ -140,7 +137,7 @@ describe(getName(), () => {
     '"${x{}}"': [
       TokenType.DoubleQuotedStart,
       TokenType.IgnoredInterpolationStart,
-      TokenType.UnknownLexeme,
+      TokenType.UnknownFragment,
       TokenType.LeftBrace,
       TokenType.RightBrace,
       TokenType.RightBrace,
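The test updates follow directly from that behavior: character runs that previously yielded one Char (or UnknownLexeme) token apiece now arrive as a single Chars (or UnknownFragment) token. Sketched as a hypothetical assertion, assuming a tokenize helper that returns token types (the real test builds its expectations from the table above):

// Before: [SingleQuotedStart, Char, Char, SingleQuotedFinish]
// After:  [SingleQuotedStart, Chars, SingleQuotedFinish]
expect(tokenize("'$x'")).toEqual([
  TokenType.SingleQuotedStart,
  TokenType.Chars, // '$' and 'x' merged into one token by the fallback rule
  TokenType.SingleQuotedFinish,
]);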


@@ -55,26 +55,26 @@ const lexer = moo.states({
       match: '"',
       push: TokenType.DoubleQuotedStart,
     },
-    [TokenType.UnknownLexeme]: { match: /./ },
+    [TokenType.UnknownFragment]: moo.fallback,
   },
   // Tokenize triple-quoted string literal characters
   [TokenType.TripleSingleQuotedStart]: {
     ...escapedChars,
     [TokenType.TripleQuotedFinish]: { match: "'''", pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
   [TokenType.TripleDoubleQuotedStart]: {
     ...escapedChars,
     [TokenType.TripleQuotedFinish]: { match: '"""', pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
   // Tokenize single-quoted string literal characters
   [TokenType.SingleQuotedStart]: {
     ...escapedChars,
     [TokenType.SingleQuotedFinish]: { match: "'", pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
   // Tokenize double-quoted string literal chars and interpolations
@@ -91,7 +91,7 @@ const lexer = moo.states({
       match: /\${/,
       push: TokenType.IgnoredInterpolationStart,
     },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
   // Ignore interpolation of complex expressions
@@ -102,34 +102,17 @@ const lexer = moo.states({
       push: TokenType.IgnoredInterpolationStart,
     },
     [TokenType.RightBrace]: { match: '}', pop: 1 },
-    [TokenType.UnknownLexeme]: { match: /[^]/, lineBreaks: true },
+    [TokenType.UnknownFragment]: moo.fallback,
   },
 });
-/*
-  Turn UnknownLexeme chars to UnknownFragment strings
-*/
-function processUnknownLexeme(acc: Token[], token: Token): Token[] {
-  if (token.type === TokenType.UnknownLexeme) {
-    const prevToken: Token = acc[acc.length - 1];
-    if (prevToken?.type === TokenType.UnknownFragment) {
-      prevToken.value += token.value;
-    } else {
-      acc.push({ ...token, type: TokenType.UnknownFragment });
-    }
-  } else {
-    acc.push(token);
-  }
-  return acc;
-}
 //
-// Turn separated chars of string literal to single String token
+// Turn substrings of chars and escaped chars into single String token
 //
-function processChar(acc: Token[], token: Token): Token[] {
+function processChars(acc: Token[], token: Token): Token[] {
   const tokenType = token.type;
   const prevToken: Token = acc[acc.length - 1];
-  if ([TokenType.Char, TokenType.EscapedChar].includes(tokenType)) {
+  if ([TokenType.Chars, TokenType.EscapedChar].includes(tokenType)) {
     if (prevToken?.type === TokenType.String) {
       prevToken.value += token.value;
     } else {
@@ -221,8 +204,7 @@ export function extractRawTokens(input: string): Token[] {
 export function processTokens(tokens: Token[]): Token[] {
   return tokens
-    .reduce(processUnknownLexeme, [])
-    .reduce(processChar, [])
+    .reduce(processChars, [])
     .reduce(processInterpolation, [])
     .filter(filterTokens);
 }
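Because moo.fallback already coalesces unmatched input into single tokens at lex time, the processUnknownLexeme reducer above became dead code and is deleted rather than rewritten; only the char-merging pass survives. A self-contained sketch of that surviving reducer pattern (types trimmed to the essentials; not the repo's exact code):

interface Token {
  type: string;
  value: string;
}

// Fold adjacent 'chars'/'escapedChar' tokens into one 'string' token,
// passing all other tokens through unchanged.
function processChars(acc: Token[], token: Token): Token[] {
  const prevToken = acc[acc.length - 1];
  if (token.type === 'chars' || token.type === 'escapedChar') {
    if (prevToken?.type === 'string') {
      prevToken.value += token.value; // extend the string being built
    } else {
      acc.push({ ...token, type: 'string' }); // open a new string token
    }
  } else {
    acc.push(token);
  }
  return acc;
}

// Usage: const merged = rawTokens.reduce(processChars, []);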