Mirror of https://github.com/renovatebot/renovate.git (synced 2025-01-13 15:36:25 +00:00)
refactor(managers): Use moo fallback for unknown fragments (#9870)
commit b974ba0abb
parent 03e1ade35f
5 changed files with 27 additions and 49 deletions
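
Background for the change: moo's fallback rule matches any input that no other rule matched, and it coalesces consecutive unmatched characters into a single token, handling line breaks automatically. That makes the old pattern of a per-character catch-all (`/[^]/` with `lineBreaks: true`) plus a downstream pass that merges adjacent unknown tokens redundant. A minimal sketch of the difference, assuming only the moo package; the `keyword` rule and sample input are illustrative, not taken from this diff:

    import moo from 'moo';

    // Old style: the catch-all regex emits one token per character,
    // so '???' becomes three separate 'unknown' tokens.
    const perCharLexer = moo.compile({
      keyword: ['dependency'],
      unknown: { match: /[^]/, lineBreaks: true },
    });

    // New style: moo.fallback consumes everything between known tokens
    // and emits it as a single 'unknown' token.
    const fallbackLexer = moo.compile({
      keyword: ['dependency'],
      unknown: moo.fallback,
    });

    perCharLexer.reset('??? dependency');
    console.log([...perCharLexer].map((t) => t.value));
    // [ '?', '?', '?', ' ', 'dependency' ]

    fallbackLexer.reset('??? dependency');
    console.log([...fallbackLexer].map((t) => t.value));
    // [ '??? ', 'dependency' ]
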
@@ -85,23 +85,23 @@ const lexer = moo.states({
         ].join('|')
       ),
     },
-    unknown: { match: /[^]/, lineBreaks: true },
+    unknown: moo.fallback,
   },
   longDoubleQuoted: {
     stringFinish: { match: '"""', pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   doubleQuoted: {
     stringFinish: { match: '"', pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   longSingleQuoted: {
     stringFinish: { match: "'''", pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
   singleQuoted: {
     stringFinish: { match: "'", pop: 1 },
-    char: { match: /[^]/, lineBreaks: true },
+    char: moo.fallback,
   },
 });
 
@@ -21,7 +21,7 @@ const lexer = moo.states({
       match: /^#(?:addin|tool|module)\s+"(?:nuget|dotnet):[^"]+"\s*$/,
       value: (s: string) => s.trim().slice(1, -1),
     },
-    unknown: { match: /[^]/, lineBreaks: true },
+    unknown: moo.fallback,
   },
 });
 
@@ -56,11 +56,10 @@ export enum TokenType {
   TripleDoubleQuotedStart = 'tripleDoubleQuotedStart',
   TripleQuotedFinish = 'tripleQuotedFinish',
 
-  Char = 'char',
+  Chars = 'chars',
   EscapedChar = 'escapedChar',
   String = 'string',
 
-  UnknownLexeme = 'unknownChar',
   UnknownFragment = 'unknownFragment',
 }
 
@@ -35,7 +35,7 @@ describe(getName(), () => {
       TokenType.RightBrace,
       TokenType.RightBrace,
     ],
-    '@': [TokenType.UnknownLexeme],
+    '@': [TokenType.UnknownFragment],
     "'\\''": [
       TokenType.SingleQuotedStart,
       TokenType.EscapedChar,
@@ -54,23 +54,22 @@ describe(getName(), () => {
     ],
     "'x'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "'\n'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "'$x'": [
       TokenType.SingleQuotedStart,
-      TokenType.Char,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.SingleQuotedFinish,
     ],
     "''''''": ['tripleQuotedStart', 'tripleQuotedFinish'],
-    "'''x'''": ['tripleQuotedStart', TokenType.Char, 'tripleQuotedFinish'],
-    "'''\n'''": ['tripleQuotedStart', TokenType.Char, 'tripleQuotedFinish'],
+    "'''x'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
+    "'''\n'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
     "'''\\''''": [
       'tripleQuotedStart',
       TokenType.EscapedChar,
@@ -106,12 +105,12 @@ describe(getName(), () => {
     ],
     '"x"': [
       TokenType.DoubleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.DoubleQuotedFinish,
     ],
     '"\n"': [
       TokenType.DoubleQuotedStart,
-      TokenType.Char,
+      TokenType.Chars,
       TokenType.DoubleQuotedFinish,
     ],
     // eslint-disable-next-line no-template-curly-in-string
@@ -130,9 +129,7 @@ describe(getName(), () => {
     '"${x()}"': [
       TokenType.DoubleQuotedStart,
       TokenType.IgnoredInterpolationStart,
-      TokenType.UnknownLexeme,
-      TokenType.UnknownLexeme,
-      TokenType.UnknownLexeme,
+      TokenType.UnknownFragment,
       TokenType.RightBrace,
       TokenType.DoubleQuotedFinish,
     ],
@@ -140,7 +137,7 @@ describe(getName(), () => {
     '"${x{}}"': [
       TokenType.DoubleQuotedStart,
       TokenType.IgnoredInterpolationStart,
-      TokenType.UnknownLexeme,
+      TokenType.UnknownFragment,
       TokenType.LeftBrace,
       TokenType.RightBrace,
       TokenType.RightBrace,
@@ -55,26 +55,26 @@ const lexer = moo.states({
       match: '"',
       push: TokenType.DoubleQuotedStart,
     },
-    [TokenType.UnknownLexeme]: { match: /./ },
+    [TokenType.UnknownFragment]: moo.fallback,
   },
 
   // Tokenize triple-quoted string literal characters
   [TokenType.TripleSingleQuotedStart]: {
     ...escapedChars,
     [TokenType.TripleQuotedFinish]: { match: "'''", pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
   [TokenType.TripleDoubleQuotedStart]: {
     ...escapedChars,
     [TokenType.TripleQuotedFinish]: { match: '"""', pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
 
   // Tokenize single-quoted string literal characters
   [TokenType.SingleQuotedStart]: {
     ...escapedChars,
     [TokenType.SingleQuotedFinish]: { match: "'", pop: 1 },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
 
   // Tokenize double-quoted string literal chars and interpolations
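
The same swap applies inside stateful lexers: each string state keeps its explicit finish/`pop` rule and lets `moo.fallback` absorb everything else as a single token. A self-contained sketch of that push/pop pattern, using hypothetical state and token names rather than the manager's real ones:

    import moo from 'moo';

    // Two-state lexer: main pushes into a string state, whose unmatched
    // characters are collected by moo.fallback into one 'chars' token.
    const lexer = moo.states({
      main: {
        stringStart: { match: "'", push: 'stringState' },
        unknown: moo.fallback,
      },
      stringState: {
        stringFinish: { match: "'", pop: 1 },
        chars: moo.fallback,
      },
    });

    lexer.reset("'foo bar'");
    for (const token of lexer) {
      console.log(token.type, JSON.stringify(token.value));
    }
    // stringStart "'"
    // chars "foo bar"
    // stringFinish "'"
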
@@ -91,7 +91,7 @@ const lexer = moo.states({
       match: /\${/,
       push: TokenType.IgnoredInterpolationStart,
     },
-    [TokenType.Char]: { match: /[^]/, lineBreaks: true },
+    [TokenType.Chars]: moo.fallback,
   },
 
   // Ignore interpolation of complex expressions
@@ -102,34 +102,17 @@ const lexer = moo.states({
       push: TokenType.IgnoredInterpolationStart,
     },
     [TokenType.RightBrace]: { match: '}', pop: 1 },
-    [TokenType.UnknownLexeme]: { match: /[^]/, lineBreaks: true },
+    [TokenType.UnknownFragment]: moo.fallback,
   },
 });
 
-/*
-  Turn UnknownLexeme chars to UnknownFragment strings
-*/
-function processUnknownLexeme(acc: Token[], token: Token): Token[] {
-  if (token.type === TokenType.UnknownLexeme) {
-    const prevToken: Token = acc[acc.length - 1];
-    if (prevToken?.type === TokenType.UnknownFragment) {
-      prevToken.value += token.value;
-    } else {
-      acc.push({ ...token, type: TokenType.UnknownFragment });
-    }
-  } else {
-    acc.push(token);
-  }
-  return acc;
-}
-
 //
-// Turn separated chars of string literal to single String token
+// Turn substrings of chars and escaped chars into single String token
 //
-function processChar(acc: Token[], token: Token): Token[] {
+function processChars(acc: Token[], token: Token): Token[] {
   const tokenType = token.type;
   const prevToken: Token = acc[acc.length - 1];
-  if ([TokenType.Char, TokenType.EscapedChar].includes(tokenType)) {
+  if ([TokenType.Chars, TokenType.EscapedChar].includes(tokenType)) {
     if (prevToken?.type === TokenType.String) {
       prevToken.value += token.value;
     } else {
@@ -221,8 +204,7 @@ export function extractRawTokens(input: string): Token[] {
 
 export function processTokens(tokens: Token[]): Token[] {
   return tokens
-    .reduce(processUnknownLexeme, [])
-    .reduce(processChar, [])
+    .reduce(processChars, [])
     .reduce(processInterpolation, [])
     .filter(filterTokens);
 }
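
With moo pre-merging fallback runs, the hand-written `processUnknownLexeme` pass above became dead code, and only the folding of `chars`/`escapedChar` runs into `String` tokens remains. For illustration, the accumulate-or-append reducer shape that `processChars` follows, sketched with a simplified `Token` type and a hypothetical `mergeByType` helper:

    interface Token {
      type: string;
      value: string;
    }

    // Generic reducer factory: tokens whose type is in `mergeTypes` are
    // folded into a single running token of type `mergedType`; everything
    // else passes through unchanged.
    function mergeByType(mergeTypes: string[], mergedType: string) {
      return (acc: Token[], token: Token): Token[] => {
        const prev = acc[acc.length - 1];
        if (mergeTypes.includes(token.type)) {
          if (prev?.type === mergedType) {
            prev.value += token.value; // extend the running token
          } else {
            acc.push({ ...token, type: mergedType }); // start a new run
          }
        } else {
          acc.push(token);
        }
        return acc;
      };
    }

    // Roughly what processChars does after this refactor:
    // tokens.reduce(mergeByType(['chars', 'escapedChar'], 'string'), []);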