permet l'ajout des frameworks et des routes

This commit is contained in:
22107988t
2023-09-25 09:41:55 +02:00
parent 0b9f7d4dfb
commit 361112699c
2787 changed files with 864804 additions and 0 deletions

View File

@@ -0,0 +1,44 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_ATTRIBUTE_KEY } = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * Emit the characters gathered so far as an attribute-key token and hand
 * control back to the attributes context. The breaking character is not
 * consumed here, so the attributes context re-processes it.
 */
function keyEnd (state, tokens) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: false })

  tokens.push({
    type: TOKEN_ATTRIBUTE_KEY,
    content: state.accumulatedContent,
    startPosition,
    endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
}

// Characters that terminate an attribute key.
const KEY_BREAK_CHARS = ['=', ' ', '\n', '\t', '/', '>']

/**
 * True when `chars` is one of the characters that ends an attribute key.
 */
function isKeyBreak (chars) {
  return KEY_BREAK_CHARS.includes(chars)
}

/**
 * Attribute-key context: accumulate characters until a key break is seen,
 * then flush the key token.
 */
function parseSyntax (chars, state, tokens) {
  if (isKeyBreak(chars)) {
    keyEnd(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,40 @@
const {
calculateTokenCharactersRange,
isWhitespace
} = require('../helpers')
const { TOKEN_ATTRIBUTE_VALUE } = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * Emit the accumulated bare (unquoted) attribute value as a token and
 * return to the attributes context. The terminating character is left
 * unconsumed for the attributes context to handle.
 */
function valueEnd (state, tokens) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })

  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
}

/**
 * Bare attribute-value context: whitespace, '>' or '/' ends the value.
 */
function parseSyntax (chars, state, tokens) {
  const valueEnded = isWhitespace(chars) || chars === '>' || chars === '/'

  if (valueEnded) {
    valueEnd(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,53 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_ATTRIBUTE_VALUE,
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END
} = require('../constants/token-types')
const {
ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * The closing quote was reached: emit the wrapped value token followed by
 * a wrapper-end token, consume the quote, and return to the attributes
 * context.
 */
function wrapper (state, tokens) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  // The closing quote sits one character past the value content.
  const endWrapperPosition = range.endPosition + 1

  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: endWrapperPosition,
    endPosition: endWrapperPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
  state.caretPosition++
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = undefined
}

/**
 * Wrapped attribute-value context: accumulate characters until the quote
 * recorded by the parent context shows up again.
 */
function parseSyntax (chars, state, tokens) {
  const { wrapper: quote } = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT]

  if (chars === quote) {
    return wrapper(state, tokens)
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,60 @@
const { isWhitespace } = require('../helpers')
const {
ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT
} = require('../constants/tokenizer-contexts')
const {
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START
} = require('../constants/token-types')
/**
 * A quote opens a wrapped value: emit a wrapper-start token, remember
 * which quote character was used, and enter the wrapped-value context.
 */
function wrapper (state, tokens) {
  const quote = state.decisionBuffer

  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
    content: quote,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = { wrapper: quote }
  state.caretPosition++
}

/**
 * A non-quote, non-terminator character starts a bare (unquoted) value.
 */
function bare (state) {
  state.accumulatedContent = state.decisionBuffer
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTE_VALUE_BARE_CONTEXT
  state.caretPosition++
}

/**
 * '>' or '/' means the tag ended without a value; the terminator is left
 * unconsumed for the attributes context.
 */
function tagEnd (state) {
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
}

/**
 * Attribute-value entry context: decide between a wrapped value, a bare
 * value, or the end of the tag; whitespace before the value is skipped.
 */
function parseSyntax (chars, state, tokens) {
  const isQuote = chars === '"' || chars === "'"

  if (isQuote) {
    wrapper(state, tokens)
    return
  }
  if (chars === '>' || chars === '/') {
    tagEnd(state)
    return
  }
  if (!isWhitespace(chars)) {
    bare(state)
    return
  }

  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,66 @@
const {
isWhitespace,
calculateTokenCharactersRange
} = require('../helpers')
const {
ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTE_KEY_CONTEXT
} = require('../constants/tokenizer-contexts')
const { TOKEN_ATTRIBUTE_ASSIGNMENT } = require('../constants/token-types')
/**
 * '>' or '/' ends the attribute list: move to the open-tag-end context,
 * forwarding the tag name stored by the open-tag-start context.
 */
function tagEnd (state) {
  const tagName = state.contextParams[ATTRIBUTES_CONTEXT].tagName

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = OPEN_TAG_END_CONTEXT
  state.contextParams[OPEN_TAG_END_CONTEXT] = { tagName }
  state.contextParams[ATTRIBUTES_CONTEXT] = undefined
}

/**
 * Any other non-whitespace character begins an attribute key.
 */
function noneWhitespace (state) {
  state.accumulatedContent = state.decisionBuffer
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTE_KEY_CONTEXT
  state.caretPosition++
}

/**
 * '=' between a key and its value: emit an assignment token and switch
 * to the attribute-value context.
 */
function equal (state, tokens) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: true })

  tokens.push({
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition,
    endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT
  state.caretPosition++
}

/**
 * Attributes context dispatcher; whitespace between attributes is skipped.
 */
function parseSyntax (chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    tagEnd(state)
    return
  }
  if (chars === '=') {
    equal(state, tokens)
    return
  }
  if (!isWhitespace(chars)) {
    noneWhitespace(state)
    return
  }

  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,34 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_CLOSE_TAG } = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * '>' was reached: emit a close-tag token for the buffered content plus
 * the brace, then return to the data context.
 */
function closingCornerBrace (state, tokens) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: true })

  tokens.push({
    type: TOKEN_CLOSE_TAG,
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition,
    endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}

/**
 * Close-tag context: buffer everything until the closing '>'.
 */
function parseSyntax (chars, state, tokens) {
  if (chars === '>') {
    closingCornerBrace(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,58 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_COMMENT_END,
TOKEN_COMMENT_CONTENT
} = require('../constants/token-types')
const {
DATA_CONTEXT
} = require('../constants/tokenizer-contexts')
// Literal that terminates an HTML comment.
const COMMENT_END = '-->'

/**
 * The full '-->' was matched: emit the comment-content token followed by
 * a comment-end token positioned right after the content, then return to
 * the data context.
 */
function commentEnd (state, tokens) {
  const contentRange = calculateTokenCharactersRange(state, { keepBuffer: false })
  const endStartPosition = contentRange.endPosition + 1

  tokens.push({
    type: TOKEN_COMMENT_CONTENT,
    content: state.accumulatedContent,
    startPosition: contentRange.startPosition,
    endPosition: contentRange.endPosition
  })
  tokens.push({
    type: TOKEN_COMMENT_END,
    content: state.decisionBuffer,
    startPosition: endStartPosition,
    endPosition: contentRange.endPosition + COMMENT_END.length
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}

/**
 * Comment-content context: '-' and '--' might be the start of the
 * terminator, so only the caret advances; anything else is accumulated
 * as comment text.
 */
function parseSyntax (chars, state, tokens) {
  if (chars === '-' || chars === '--') {
    state.caretPosition++
    return
  }
  if (chars === COMMENT_END) {
    commentEnd(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,155 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_TEXT,
TOKEN_COMMENT_START
} = require('../constants/token-types')
const {
OPEN_TAG_START_CONTEXT,
CLOSE_TAG_CONTEXT,
DOCTYPE_START_CONTEXT,
COMMENT_CONTENT_CONTEXT
} = require('../constants/tokenizer-contexts')
// Literal that opens an HTML comment.
const COMMENT_START = '<!--'
// '<' followed by a word character begins an open tag.
const OPEN_TAG_START_PATTERN = /^<\w/

/**
 * Build a text token from the content accumulated in the data context.
 */
function generateTextToken (state) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: false })

  return {
    type: TOKEN_TEXT,
    content: state.accumulatedContent,
    startPosition,
    endPosition
  }
}

/**
 * Flush pending text (if any) and switch to `nextContext`, keeping the
 * decision buffer as the new accumulated content.
 */
function flushTextAndSwitch (state, tokens, nextContext) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state))
  }

  state.accumulatedContent = state.decisionBuffer
  state.decisionBuffer = ''
  state.currentContext = nextContext
  state.caretPosition++
}

function openingCornerBraceWithText (state, tokens) {
  flushTextAndSwitch(state, tokens, OPEN_TAG_START_CONTEXT)
}

function openingCornerBraceWithSlash (state, tokens) {
  flushTextAndSwitch(state, tokens, CLOSE_TAG_CONTEXT)
}

function doctypeStart (state, tokens) {
  flushTextAndSwitch(state, tokens, DOCTYPE_START_CONTEXT)
}

/**
 * The full '<!--' was matched: flush pending text, emit a comment-start
 * token spanning the matched characters, and enter comment content.
 */
function commentStart (state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state))
  }

  const startPosition = state.caretPosition - (COMMENT_START.length - 1)

  tokens.push({
    type: TOKEN_COMMENT_START,
    content: state.decisionBuffer,
    startPosition,
    endPosition: state.caretPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = COMMENT_CONTENT_CONTEXT
  state.caretPosition++
}

/**
 * Emit whatever text is still buffered (accumulated content plus the
 * undecided buffer) as a single text token; no-op when both are empty.
 */
function handleContentEnd (state, tokens) {
  const textContent = state.accumulatedContent + state.decisionBuffer

  if (textContent.length === 0) {
    return
  }

  const range = calculateTokenCharactersRange(state, { keepBuffer: false })

  tokens.push({
    type: TOKEN_TEXT,
    content: textContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })
}

// Case-insensitive prefixes of '<!DOCTYPE' that need more characters
// before a decision can be made.
const INCOMPLETE_DOCTYPE_PREFIXES = [
  '<!', '<!D', '<!DO', '<!DOC', '<!DOCT', '<!DOCTY', '<!DOCTYP'
]

function isIncompleteDoctype (chars) {
  return INCOMPLETE_DOCTYPE_PREFIXES.includes(chars.toUpperCase())
}

/**
 * Data context: plain text until a construct ('<tag', '</', '<!--',
 * '<!DOCTYPE') is recognized; ambiguous prefixes only advance the caret.
 */
function parseSyntax (chars, state, tokens) {
  if (OPEN_TAG_START_PATTERN.test(chars)) {
    return openingCornerBraceWithText(state, tokens)
  }
  if (chars === '</') {
    return openingCornerBraceWithSlash(state, tokens)
  }
  if (chars === '<' || chars === '<!' || chars === '<!-') {
    state.caretPosition++
    return
  }
  if (chars === COMMENT_START) {
    return commentStart(state, tokens)
  }
  if (isIncompleteDoctype(chars)) {
    state.caretPosition++
    return
  }
  if (chars.toUpperCase() === '<!DOCTYPE') {
    return doctypeStart(state, tokens)
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: `parseSyntax` handles characters in the data context;
// `handleContentEnd` emits any remaining buffered text as a final token.
module.exports = {
  parseSyntax,
  handleContentEnd
}

View File

@@ -0,0 +1,37 @@
const { isWhitespace, calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_ATTRIBUTE
} = require('../constants/token-types')
const {
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * Emit the accumulated bare doctype attribute as a token and return to
 * the doctype-attributes context. The terminating character is not
 * consumed here.
 */
function attributeEnd (state, tokens) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: false })

  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE,
    content: state.accumulatedContent,
    startPosition,
    endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
}

/**
 * Bare doctype-attribute context: whitespace or '>' ends the attribute.
 */
function parseSyntax (chars, state, tokens) {
  const attributeEnded = isWhitespace(chars) || chars === '>'

  if (attributeEnded) {
    attributeEnd(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,52 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
TOKEN_DOCTYPE_ATTRIBUTE
} = require('../constants/token-types')
const {
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * The closing quote was reached: emit the wrapped doctype-attribute token
 * followed by a wrapper-end token, consume the quote, and return to the
 * doctype-attributes context.
 */
function wrapper (state, tokens) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  // The closing quote sits one character past the attribute content.
  const endWrapperPosition = range.endPosition + 1

  tokens.push(
    {
      type: TOKEN_DOCTYPE_ATTRIBUTE,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    },
    {
      type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
      content: state.decisionBuffer,
      startPosition: endWrapperPosition,
      endPosition: endWrapperPosition
    }
  )

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
  state.caretPosition++
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = undefined
}

/**
 * Wrapped doctype-attribute context: accumulate characters until the
 * quote recorded by the parent context shows up again.
 */
function parseSyntax (chars, state, tokens) {
  const { wrapper: quote } = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT]

  if (chars === quote) {
    return wrapper(state, tokens)
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,61 @@
const { isWhitespace } = require('../helpers')
const {
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
DOCTYPE_END_CONTEXT
} = require('../constants/tokenizer-contexts')
const {
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START
} = require('../constants/token-types')
/**
 * A quote starts a wrapped doctype attribute: emit a wrapper-start token,
 * record the quote character, and enter the wrapped-attribute context.
 */
function wrapper (state, tokens) {
  const quote = state.decisionBuffer

  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
    content: quote,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = { wrapper: quote }
  state.caretPosition++
}

/**
 * A non-quote, non-'>' character begins a bare doctype attribute.
 */
function bare (state) {
  state.accumulatedContent = state.decisionBuffer
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_ATTRIBUTE_BARE_CONTEXT
  state.caretPosition++
}

/**
 * '>' ends the doctype attribute list: switch to the doctype-end context
 * without consuming the brace.
 */
function closingCornerBrace (state) {
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_END_CONTEXT
}

/**
 * Doctype-attributes context dispatcher; whitespace between attributes
 * is skipped.
 */
function parseSyntax (chars, state, tokens) {
  const isQuote = chars === '"' || chars === "'"

  if (isQuote) {
    wrapper(state, tokens)
    return
  }
  if (chars === '>') {
    closingCornerBrace(state)
    return
  }
  if (!isWhitespace(chars)) {
    bare(state)
    return
  }

  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,28 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_DOCTYPE_END } = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * Emit the buffered closing brace as a doctype-end token and return to
 * the data context.
 */
function closingCornerBrace (state, tokens) {
  const { startPosition, endPosition } = calculateTokenCharactersRange(state, { keepBuffer: true })

  tokens.push({
    type: TOKEN_DOCTYPE_END,
    content: state.decisionBuffer,
    startPosition,
    endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}

/**
 * Doctype-end context: unconditionally emits the end token for whatever
 * character arrives (the previous context only enters here on '>').
 */
function parseSyntax (chars, state, tokens) {
  closingCornerBrace(state, tokens)
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,54 @@
const { isWhitespace, calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_START
} = require('../constants/token-types')
const {
DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * Build the doctype-start token from the content accumulated so far.
 */
function generateDoctypeStartToken (state) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })

  return {
    type: TOKEN_DOCTYPE_START,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }
}

/**
 * '>' right after the doctype name: emit the start token and switch to
 * the doctype-end context (the brace itself is consumed there).
 */
function closingCornerBrace (state, tokens) {
  tokens.push(generateDoctypeStartToken(state))

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_END_CONTEXT
}

/**
 * Whitespace after the doctype name: emit the start token and move on to
 * the doctype attributes.
 */
function whitespace (state, tokens) {
  tokens.push(generateDoctypeStartToken(state))

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
}

/**
 * Doctype-start context: whitespace or '>' flushes the start token;
 * any other character is accumulated.
 */
function parseSyntax (chars, state, tokens) {
  if (isWhitespace(chars)) {
    return whitespace(state, tokens)
  }

  if (chars === '>') {
    return closingCornerBrace(state, tokens)
  }

  // Fix: fold the buffered character into the accumulated content before
  // advancing. The previous version cleared the buffer without
  // accumulating, silently dropping any character that followed
  // '<!DOCTYPE' with no intervening whitespace — unlike every sibling
  // context, which accumulates before advancing the caret.
  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,58 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE
} = require('../constants/token-types')
const {
OPEN_TAG_END_CONTEXT,
DATA_CONTEXT,
SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * Resolve the token type for an open-tag end. `script` and `style` get
 * dedicated token types; anything else gets the generic one. Explicit
 * comparisons are used instead of `tokensMap[tagName] || default` so a
 * tag whose name collides with an inherited Object.prototype key (e.g.
 * <constructor>) cannot resolve to a prototype value.
 */
function openTagEndTokenType (tagName) {
  if (tagName === 'script') {
    return TOKEN_OPEN_TAG_END_SCRIPT
  }
  if (tagName === 'style') {
    return TOKEN_OPEN_TAG_END_STYLE
  }
  return TOKEN_OPEN_TAG_END
}

/**
 * Resolve the context to enter after the open tag closes; script and
 * style content get their own raw-text contexts. Same inherited-key
 * hardening as openTagEndTokenType.
 */
function nextContentContext (tagName) {
  if (tagName === 'script') {
    return SCRIPT_CONTENT_CONTEXT
  }
  if (tagName === 'style') {
    return STYLE_CONTENT_CONTEXT
  }
  return DATA_CONTEXT
}

/**
 * '>' closes the open tag: emit the tag-end token and enter the context
 * matching the tag name recorded by the attributes context.
 */
function closingCornerBrace (state, tokens) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: true })
  const { tagName } = state.contextParams[OPEN_TAG_END_CONTEXT]

  tokens.push({
    type: openTagEndTokenType(tagName),
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = nextContentContext(tagName)
  state.caretPosition++
  state.contextParams[OPEN_TAG_END_CONTEXT] = undefined
}

/**
 * Open-tag-end context: buffer characters until the closing '>'.
 */
function parseSyntax (chars, state, tokens) {
  if (chars === '>') {
    return closingCornerBrace(state, tokens)
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,74 @@
const {
parseOpenTagName,
isWhitespace,
calculateTokenCharactersRange
} = require('../helpers')
const {
TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE
} = require('../constants/token-types')
const {
OPEN_TAG_END_CONTEXT,
ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
/**
 * Resolve the token type for an open-tag start. `script` and `style` get
 * dedicated token types; anything else gets the generic one. Explicit
 * comparisons are used instead of `tokensMap[tagName] || default` so a
 * tag whose name collides with an inherited Object.prototype key (e.g.
 * <constructor>) cannot resolve to a prototype value.
 */
function openTagStartTokenType (tagName) {
  if (tagName === 'script') {
    return TOKEN_OPEN_TAG_START_SCRIPT
  }
  if (tagName === 'style') {
    return TOKEN_OPEN_TAG_START_STYLE
  }
  return TOKEN_OPEN_TAG_START
}

/**
 * Emit the open-tag-start token for the accumulated content. Shared by
 * tagEnd and whitespace, which previously duplicated this code.
 */
function pushOpenTagStartToken (state, tokens, tagName) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })

  tokens.push({
    type: openTagStartTokenType(tagName),
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })
}

/**
 * '>' or '/' directly after the tag name: emit the start token and move
 * to the open-tag-end context, forwarding the parsed tag name.
 */
function tagEnd (state, tokens) {
  const tagName = parseOpenTagName(state.accumulatedContent)

  pushOpenTagStartToken(state, tokens, tagName)

  state.decisionBuffer = ''
  state.accumulatedContent = ''
  state.currentContext = OPEN_TAG_END_CONTEXT
  state.contextParams[OPEN_TAG_END_CONTEXT] = { tagName }
}

/**
 * Whitespace after the tag name: emit the start token and enter the
 * attributes context, forwarding the parsed tag name.
 */
function whitespace (state, tokens) {
  const tagName = parseOpenTagName(state.accumulatedContent)

  pushOpenTagStartToken(state, tokens, tagName)

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
  state.contextParams[ATTRIBUTES_CONTEXT] = { tagName }
  state.caretPosition++
}

/**
 * Open-tag-start context: accumulate the tag name until '>', '/' or
 * whitespace decides what comes next.
 */
function parseSyntax (chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens)
  }

  if (isWhitespace(chars)) {
    return whitespace(state, tokens)
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,59 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_SCRIPT_TAG_CONTENT,
TOKEN_CLOSE_TAG_SCRIPT
} = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * A full closing script tag was matched: emit any buffered script text as
 * a content token, then the close-tag token, and return to the data
 * context.
 */
function closingScriptTag (state, tokens) {
  if (state.accumulatedContent !== '') {
    const range = calculateTokenCharactersRange(state, { keepBuffer: false })

    tokens.push({
      type: TOKEN_SCRIPT_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    })
  }

  // The decision buffer holds the entire closing-tag sequence; the caret
  // currently sits on its last character.
  const closeTagStart = state.caretPosition - (state.decisionBuffer.length - 1)

  tokens.push({
    type: TOKEN_CLOSE_TAG_SCRIPT,
    content: state.decisionBuffer,
    startPosition: closeTagStart,
    endPosition: state.caretPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}

// '</' followed by characters that have not yet reached a '>'.
const INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/
// Complete case-insensitive closing script tag, optional trailing spaces.
const CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i

/**
 * Script-content context: swallow everything verbatim until a closing
 * script tag appears; possible closing-tag prefixes only advance the
 * caret.
 */
function parseSyntax (chars, state, tokens) {
  const mayBeClosingTag =
    chars === '<' ||
    chars === '</' ||
    INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)

  if (mayBeClosingTag) {
    state.caretPosition++
    return
  }

  if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
    closingScriptTag(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}

View File

@@ -0,0 +1,59 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_STYLE_TAG_CONTENT,
TOKEN_CLOSE_TAG_STYLE
} = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * A full closing style tag was matched: emit any buffered style text as a
 * content token, then the close-tag token, and return to the data
 * context.
 */
function closingStyleTag (state, tokens) {
  if (state.accumulatedContent !== '') {
    const range = calculateTokenCharactersRange(state, { keepBuffer: false })

    tokens.push({
      type: TOKEN_STYLE_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    })
  }

  // The decision buffer holds the entire closing-tag sequence; the caret
  // currently sits on its last character.
  const closeTagStart = state.caretPosition - (state.decisionBuffer.length - 1)

  tokens.push({
    type: TOKEN_CLOSE_TAG_STYLE,
    content: state.decisionBuffer,
    startPosition: closeTagStart,
    endPosition: state.caretPosition
  })

  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}

// '</' followed by characters that have not yet reached a '>'.
const INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/
// Complete case-insensitive closing style tag, optional trailing spaces.
const CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i

/**
 * Style-content context: swallow everything verbatim until a closing
 * style tag appears; possible closing-tag prefixes only advance the
 * caret.
 */
function parseSyntax (chars, state, tokens) {
  const mayBeClosingTag =
    chars === '<' ||
    chars === '</' ||
    INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)

  if (mayBeClosingTag) {
    state.caretPosition++
    return
  }

  if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
    closingStyleTag(state, tokens)
    return
  }

  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Public interface: the tokenizer invokes `parseSyntax` for this context.
module.exports = {
  parseSyntax
}