Revert "permet l'ajout des frameworks et des routes"

This reverts commit 361112699c
This commit is contained in:
Dario Duchateau-weinberger
2023-09-25 09:44:12 +02:00
parent 361112699c
commit 20cb812095
2787 changed files with 0 additions and 864804 deletions

View File

@@ -1,9 +0,0 @@
// AST node types produced by the tree constructor.
// Each node in the resulting tree carries one of these in `nodeType`.
module.exports = {
  NODE_DOCUMENT: 'document', // implicit root of the whole document
  NODE_TAG: 'tag',
  NODE_TEXT: 'text',
  NODE_DOCTYPE: 'doctype',
  NODE_COMMENT: 'comment',
  NODE_SCRIPT: 'script',
  NODE_STYLE: 'style'
}

View File

@@ -1,35 +0,0 @@
// Types of tokens emitted by the tokenizer and consumed by the
// tree constructor. Grouped by the construct they describe.
module.exports = {
  TOKEN_TEXT: 'token:text',
  // Regular tags
  TOKEN_OPEN_TAG_START: 'token:open-tag-start',
  TOKEN_ATTRIBUTE_KEY: 'token:attribute-key',
  TOKEN_ATTRIBUTE_ASSIGNMENT: 'token:attribute-assignment',
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_START: 'token:attribute-value-wrapper-start',
  TOKEN_ATTRIBUTE_VALUE: 'token:attribute-value',
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_END: 'token:attribute-value-wrapper-end',
  TOKEN_OPEN_TAG_END: 'token:open-tag-end',
  TOKEN_CLOSE_TAG: 'token:close-tag',
  // <script> tags get dedicated tokens so their raw content is preserved
  TOKEN_OPEN_TAG_START_SCRIPT: 'token:open-tag-start-script',
  TOKEN_SCRIPT_TAG_CONTENT: 'token:script-tag-content',
  TOKEN_OPEN_TAG_END_SCRIPT: 'token:open-tag-end-script',
  TOKEN_CLOSE_TAG_SCRIPT: 'token:close-tag-script',
  // <style> tags, same idea as script
  TOKEN_OPEN_TAG_START_STYLE: 'token:open-tag-start-style',
  TOKEN_STYLE_TAG_CONTENT: 'token:style-tag-content',
  TOKEN_OPEN_TAG_END_STYLE: 'token:open-tag-end-style',
  TOKEN_CLOSE_TAG_STYLE: 'token:close-tag-style',
  // <!DOCTYPE ...> declarations
  TOKEN_DOCTYPE_START: 'token:doctype-start',
  TOKEN_DOCTYPE_END: 'token:doctype-end',
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START: 'token:doctype-attribute-wrapper-start',
  TOKEN_DOCTYPE_ATTRIBUTE: 'token:doctype-attribute',
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END: 'token:doctype-attribute-wrapper-end',
  // <!-- comments -->
  TOKEN_COMMENT_START: 'token:comment-start',
  TOKEN_COMMENT_CONTENT: 'token:comment-content',
  TOKEN_COMMENT_END: 'token:comment-end'
}

View File

@@ -1,21 +0,0 @@
// States of the tokenizer's state machine. The tokenizer is always
// in exactly one of these contexts; each has a matching handler in
// ./tokenizer-context-handlers.
module.exports = {
  DATA_CONTEXT: 'tokenizer-context:data',
  OPEN_TAG_START_CONTEXT: 'tokenizer-context:open-tag-start',
  CLOSE_TAG_CONTEXT: 'tokenizer-context:close-tag',
  ATTRIBUTES_CONTEXT: 'tokenizer-context:attributes',
  OPEN_TAG_END_CONTEXT: 'tokenizer-context:open-tag-end',
  ATTRIBUTE_KEY_CONTEXT: 'tokenizer-context:attribute-key',
  ATTRIBUTE_VALUE_CONTEXT: 'tokenizer-context:attribute-value',
  ATTRIBUTE_VALUE_BARE_CONTEXT: 'tokenizer-context:attribute-value-bare',
  ATTRIBUTE_VALUE_WRAPPED_CONTEXT: 'tokenizer-context:attribute-value-wrapped',
  SCRIPT_CONTENT_CONTEXT: 'tokenizer-context:script-content',
  STYLE_CONTENT_CONTEXT: 'tokenizer-context:style-content',
  DOCTYPE_START_CONTEXT: 'tokenizer-context:doctype-start',
  DOCTYPE_END_CONTEXT: 'tokenizer-context:doctype-end',
  DOCTYPE_ATTRIBUTES_CONTEXT: 'tokenizer-context:doctype-attributes',
  DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT: 'tokenizer-context:doctype-attribute-wrapped',
  DOCTYPE_ATTRIBUTE_BARE_CONTEXT: 'tokenizer-context:doctype-attribute-bare',
  COMMENT_START_CONTEXT: 'tokenizer-context:comment-start',
  COMMENT_CONTENT_CONTEXT: 'tokenizer-context:comment-content',
  COMMENT_END_CONTEXT: 'tokenizer-context:comment-end'
}

View File

@@ -1,14 +0,0 @@
// States of the tree constructor's state machine. Each has a matching
// handler in ./tree-constructor-context-handlers.
module.exports = {
  TAG_CONTENT_CONTEXT: 'tree-constructor-context:tag-content',
  TAG_CONTEXT: 'tree-constructor-context:tag',
  TAG_NAME_CONTEXT: 'tree-constructor-context:tag-name',
  ATTRIBUTES_CONTEXT: 'tree-constructor-context:attributes',
  ATTRIBUTE_CONTEXT: 'tree-constructor-context:attribute',
  ATTRIBUTE_VALUE_CONTEXT: 'tree-constructor-context:attribute-value',
  COMMENT_CONTEXT: 'tree-constructor-context:comment',
  DOCTYPE_CONTEXT: 'tree-constructor-context:doctype',
  DOCTYPE_ATTRIBUTES_CONTEXT: 'tree-constructor-context:doctype-attributes',
  DOCTYPE_ATTRIBUTE_CONTEXT: 'tree-constructor-context:doctype-attribute',
  SCRIPT_TAG_CONTEXT: 'tree-constructor-context:script-tag',
  STYLE_TAG_CONTEXT: 'tree-constructor-context:style-tag'
}

View File

@@ -1,90 +0,0 @@
const tag = require('./tree-constructor-context-handlers/tag')
const tagContent = require('./tree-constructor-context-handlers/tag-content')
const tagName = require('./tree-constructor-context-handlers/tag-name')
const attributes = require('./tree-constructor-context-handlers/attributes')
const attribute = require('./tree-constructor-context-handlers/attribute')
const attributeValue = require('./tree-constructor-context-handlers/attribute-value')
const comment = require('./tree-constructor-context-handlers/comment')
const doctype = require('./tree-constructor-context-handlers/doctype')
const doctypeAttributes = require('./tree-constructor-context-handlers/doctype-attributes')
const doctypeAttribute = require('./tree-constructor-context-handlers/doctype-attribute')
const scriptTag = require('./tree-constructor-context-handlers/script-tag')
const styleTag = require('./tree-constructor-context-handlers/style-tag')
const {
TAG_CONTENT_CONTEXT,
TAG_CONTEXT,
TAG_NAME_CONTEXT,
ATTRIBUTES_CONTEXT,
ATTRIBUTE_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT,
COMMENT_CONTEXT,
DOCTYPE_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT,
DOCTYPE_ATTRIBUTE_CONTEXT,
SCRIPT_TAG_CONTEXT,
STYLE_TAG_CONTEXT
} = require('./constants/tree-constructor-contexts')
const { NODE_DOCUMENT } = require('./constants/ast-nodes')
// Dispatch table: tree-constructor context type -> handler function.
// processTokens uses it to route each token to the handler of the
// currently open context.
const contextsMap = {
  [TAG_CONTENT_CONTEXT]: tagContent,
  [TAG_CONTEXT]: tag,
  [TAG_NAME_CONTEXT]: tagName,
  [ATTRIBUTES_CONTEXT]: attributes,
  [ATTRIBUTE_CONTEXT]: attribute,
  [ATTRIBUTE_VALUE_CONTEXT]: attributeValue,
  [COMMENT_CONTEXT]: comment,
  [DOCTYPE_CONTEXT]: doctype,
  [DOCTYPE_ATTRIBUTES_CONTEXT]: doctypeAttributes,
  [DOCTYPE_ATTRIBUTE_CONTEXT]: doctypeAttribute,
  [SCRIPT_TAG_CONTEXT]: scriptTag,
  [STYLE_TAG_CONTEXT]: styleTag
}
/**
 * Feed tokens to the handler of the currently open context until the
 * caret runs past the end of this chunk. Handlers advance
 * `state.caretPosition` themselves, so the loop re-derives the local
 * index from it after every step.
 */
function processTokens (tokens, state, positionOffset) {
  for (
    let index = state.caretPosition - positionOffset;
    index < tokens.length;
    index = state.caretPosition - positionOffset
  ) {
    const handle = contextsMap[state.currentContext.type]
    state = handle(tokens[index], state)
  }
  return state
}
module.exports = function constructTree (
tokens = [],
existingState
) {
let state = existingState
if (existingState === undefined) {
const rootContext = {
type: TAG_CONTENT_CONTEXT,
parentRef: undefined,
content: []
}
const rootNode = {
nodeType: NODE_DOCUMENT,
parentRef: undefined,
content: {}
}
state = {
caretPosition: 0,
currentContext: rootContext,
currentNode: rootNode,
rootNode
}
}
const positionOffset = state.caretPosition
processTokens(tokens, state, positionOffset)
return { state, ast: state.rootNode }
}

View File

@@ -1,90 +0,0 @@
// Matches "<name" at the start of an open-tag token, capturing the name.
const OPEN_TAG_NAME_PATTERN = /^<(\S+)/
// Matches a whole close tag "</name>", capturing everything (including
// newlines) between "</" and the final ">".
const CLOSE_TAG_NAME_PATTERN = /^<\/((?:.|\n)*)>$/
/** Stringify `obj` as human-readable JSON (2-space indentation). */
function prettyJSON (obj) {
  const INDENT_WIDTH = 2
  return JSON.stringify(obj, null, INDENT_WIDTH)
}
/**
 * Strip "parentRef" back-references from every node of the tree
 * (in place) so it can be stringified to JSON without hitting
 * circular references. Returns the same tree object.
 */
function clearAst (ast) {
  delete ast.parentRef
  const children = ast.content.children
  if (Array.isArray(children)) {
    ast.content.children = children.map(clearAst)
  }
  return ast
}
/**
 * Extract the lowercased tag name from the content of an
 * open-tag-start token (e.g. "<DIV" -> "div").
 * Throws when the content does not look like an opening tag.
 */
function parseOpenTagName (openTagStartTokenContent) {
  const match = openTagStartTokenContent.match(/^<(\S+)/)
  if (match === null) {
    throw new Error(
      'Unable to parse open tag name.\n' +
      `${ openTagStartTokenContent } does not match pattern of opening tag.`
    )
  }
  return match[1].toLowerCase()
}
/**
 * Extract the trimmed, lowercased tag name from the content of a
 * close-tag token (e.g. "</ DIV >" -> "div").
 * Throws when the content does not look like a closing tag.
 */
function parseCloseTagName (closeTagTokenContent) {
  const match = closeTagTokenContent.match(/^<\/((?:.|\n)*)>$/)
  if (match === null) {
    throw new Error(
      'Unable to parse close tag name.\n' +
      `${ closeTagTokenContent } does not match pattern of closing tag.`
    )
  }
  return match[1].trim().toLowerCase()
}
/**
 * Compute the { startPosition, endPosition } character range of the
 * token currently accumulated in the tokenizer state.
 * `keepBuffer` decides whether the decision buffer belongs to the
 * token (true) or is lookahead to exclude (false). The flag is
 * mandatory; omitting it is reported as a programming error.
 */
function calculateTokenCharactersRange (state, { keepBuffer }) {
  if (keepBuffer === undefined) {
    throw new Error(
      'Unable to calculate characters range for token.\n' +
      '"keepBuffer" parameter is not specified to decide if ' +
      'the decision buffer is a part of characters range.'
    )
  }
  const bufferLength = state.decisionBuffer.length
  const startPosition =
    state.caretPosition - (state.accumulatedContent.length - 1) - bufferLength
  const endPosition = keepBuffer
    ? state.caretPosition
    : state.caretPosition - bufferLength
  return { startPosition, endPosition }
}
/** True for the whitespace the tokenizer cares about: space, newline, tab. */
function isWhitespace (char) {
  return [' ', '\n', '\t'].includes(char)
}
// Helper surface shared by the tokenizer and the tree constructor.
module.exports = {
  prettyJSON,
  clearAst,
  parseOpenTagName,
  parseCloseTagName,
  calculateTokenCharactersRange,
  isWhitespace
}

View File

@@ -1,51 +0,0 @@
const { Transform } = require('stream')
const tokenize = require('./tokenize')
/**
 * Transform stream that consumes chunks of HTML text and emits arrays
 * of tokens. Tokenizer state is carried across chunks so tokens may
 * span chunk boundaries; `_flush` runs one final pass to drain it.
 */
class StreamTokenizer extends Transform {
  constructor (options) {
    super({
      ...options,
      decodeStrings: false,
      readableObjectMode: true
    })
    this.currentTokenizerState = undefined
    this.setDefaultEncoding('utf8')
  }

  _transform (chunk, encoding, callback) {
    const html = Buffer.isBuffer(chunk) ? chunk.toString() : chunk
    const { state, tokens } = tokenize(html, this.currentTokenizerState, {
      isFinalChunk: false
    })
    this.currentTokenizerState = state
    callback(null, tokens)
  }

  _flush (callback) {
    const { tokens } = tokenize('', this.currentTokenizerState, {
      isFinalChunk: true
    })
    this.push(tokens)
    callback()
  }
}
// CommonJS entry point for the streaming tokenizer.
module.exports = StreamTokenizer

View File

@@ -1,31 +0,0 @@
const { Transform } = require('stream')
const constructTree = require('./construct-tree')
/**
 * Transform stream that consumes token-array chunks and emits the
 * progressively completed AST after each chunk. Construction state is
 * threaded between chunks so the tree grows incrementally.
 */
class StreamTreeConstructor extends Transform {
  constructor (options) {
    super({
      ...options,
      objectMode: true,
      readableObjectMode: true
    })
    this.currentState = undefined
  }

  _transform (tokensChunk, encoding, callback) {
    const { state, ast } = constructTree(tokensChunk, this.currentState)
    this.currentState = state
    callback(null, ast)
  }
}
// CommonJS entry point for the streaming tree constructor.
module.exports = StreamTreeConstructor

View File

@@ -1,125 +0,0 @@
const dataContext = require('./tokenizer-context-handlers/data')
const openTagStartContext = require('./tokenizer-context-handlers/open-tag-start')
const closeTagContext = require('./tokenizer-context-handlers/close-tag')
const openTagEndContext = require('./tokenizer-context-handlers/open-tag-end')
const attributesContext = require('./tokenizer-context-handlers/attributes')
const attributeKeyContext = require('./tokenizer-context-handlers/attribute-key')
const attributeValueContext = require('./tokenizer-context-handlers/attribute-value')
const attributeValueBareContext = require('./tokenizer-context-handlers/attribute-value-bare')
const attributeValueWrappedContext = require('./tokenizer-context-handlers/attribute-value-wrapped')
const scriptContentContext = require('./tokenizer-context-handlers/script-tag-content')
const styleContentContext = require('./tokenizer-context-handlers/style-tag-content')
const doctypeStartContext = require('./tokenizer-context-handlers/doctype-start')
const doctypeEndContextFactory = require('./tokenizer-context-handlers/doctype-end')
const doctypeAttributesContext = require('./tokenizer-context-handlers/doctype-attributes')
const doctypeAttributeWrappedContext = require('./tokenizer-context-handlers/doctype-attribute-wrapped')
const doctypeAttributeBareEndContext = require('./tokenizer-context-handlers/doctype-attribute-bare')
const commentContentContext = require('./tokenizer-context-handlers/comment-content')
const {
DATA_CONTEXT,
OPEN_TAG_START_CONTEXT,
CLOSE_TAG_CONTEXT,
ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT,
ATTRIBUTE_KEY_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT,
DOCTYPE_START_CONTEXT,
DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT,
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
COMMENT_CONTENT_CONTEXT,
} = require('./constants/tokenizer-contexts')
// Dispatch table: tokenizer context -> handler module. Every handler
// exposes parseSyntax(chars, state, tokens); some also expose
// handleContentEnd for the final-chunk flush.
const contextHandlersMap = {
  [DATA_CONTEXT]: dataContext,
  [OPEN_TAG_START_CONTEXT]: openTagStartContext,
  [CLOSE_TAG_CONTEXT]: closeTagContext,
  [ATTRIBUTES_CONTEXT]: attributesContext,
  [OPEN_TAG_END_CONTEXT]: openTagEndContext,
  [ATTRIBUTE_KEY_CONTEXT]: attributeKeyContext,
  [ATTRIBUTE_VALUE_CONTEXT]: attributeValueContext,
  [ATTRIBUTE_VALUE_BARE_CONTEXT]: attributeValueBareContext,
  [ATTRIBUTE_VALUE_WRAPPED_CONTEXT]: attributeValueWrappedContext,
  [SCRIPT_CONTENT_CONTEXT]: scriptContentContext,
  [STYLE_CONTENT_CONTEXT]: styleContentContext,
  [DOCTYPE_START_CONTEXT]: doctypeStartContext,
  [DOCTYPE_END_CONTEXT]: doctypeEndContextFactory,
  [DOCTYPE_ATTRIBUTES_CONTEXT]: doctypeAttributesContext,
  [DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT]: doctypeAttributeWrappedContext,
  [DOCTYPE_ATTRIBUTE_BARE_CONTEXT]: doctypeAttributeBareEndContext,
  [COMMENT_CONTENT_CONTEXT]: commentContentContext
}
/**
 * Run the tokenizer state machine over `chars`, appending produced
 * tokens to `tokens`. Context handlers advance `state.caretPosition`
 * themselves, so the loop re-derives the local index after each step —
 * a handler may consume one character or wait for more lookahead.
 * On the final chunk the current context gets a chance to flush any
 * pending content.
 */
function tokenizeChars (
  chars,
  state,
  tokens,
  { isFinalChunk, positionOffset }
) {
  for (
    let charIndex = state.caretPosition - positionOffset;
    charIndex < chars.length;
    charIndex = state.caretPosition - positionOffset
  ) {
    const context = contextHandlersMap[state.currentContext]
    state.decisionBuffer += chars[charIndex]
    context.parseSyntax(state.decisionBuffer, state, tokens)
  }

  if (!isFinalChunk) {
    return
  }

  // Step the caret back inside the chars range: after the loop it
  // points one past the last character and must not count toward the
  // flushed token's range.
  state.caretPosition--
  const finalContext = contextHandlersMap[state.currentContext]
  if (finalContext.handleContentEnd !== undefined) {
    finalContext.handleContentEnd(state, tokens)
  }
}
/**
 * Tokenize a chunk of HTML.
 *
 * @param content HTML text for this chunk
 * @param existingState state returned by a previous call when
 *   streaming; omit to start fresh
 * @param options.isFinalChunk when true (the default) pending content
 *   is flushed at the end of the chunk
 * @returns {{ state, tokens }} tokenizer state to pass to the next
 *   call, and the tokens produced by this chunk
 */
function tokenize (
  content = '',
  existingState,
  { isFinalChunk = true } = {}
) {
  const state = existingState !== undefined
    ? Object.assign({}, existingState)
    : {
      currentContext: DATA_CONTEXT,
      contextParams: {},
      decisionBuffer: '',
      accumulatedContent: '',
      caretPosition: 0
    }
  // Re-feed the unresolved decision buffer in front of the new content
  // so tokens can span chunk boundaries.
  const chars = state.decisionBuffer + content
  const positionOffset = state.caretPosition - state.decisionBuffer.length
  const tokens = []
  tokenizeChars(chars, state, tokens, { isFinalChunk, positionOffset })
  return { state, tokens }
}
module.exports = tokenize

View File

@@ -1,44 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_ATTRIBUTE_KEY } = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tokenizer-contexts')
function keyEnd (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
tokens.push({
type: TOKEN_ATTRIBUTE_KEY,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTES_CONTEXT
}
function isKeyBreak (chars) {
return (
chars === '='
|| chars === ' '
|| chars === '\n'
|| chars === '\t'
|| chars === '/'
|| chars === '>'
)
}
function parseSyntax (chars, state, tokens) {
if (isKeyBreak(chars)) {
return keyEnd(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,40 +0,0 @@
const {
calculateTokenCharactersRange,
isWhitespace
} = require('../helpers')
const { TOKEN_ATTRIBUTE_VALUE } = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tokenizer-contexts')
function valueEnd (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTES_CONTEXT
}
function parseSyntax (chars, state, tokens) {
if (
isWhitespace(chars)
|| chars === '>'
|| chars === '/'
) {
return valueEnd(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,53 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_ATTRIBUTE_VALUE,
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END
} = require('../constants/token-types')
const {
ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT
} = require('../constants/tokenizer-contexts')
function wrapper (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
const endWrapperPosition = range.endPosition + 1
tokens.push(
{
type: TOKEN_ATTRIBUTE_VALUE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
},
{
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
content: state.decisionBuffer,
startPosition: endWrapperPosition,
endPosition: endWrapperPosition
}
)
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTES_CONTEXT
state.caretPosition++
state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = undefined
}
function parseSyntax (chars, state, tokens) {
const wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper
if (chars === wrapperChar) {
return wrapper(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,60 +0,0 @@
const { isWhitespace } = require('../helpers')
const {
ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT
} = require('../constants/tokenizer-contexts')
const {
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START
} = require('../constants/token-types')
function wrapper (state, tokens) {
const wrapper = state.decisionBuffer
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
content: wrapper,
startPosition: state.caretPosition,
endPosition: state.caretPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT
state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = { wrapper }
state.caretPosition++
}
function bare (state) {
state.accumulatedContent = state.decisionBuffer
state.decisionBuffer = ''
state.currentContext = ATTRIBUTE_VALUE_BARE_CONTEXT
state.caretPosition++
}
function tagEnd (state) {
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTES_CONTEXT
}
function parseSyntax (chars, state, tokens) {
if (chars === '"' || chars === '\'') {
return wrapper(state, tokens)
}
if (chars === '>' || chars === '/') {
return tagEnd(state, tokens)
}
if (!isWhitespace(chars)) {
return bare(state, tokens)
}
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,66 +0,0 @@
const {
isWhitespace,
calculateTokenCharactersRange
} = require('../helpers')
const {
ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTE_KEY_CONTEXT
} = require('../constants/tokenizer-contexts')
const { TOKEN_ATTRIBUTE_ASSIGNMENT } = require('../constants/token-types')
function tagEnd (state) {
const tagName = state.contextParams[ATTRIBUTES_CONTEXT].tagName
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = OPEN_TAG_END_CONTEXT
state.contextParams[OPEN_TAG_END_CONTEXT] = { tagName }
state.contextParams[ATTRIBUTES_CONTEXT] = undefined
}
function noneWhitespace (state) {
state.accumulatedContent = state.decisionBuffer
state.decisionBuffer = ''
state.currentContext = ATTRIBUTE_KEY_CONTEXT
state.caretPosition++
}
function equal (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: true })
tokens.push({
type: TOKEN_ATTRIBUTE_ASSIGNMENT,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = ATTRIBUTE_VALUE_CONTEXT
state.caretPosition++
}
function parseSyntax (chars, state, tokens) {
if (chars === '>' || chars === '/') {
return tagEnd(state, tokens)
}
if (chars === '=') {
return equal(state, tokens)
}
if (!isWhitespace(chars)) {
return noneWhitespace(state, tokens)
}
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,34 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_CLOSE_TAG } = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
function closingCornerBrace (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: true })
tokens.push({
type: TOKEN_CLOSE_TAG,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DATA_CONTEXT
state.caretPosition++
}
function parseSyntax (chars, state, tokens) {
if (chars === '>') {
return closingCornerBrace(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,58 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_COMMENT_END,
TOKEN_COMMENT_CONTENT
} = require('../constants/token-types')
const {
DATA_CONTEXT
} = require('../constants/tokenizer-contexts')
const COMMENT_END = '-->'
function commentEnd (state, tokens) {
const contentRange = calculateTokenCharactersRange(state, { keepBuffer: false })
const commentEndRange = {
startPosition: contentRange.endPosition + 1,
endPosition: contentRange.endPosition + COMMENT_END.length,
}
tokens.push({
type: TOKEN_COMMENT_CONTENT,
content: state.accumulatedContent,
startPosition: contentRange.startPosition,
endPosition: contentRange.endPosition
})
tokens.push({
type: TOKEN_COMMENT_END,
content: state.decisionBuffer,
startPosition: commentEndRange.startPosition,
endPosition: commentEndRange.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DATA_CONTEXT
state.caretPosition++
}
function parseSyntax (chars, state, tokens) {
if (chars === '-' || chars === '--') {
state.caretPosition++
return
}
if (chars === COMMENT_END) {
return commentEnd(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,155 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_TEXT,
TOKEN_COMMENT_START
} = require('../constants/token-types')
const {
OPEN_TAG_START_CONTEXT,
CLOSE_TAG_CONTEXT,
DOCTYPE_START_CONTEXT,
COMMENT_CONTENT_CONTEXT
} = require('../constants/tokenizer-contexts')
// Literal that opens an HTML comment.
const COMMENT_START = '<!--'
// Build a text token from the accumulated content. The decision buffer
// is excluded — it belongs to the construct that interrupted the text.
function generateTextToken (state) {
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  return {
    type: TOKEN_TEXT,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }
}
// Shared transition: flush pending text as a token, seed the next
// context's accumulated content with the decision buffer, advance.
function flushTextAndEnterContext (state, tokens, nextContext) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state))
  }
  state.accumulatedContent = state.decisionBuffer
  state.decisionBuffer = ''
  state.currentContext = nextContext
  state.caretPosition++
}

// "<x" seen — entering an opening tag.
function openingCornerBraceWithText (state, tokens) {
  flushTextAndEnterContext(state, tokens, OPEN_TAG_START_CONTEXT)
}

// "</" seen — entering a closing tag.
function openingCornerBraceWithSlash (state, tokens) {
  flushTextAndEnterContext(state, tokens, CLOSE_TAG_CONTEXT)
}

// "<!DOCTYPE" seen — entering a doctype declaration.
function doctypeStart (state, tokens) {
  flushTextAndEnterContext(state, tokens, DOCTYPE_START_CONTEXT)
}

// "<!--" seen — emit the comment-start token and switch to
// accumulating the comment body.
function commentStart (state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state))
  }
  tokens.push({
    type: TOKEN_COMMENT_START,
    content: state.decisionBuffer,
    startPosition: state.caretPosition - (COMMENT_START.length - 1),
    endPosition: state.caretPosition
  })
  Object.assign(state, {
    accumulatedContent: '',
    decisionBuffer: '',
    currentContext: COMMENT_CONTENT_CONTEXT
  })
  state.caretPosition++
}

// Final-chunk hook: whatever text is still buffered becomes the last
// text token of the stream.
function handleContentEnd (state, tokens) {
  const textContent = state.accumulatedContent + state.decisionBuffer
  if (textContent.length === 0) {
    return
  }
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  tokens.push({
    type: TOKEN_TEXT,
    content: textContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  })
}
/**
 * True when `chars` (case-insensitively) is a strict prefix of
 * "<!DOCTYPE" at least two characters long — i.e. the tokenizer cannot
 * yet decide between a doctype, a comment, or plain text.
 */
function isIncompleteDoctype (chars) {
  const doctypePrefixes = ['<!', '<!D', '<!DO', '<!DOC', '<!DOCT', '<!DOCTY', '<!DOCTYP']
  return doctypePrefixes.includes(chars.toUpperCase())
}
const OPEN_TAG_START_PATTERN = /^<\w/
function parseSyntax (chars, state, tokens) {
if (OPEN_TAG_START_PATTERN.test(chars)) {
return openingCornerBraceWithText(state, tokens)
}
if (chars === '</') {
return openingCornerBraceWithSlash(state, tokens)
}
if (
chars === '<'
|| chars === '<!'
|| chars === '<!-'
) {
state.caretPosition++
return
}
if (chars === COMMENT_START) {
return commentStart(state, tokens)
}
if (isIncompleteDoctype(chars)) {
state.caretPosition++
return
}
if (chars.toUpperCase() === '<!DOCTYPE') {
return doctypeStart(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax,
handleContentEnd
}

View File

@@ -1,37 +0,0 @@
const { isWhitespace, calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_ATTRIBUTE
} = require('../constants/token-types')
const {
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
function attributeEnd (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
}
function parseSyntax (chars, state, tokens) {
if (isWhitespace(chars) || chars === '>') {
return attributeEnd(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,52 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
TOKEN_DOCTYPE_ATTRIBUTE
} = require('../constants/token-types')
const {
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
function wrapper (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
const endWrapperPosition = range.endPosition + 1
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
})
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
content: state.decisionBuffer,
startPosition: endWrapperPosition,
endPosition: endWrapperPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
state.caretPosition++
state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = undefined
}
function parseSyntax (chars, state, tokens) {
const wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper
if (chars === wrapperChar) {
return wrapper(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,61 +0,0 @@
const { isWhitespace } = require('../helpers')
const {
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
DOCTYPE_END_CONTEXT
} = require('../constants/tokenizer-contexts')
const {
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START
} = require('../constants/token-types')
function wrapper (state, tokens) {
const wrapper = state.decisionBuffer
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
content: wrapper,
startPosition: state.caretPosition,
endPosition: state.caretPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT
state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = { wrapper }
state.caretPosition++
}
function bare (state) {
state.accumulatedContent = state.decisionBuffer
state.decisionBuffer = ''
state.currentContext = DOCTYPE_ATTRIBUTE_BARE_CONTEXT
state.caretPosition++
}
function closingCornerBrace (state) {
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_END_CONTEXT
}
function parseSyntax (chars, state, tokens) {
if (chars === '"' || chars === '\'') {
return wrapper(state, tokens)
}
if (chars === '>') {
return closingCornerBrace(state, tokens)
}
if (!isWhitespace(chars)) {
return bare(state, tokens)
}
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,28 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const { TOKEN_DOCTYPE_END } = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
function closingCornerBrace (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: true })
tokens.push({
type: TOKEN_DOCTYPE_END,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DATA_CONTEXT
state.caretPosition++
}
function parseSyntax (chars, state, tokens) {
return closingCornerBrace(state, tokens)
}
module.exports = {
parseSyntax
}

View File

@@ -1,54 +0,0 @@
const { isWhitespace, calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_DOCTYPE_START
} = require('../constants/token-types')
const {
DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
function generateDoctypeStartToken (state) {
const range = calculateTokenCharactersRange(state, { keepBuffer: false })
return {
type: TOKEN_DOCTYPE_START,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
}
}
function closingCornerBrace (state, tokens) {
tokens.push(generateDoctypeStartToken(state))
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_END_CONTEXT
}
function whitespace (state, tokens) {
tokens.push(generateDoctypeStartToken(state))
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT
}
function parseSyntax (chars, state, tokens) {
if (isWhitespace(chars)) {
return whitespace(state, tokens)
}
if (chars === '>') {
return closingCornerBrace(state, tokens)
}
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,58 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE
} = require('../constants/token-types')
const {
OPEN_TAG_END_CONTEXT,
DATA_CONTEXT,
SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT
} = require('../constants/tokenizer-contexts')
const tokensMap = {
'script': TOKEN_OPEN_TAG_END_SCRIPT,
'style': TOKEN_OPEN_TAG_END_STYLE,
'default': TOKEN_OPEN_TAG_END
}
const contextsMap = {
'script': SCRIPT_CONTENT_CONTEXT,
'style': STYLE_CONTENT_CONTEXT,
'default': DATA_CONTEXT
}
function closingCornerBrace (state, tokens) {
const range = calculateTokenCharactersRange(state, { keepBuffer: true })
const tagName = state.contextParams[OPEN_TAG_END_CONTEXT].tagName
tokens.push({
type: tokensMap[tagName] || tokensMap.default,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
})
state.accumulatedContent = ''
state.decisionBuffer = ''
state.currentContext = contextsMap[tagName] || contextsMap.default
state.caretPosition++
state.contextParams[OPEN_TAG_END_CONTEXT] = undefined
}
/**
 * Open-tag-end context handler: everything up to the closing '>' is
 * accumulated; the brace itself emits the open-tag-end token.
 */
function parseSyntax (chars, state, tokens) {
  if (chars !== '>') {
    state.accumulatedContent += state.decisionBuffer
    state.decisionBuffer = ''
    state.caretPosition++
    return
  }
  return closingCornerBrace(state, tokens)
}
// Tokenizer context handler interface: a single parseSyntax entry point.
module.exports = {
  parseSyntax
}

View File

@@ -1,74 +0,0 @@
const {
parseOpenTagName,
isWhitespace,
calculateTokenCharactersRange
} = require('../helpers')
const {
TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE
} = require('../constants/token-types')
const {
OPEN_TAG_END_CONTEXT,
ATTRIBUTES_CONTEXT
} = require('../constants/tokenizer-contexts')
// Token type to emit for an open-tag start ('<tagname'), keyed by tag
// name; script/style get dedicated types, all other tags the generic one.
const tokensMap = {
  'script': TOKEN_OPEN_TAG_START_SCRIPT,
  'style': TOKEN_OPEN_TAG_START_STYLE,
  'default': TOKEN_OPEN_TAG_START
}
/**
 * '>' or '/' right after the tag name: emit the open-tag-start token and
 * move to the open-tag-end context (the caret is not advanced, so the
 * brace character is processed by that context).
 */
function tagEnd (state, tokens) {
  const tagName = parseOpenTagName(state.accumulatedContent)
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  const token = {
    type: tokensMap[tagName] || tokensMap.default,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }
  tokens.push(token)
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = OPEN_TAG_END_CONTEXT
  state.contextParams[OPEN_TAG_END_CONTEXT] = { tagName }
}
/**
 * Whitespace after the tag name: emit the open-tag-start token and start
 * parsing attributes for this tag.
 */
function whitespace (state, tokens) {
  const tagName = parseOpenTagName(state.accumulatedContent)
  const range = calculateTokenCharactersRange(state, { keepBuffer: false })
  const token = {
    type: tokensMap[tagName] || tokensMap.default,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }
  tokens.push(token)
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = ATTRIBUTES_CONTEXT
  state.contextParams[ATTRIBUTES_CONTEXT] = { tagName }
  state.caretPosition++
}
function parseSyntax (chars, state, tokens) {
if (chars === '>' || chars === '/') {
return tagEnd(state, tokens)
}
if (isWhitespace(chars)) {
return whitespace(state, tokens)
}
state.accumulatedContent += state.decisionBuffer
state.decisionBuffer = ''
state.caretPosition++
}
module.exports = {
parseSyntax
}

View File

@@ -1,59 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_SCRIPT_TAG_CONTENT,
TOKEN_CLOSE_TAG_SCRIPT
} = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * A complete '</script>' was recognised: flush any raw script content
 * collected so far, emit the close-tag token and return to data context.
 */
function closingScriptTag (state, tokens) {
  const hasContent = state.accumulatedContent !== ''
  if (hasContent) {
    const range = calculateTokenCharactersRange(state, { keepBuffer: false })
    tokens.push({
      type: TOKEN_SCRIPT_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    })
  }
  // The closing tag itself sits entirely inside the decision buffer.
  tokens.push({
    type: TOKEN_CLOSE_TAG_SCRIPT,
    content: state.decisionBuffer,
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  })
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}
// A closing tag that has started but is not yet complete, e.g. '</scri'.
const UNFINISHED_CLOSE_TAG_RE = /<\/[^>]+$/
// A complete closing script tag (case-insensitive), e.g. '</script >'.
const SCRIPT_CLOSE_TAG_RE = /<\/script\s*>/i
/**
 * Script-content context handler: buffer characters that may still turn
 * into a closing tag, finish on a complete '</script>', and treat
 * everything else as raw script content.
 */
function parseSyntax (chars, state, tokens) {
  const mayBecomeClosingTag =
    chars === '<' ||
    chars === '</' ||
    UNFINISHED_CLOSE_TAG_RE.test(chars)
  if (mayBecomeClosingTag) {
    state.caretPosition++
    return
  }
  if (SCRIPT_CLOSE_TAG_RE.test(chars)) {
    return closingScriptTag(state, tokens)
  }
  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Tokenizer context handler interface: a single parseSyntax entry point.
module.exports = {
  parseSyntax
}

View File

@@ -1,59 +0,0 @@
const { calculateTokenCharactersRange } = require('../helpers')
const {
TOKEN_STYLE_TAG_CONTENT,
TOKEN_CLOSE_TAG_STYLE
} = require('../constants/token-types')
const { DATA_CONTEXT } = require('../constants/tokenizer-contexts')
/**
 * A complete '</style>' was recognised: flush any raw style content
 * collected so far, emit the close-tag token and return to data context.
 */
function closingStyleTag (state, tokens) {
  const hasContent = state.accumulatedContent !== ''
  if (hasContent) {
    const range = calculateTokenCharactersRange(state, { keepBuffer: false })
    tokens.push({
      type: TOKEN_STYLE_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    })
  }
  // The closing tag itself sits entirely inside the decision buffer.
  tokens.push({
    type: TOKEN_CLOSE_TAG_STYLE,
    content: state.decisionBuffer,
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  })
  state.accumulatedContent = ''
  state.decisionBuffer = ''
  state.currentContext = DATA_CONTEXT
  state.caretPosition++
}
// A closing tag that has started but is not yet complete, e.g. '</sty'.
const UNFINISHED_CLOSE_TAG_RE = /<\/[^>]+$/
// A complete closing style tag (case-insensitive), e.g. '</style >'.
const STYLE_CLOSE_TAG_RE = /<\/style\s*>/i
/**
 * Style-content context handler: buffer characters that may still turn
 * into a closing tag, finish on a complete '</style>', and treat
 * everything else as raw style content.
 */
function parseSyntax (chars, state, tokens) {
  const mayBecomeClosingTag =
    chars === '<' ||
    chars === '</' ||
    UNFINISHED_CLOSE_TAG_RE.test(chars)
  if (mayBecomeClosingTag) {
    state.caretPosition++
    return
  }
  if (STYLE_CLOSE_TAG_RE.test(chars)) {
    return closingStyleTag(state, tokens)
  }
  state.accumulatedContent += state.decisionBuffer
  state.decisionBuffer = ''
  state.caretPosition++
}
// Tokenizer context handler interface: a single parseSyntax entry point.
module.exports = {
  parseSyntax
}

View File

@@ -1,79 +0,0 @@
const {
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_ATTRIBUTE_VALUE,
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END
} = require('../constants/token-types')
/**
 * The attribute currently being built is always the last one pushed
 * onto the current node's attribute list.
 */
function getLastAttribute (state) {
  const { attributes } = state.currentNode.content
  return attributes[attributes.length - 1]
}
/**
 * The attribute value is finished — pop back to the parent context.
 */
function handleValueEnd (state) {
  const { parentRef } = state.currentContext
  state.currentContext = parentRef
  return state
}
/**
 * Store the attribute-value token on the attribute being built.
 */
function handleAttributeValue (state, token) {
  getLastAttribute(state).value = token
  state.caretPosition++
  return state
}
/**
 * Store the opening wrapper (quote) token on the attribute being built.
 */
function handleAttributeValueWrapperStart (state, token) {
  getLastAttribute(state).startWrapper = token
  state.caretPosition++
  return state
}
/**
 * Store the closing wrapper (quote) token on the attribute being built.
 */
function handleAttributeValueWrapperEnd (state, token) {
  getLastAttribute(state).endWrapper = token
  state.caretPosition++
  return state
}
module.exports = function attributeValue (token, state) {
const VALUE_END_TOKENS = [
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
]
if (VALUE_END_TOKENS.indexOf(token.type) !== -1) {
return handleValueEnd(state)
}
if (token.type === TOKEN_ATTRIBUTE_VALUE) {
return handleAttributeValue(state, token)
}
if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_START) {
return handleAttributeValueWrapperStart(state, token)
}
if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_END) {
return handleAttributeValueWrapperEnd(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,79 +0,0 @@
const {
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
} = require('../constants/token-types')
const {
ATTRIBUTE_VALUE_CONTEXT
} = require('../constants/tree-constructor-contexts')
/**
 * The attribute currently being built is always the last one pushed
 * onto the current node's attribute list.
 */
function getLastAttribute (state) {
  const { attributes } = state.currentNode.content
  return attributes[attributes.length - 1]
}
/**
 * An open-tag-end token means the attribute is over — pop the context.
 */
function handleOpenTagEnd (state) {
  const { parentRef } = state.currentContext
  state.currentContext = parentRef
  return state
}
/**
 * Attach the key token to the attribute being built. A key arriving when
 * this attribute already has a key or value means a new attribute has
 * started: pop the context without advancing the caret, so the token is
 * handled again by the parent context.
 */
function handleAttributeKey (state, token) {
  const attribute = getLastAttribute(state)
  const alreadyFilled = attribute.key !== undefined || attribute.value !== undefined
  if (alreadyFilled) {
    state.currentContext = state.currentContext.parentRef
    return state
  }
  attribute.key = token
  state.caretPosition++
  return state
}
/**
 * '=' after the key: descend into the attribute-value context. An
 * assignment arriving after the value is already set pops this context
 * instead (the caret is not advanced in that case).
 */
function handleAttributeAssignment (state) {
  const attribute = getLastAttribute(state)
  if (attribute.value !== undefined) {
    state.currentContext = state.currentContext.parentRef
    return state
  }
  state.currentContext = {
    type: ATTRIBUTE_VALUE_CONTEXT,
    parentRef: state.currentContext
  }
  state.caretPosition++
  return state
}
module.exports = function attribute (token, state) {
const OPEN_TAG_END_TOKENS = [
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE
]
if (OPEN_TAG_END_TOKENS.indexOf(token.type) !== -1) {
return handleOpenTagEnd(state)
}
if (token.type === TOKEN_ATTRIBUTE_KEY) {
return handleAttributeKey(state, token)
}
if (token.type === TOKEN_ATTRIBUTE_ASSIGNMENT) {
return handleAttributeAssignment(state)
}
state.caretPosition++
return state
}

View File

@@ -1,57 +0,0 @@
const {
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE
} = require('../constants/token-types')
const {
ATTRIBUTE_CONTEXT
} = require('../constants/tree-constructor-contexts')
/**
 * A key or '=' token opens a fresh attribute: push an empty attribute
 * object and descend into the attribute context. The caret is not
 * advanced, so the same token is processed again by that context.
 */
function handlerAttributeStart (state) {
  const content = state.currentNode.content
  if (content.attributes === undefined) {
    content.attributes = []
  }
  content.attributes.push({})
  state.currentContext = {
    type: ATTRIBUTE_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * An open-tag-end token ends the attribute list — pop the context.
 */
function handleOpenTagEnd (state) {
  const { parentRef } = state.currentContext
  state.currentContext = parentRef
  return state
}
module.exports = function attributes (token, state) {
const ATTRIBUTE_START_TOKENS = [
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
]
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handlerAttributeStart(state)
}
const ATTRIBUTES_END_TOKENS = [
TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE
]
if (ATTRIBUTES_END_TOKENS.indexOf(token.type) !== -1) {
return handleOpenTagEnd(state)
}
state.caretPosition++
return state
}

View File

@@ -1,46 +0,0 @@
const {
TOKEN_COMMENT_START,
TOKEN_COMMENT_END,
TOKEN_COMMENT_CONTENT
} = require('../constants/token-types')
/**
 * Record the comment-start token on the comment node.
 */
function handleCommentStart (state, token) {
  const node = state.currentNode
  node.content.start = token
  state.caretPosition++
  return state
}
/**
 * Record the comment body token as the node's value.
 */
function handleCommentContent (state, token) {
  const node = state.currentNode
  node.content.value = token
  state.caretPosition++
  return state
}
/**
 * The comment-end token completes the comment: store it and climb back
 * to the parent node and parent context.
 */
function handleCommentEnd (state, token) {
  const node = state.currentNode
  node.content.end = token
  state.currentNode = node.parentRef
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
module.exports = function comment (token, state) {
if (token.type === TOKEN_COMMENT_START) {
return handleCommentStart(state, token)
}
if (token.type === TOKEN_COMMENT_CONTENT) {
return handleCommentContent(state, token)
}
if (token.type === TOKEN_COMMENT_END) {
return handleCommentEnd(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,80 +0,0 @@
const {
TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END
} = require('../constants/token-types')
/**
 * The doctype attribute currently being built is always the last one
 * pushed onto the current node's attribute list.
 */
function getLastAttribute (state) {
  const { attributes } = state.currentNode.content
  return attributes[attributes.length - 1]
}
/**
 * A doctype-end token ends this attribute — pop back to the parent
 * context (the caret is not advanced).
 */
function handleDoctypeEnd (state) {
  const { parentRef } = state.currentContext
  state.currentContext = parentRef
  return state
}
/**
 * Store the doctype attribute token. A second value for the same
 * attribute pops this context instead, without advancing the caret.
 */
function handleAttributeValue (state, token) {
  const attribute = getLastAttribute(state)
  const hasValue = attribute.value !== undefined
  if (hasValue) {
    state.currentContext = state.currentContext.parentRef
    return state
  }
  attribute.value = token
  state.caretPosition++
  return state
}
/**
 * Store the opening wrapper (quote) token on the attribute being built.
 * If this attribute already has a wrapper or a value, a new attribute is
 * starting — pop the context without advancing the caret instead.
 */
function handleAttributeWrapperStart (state, token) {
  const attribute = getLastAttribute(state)
  // Guard on `startWrapper` — the key this handler actually assigns
  // below. The previous check read `attribute.start`, a key that is
  // never set on doctype attributes, so the duplicate-wrapper guard
  // could never fire.
  if (attribute.startWrapper !== undefined || attribute.value !== undefined) {
    state.currentContext = state.currentContext.parentRef
    return state
  }
  attribute.startWrapper = token
  state.caretPosition++
  return state
}
/**
 * The closing wrapper (quote) finishes this attribute: store it and pop
 * back to the doctype-attributes context.
 */
function handleAttributeWrapperEnd (state, token) {
  getLastAttribute(state).endWrapper = token
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
module.exports = function doctypeAttribute (token, state) {
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token)
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START) {
return handleAttributeWrapperStart(state, token)
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END) {
return handleAttributeWrapperEnd(state, token)
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE) {
return handleAttributeValue(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,50 +0,0 @@
const {
DOCTYPE_ATTRIBUTE_CONTEXT
} = require('../constants/tree-constructor-contexts')
const {
TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START
} = require('../constants/token-types')
/**
 * A doctype-end token ends the attribute list — pop the context
 * (the caret is not advanced).
 */
function handleDoctypeEnd (state) {
  const { parentRef } = state.currentContext
  state.currentContext = parentRef
  return state
}
/**
 * A wrapper-start or bare attribute token begins a new doctype
 * attribute: push an empty attribute object and descend into the
 * doctype-attribute context without advancing the caret, so the token
 * is re-handled there.
 */
function handleAttribute (state) {
  const content = state.currentNode.content
  if (content.attributes === undefined) {
    content.attributes = []
  }
  content.attributes.push({})
  state.currentContext = {
    parentRef: state.currentContext,
    type: DOCTYPE_ATTRIBUTE_CONTEXT
  }
  return state
}
module.exports = function doctypeAttributes (token, state) {
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token)
}
const ATTRIBUTE_START_TOKENS = [
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
TOKEN_DOCTYPE_ATTRIBUTE
]
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttribute(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,57 +0,0 @@
const {
TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_START,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START
} = require('../constants/token-types')
const {
DOCTYPE_ATTRIBUTES_CONTEXT
} = require('../constants/tree-constructor-contexts')
/**
 * Record the doctype-start token on the doctype node.
 */
function handleDoctypeStart (state, token) {
  const node = state.currentNode
  node.content.start = token
  state.caretPosition++
  return state
}
/**
 * The doctype-end token completes the doctype: store it and climb back
 * to the parent node and parent context.
 */
function handleDoctypeEnd (state, token) {
  const node = state.currentNode
  node.content.end = token
  state.currentNode = node.parentRef
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
/**
 * An attribute token inside the doctype: descend into the
 * doctype-attributes context without consuming the token.
 */
function handleDoctypeAttributes (state) {
  state.currentContext = {
    type: DOCTYPE_ATTRIBUTES_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
module.exports = function doctype (token, state) {
if (token.type === TOKEN_DOCTYPE_START) {
return handleDoctypeStart(state, token)
}
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token)
}
const ATTRIBUTES_START_TOKENS = [
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
TOKEN_DOCTYPE_ATTRIBUTE
]
if (ATTRIBUTES_START_TOKENS.indexOf(token.type) !== -1) {
return handleDoctypeAttributes(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,80 +0,0 @@
const {
TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_CLOSE_TAG_SCRIPT,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_SCRIPT_TAG_CONTENT
} = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tree-constructor-contexts')
/**
 * Record the '<script' token as the node's openStart marker.
 */
function handleOpenTagStartScript (state, token) {
  const content = state.currentNode.content
  content.openStart = token
  state.caretPosition++
  return state
}
/**
 * A key or '=' token means the script tag has attributes: descend into
 * the attributes context without consuming the token.
 */
function handleAttributeStartScript (state) {
  state.currentContext = {
    type: ATTRIBUTES_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * Record the '>' token as the node's openEnd marker.
 */
function handleOpenTagEndScript (state, token) {
  const content = state.currentNode.content
  content.openEnd = token
  state.caretPosition++
  return state
}
/**
 * Store the raw script content token as the node's value.
 */
function handleScriptContent (state, token) {
  const content = state.currentNode.content
  content.value = token
  state.caretPosition++
  return state
}
/**
 * '</script>' token: record it and climb back to the parent node and
 * parent context — the script element is complete.
 */
function handleCloseTagScript (state, token) {
  const node = state.currentNode
  node.content.close = token
  state.currentNode = node.parentRef
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
module.exports = function scriptTag (token, state) {
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
return handleOpenTagStartScript(state, token)
}
const ATTRIBUTE_START_TOKENS = [
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
]
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStartScript(state)
}
if (token.type === TOKEN_OPEN_TAG_END_SCRIPT) {
return handleOpenTagEndScript(state, token)
}
if (token.type === TOKEN_SCRIPT_TAG_CONTENT) {
return handleScriptContent(state, token)
}
if (token.type === TOKEN_CLOSE_TAG_SCRIPT) {
return handleCloseTagScript(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,79 +0,0 @@
const {
TOKEN_OPEN_TAG_START_STYLE,
TOKEN_OPEN_TAG_END_STYLE,
TOKEN_CLOSE_TAG_STYLE,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_STYLE_TAG_CONTENT
} = require('../constants/token-types')
const { ATTRIBUTES_CONTEXT } = require('../constants/tree-constructor-contexts')
/**
 * Record the '<style' token as the node's openStart marker.
 */
function handleOpenTagStartStyle (state, token) {
  const content = state.currentNode.content
  content.openStart = token
  state.caretPosition++
  return state
}
/**
 * A key or '=' token means the style tag has attributes: descend into
 * the attributes context without consuming the token.
 */
function handleAttributeStartStyle (state) {
  state.currentContext = {
    type: ATTRIBUTES_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * Record the '>' token as the node's openEnd marker.
 */
function handleOpenTagEndStyle (state, token) {
  const content = state.currentNode.content
  content.openEnd = token
  state.caretPosition++
  return state
}
/**
 * Store the raw style content token as the node's value.
 */
function handleStyleContent (state, token) {
  const content = state.currentNode.content
  content.value = token
  state.caretPosition++
  return state
}
/**
 * '</style>' token: record it and climb back to the parent node and
 * parent context — the style element is complete.
 */
function handleCloseTagStyle (state, token) {
  const node = state.currentNode
  node.content.close = token
  state.currentNode = node.parentRef
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
module.exports = function styleTag (token, state) {
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
return handleOpenTagStartStyle(state, token)
}
const ATTRIBUTE_START_TOKENS = [
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
]
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStartStyle(state)
}
if (token.type === TOKEN_OPEN_TAG_END_STYLE) {
return handleOpenTagEndStyle(state, token)
}
if (token.type === TOKEN_STYLE_TAG_CONTENT) {
return handleStyleContent(state, token)
}
if (token.type === TOKEN_CLOSE_TAG_STYLE) {
return handleCloseTagStyle(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,203 +0,0 @@
const parseCloseTagName = require('../helpers').parseCloseTagName
const {
TOKEN_OPEN_TAG_START,
TOKEN_CLOSE_TAG,
TOKEN_COMMENT_START,
TOKEN_DOCTYPE_START,
TOKEN_TEXT,
TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE
} = require('../constants/token-types')
const {
TAG_CONTEXT,
COMMENT_CONTEXT,
DOCTYPE_CONTEXT,
SCRIPT_TAG_CONTEXT,
STYLE_TAG_CONTEXT
} = require('../constants/tree-constructor-contexts')
const {
NODE_TAG,
NODE_TEXT,
NODE_DOCTYPE,
NODE_COMMENT,
NODE_SCRIPT,
NODE_STYLE
} = require('../constants/ast-nodes')
/**
 * '<tagname' begins a nested element: append a fresh tag node to the
 * current node's children and descend into it within a tag context
 * (the caret is not advanced).
 */
function handleOpenTagStart (state) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  const tagNode = {
    nodeType: NODE_TAG,
    parentRef: parent,
    content: {}
  }
  parent.content.children.push(tagNode)
  state.currentNode = tagNode
  state.currentContext = {
    type: TAG_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * A close tag only ends this content context when its name matches the
 * current node's name; otherwise the token is skipped as stray markup.
 */
function handleCloseTag (state, token) {
  const closeTagName = parseCloseTagName(token.content)
  const matchesCurrentNode = closeTagName === state.currentNode.content.name
  if (!matchesCurrentNode) {
    state.caretPosition++
    return state
  }
  state.currentContext = state.currentContext.parentRef
  return state
}
/**
 * '<!--' begins a comment: append a comment node to the current node's
 * children and descend into it within a comment context (the caret is
 * not advanced).
 */
function handleCommentStart (state) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  const commentNode = {
    nodeType: NODE_COMMENT,
    parentRef: parent,
    content: {}
  }
  parent.content.children.push(commentNode)
  state.currentNode = commentNode
  state.currentContext = {
    type: COMMENT_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * A doctype-start token: append a doctype node to the current node's
 * children and descend into it within a doctype context (the caret is
 * not advanced).
 */
function handleDoctypeStart (state) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  const doctypeNode = {
    nodeType: NODE_DOCTYPE,
    parentRef: parent,
    content: {}
  }
  parent.content.children.push(doctypeNode)
  state.currentNode = doctypeNode
  state.currentContext = {
    type: DOCTYPE_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * Plain text becomes a leaf node under the current node; no descent,
 * just advance the caret.
 */
function handleText (state, token) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  parent.content.children.push({
    nodeType: NODE_TEXT,
    parentRef: parent,
    content: {
      value: token
    }
  })
  state.caretPosition++
  return state
}
/**
 * '<script' begins a script element: append a script node to the current
 * node's children and descend into it within a script-tag context
 * (the caret is not advanced).
 */
function handleOpenTagStartScript (state) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  const scriptNode = {
    nodeType: NODE_SCRIPT,
    parentRef: parent,
    content: {}
  }
  parent.content.children.push(scriptNode)
  state.currentNode = scriptNode
  state.currentContext = {
    type: SCRIPT_TAG_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * '<style' begins a style element: append a style node to the current
 * node's children and descend into it within a style-tag context
 * (the caret is not advanced).
 */
function handleOpenTagStartStyle (state) {
  const parent = state.currentNode
  if (parent.content.children === undefined) {
    parent.content.children = []
  }
  const styleNode = {
    nodeType: NODE_STYLE,
    parentRef: parent,
    content: {}
  }
  parent.content.children.push(styleNode)
  state.currentNode = styleNode
  state.currentContext = {
    type: STYLE_TAG_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
module.exports = function tagContent (token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
return handleOpenTagStart(state, token)
}
if (token.type === TOKEN_TEXT) {
return handleText(state, token)
}
if (token.type === TOKEN_CLOSE_TAG) {
return handleCloseTag(state, token)
}
if (token.type === TOKEN_COMMENT_START) {
return handleCommentStart(state, token)
}
if (token.type === TOKEN_DOCTYPE_START) {
return handleDoctypeStart(state, token)
}
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
return handleOpenTagStartScript(state, token)
}
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
return handleOpenTagStartStyle(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,29 +0,0 @@
/**
* Parser for 'tag-name' context.
* Parses tag name from 'open-tag-start' (<div)
* token and save the tag name as self content.
* Ignores tokens others than 'open-tag-start'.
*/
const parseOpenTagName = require('../helpers').parseOpenTagName
const {
TOKEN_OPEN_TAG_START
} = require('../constants/token-types')
/**
 * Extract the tag name from the '<name' token content, store it on the
 * current node and pop back to the parent context.
 */
function handleTagOpenStart (state, token) {
  const name = parseOpenTagName(token.content)
  state.currentNode.content.name = name
  state.currentContext = state.currentContext.parentRef
  return state
}
module.exports = function tagName (token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
handleTagOpenStart(state, token)
}
state.caretPosition++
return state
}

View File

@@ -1,108 +0,0 @@
const {
TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_END,
TOKEN_CLOSE_TAG,
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
} = require('../constants/token-types')
const {
TAG_NAME_CONTEXT,
ATTRIBUTES_CONTEXT,
TAG_CONTENT_CONTEXT
} = require('../constants/tree-constructor-contexts')
/**
 * '<name' token: record it as openStart and descend into the tag-name
 * context (the caret is not advanced, so the same token supplies the
 * tag name there).
 */
function handleOpenTagStart (state, token) {
  state.currentNode.content.openStart = token
  state.currentContext = {
    type: TAG_NAME_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
/**
 * A key or '=' token means attributes follow: descend into the
 * attributes context without consuming the token.
 */
function handleAttributeStart (state) {
  state.currentContext = {
    type: ATTRIBUTES_CONTEXT,
    parentRef: state.currentContext
  }
  return state
}
// Void (self-closing) HTML elements: tags that never have a closing tag,
// so the tree constructor pops them as soon as their open tag ends.
// Hoisted to module scope so the list is not rebuilt on every call.
const SELF_CLOSING_TAGS = [
  'area',
  'base',
  'br',
  'col',
  'embed',
  'hr',
  'img',
  'input',
  'keygen',
  'link',
  'meta',
  'param',
  'source',
  'track',
  'wbr'
]
/**
 * '>' token: record it as openEnd. Void elements are closed immediately
 * (marked selfClosing, node and context popped); all other elements
 * descend into the tag-content context to collect children.
 */
function handleOpenTagEnd (state, token) {
  const tagName = state.currentNode.content.name
  state.currentNode.content.openEnd = token
  if (SELF_CLOSING_TAGS.indexOf(tagName) !== -1) {
    state.currentNode.content.selfClosing = true
    state.currentNode = state.currentNode.parentRef
    state.currentContext = state.currentContext.parentRef
    state.caretPosition++
    return state
  }
  state.currentNode.content.selfClosing = false
  state.currentContext = {
    parentRef: state.currentContext,
    type: TAG_CONTENT_CONTEXT
  }
  state.caretPosition++
  return state
}
/**
 * '</name>' token: record it and climb back to the parent node and
 * parent context — the element is complete.
 */
function handleCloseTag (state, token) {
  const node = state.currentNode
  node.content.close = token
  state.currentNode = node.parentRef
  state.currentContext = state.currentContext.parentRef
  state.caretPosition++
  return state
}
module.exports = function tag (token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
return handleOpenTagStart(state, token)
}
const ATTRIBUTE_START_TOKENS = [
TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT
]
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStart(state)
}
if (token.type === TOKEN_OPEN_TAG_END) {
return handleOpenTagEnd(state, token)
}
if (token.type === TOKEN_CLOSE_TAG) {
return handleCloseTag(state, token)
}
state.caretPosition++
return state
}