permet l'ajout des frameworks et des routes

This commit is contained in:
22107988t
2023-09-25 09:41:55 +02:00
parent 0b9f7d4dfb
commit 361112699c
2787 changed files with 864804 additions and 0 deletions

View File

@@ -0,0 +1,35 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
// Tokenizer context: attribute assignment ("=") between a key and a value.
// Emits a TOKEN_ATTRIBUTE_ASSIGNMENT token for the "=" held in the decision
// buffer, then moves the tokenizer into the attribute-value context.
function equal(state, tokens) {
  // keepBuffer: true — the "=" currently in decisionBuffer belongs to this
  // token's character range.
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
}

// Entry point: only "=" is significant here; any other input is ignored and
// the driver keeps feeding characters.
function parseSyntax(chars, state, tokens) {
  if (chars === '=') {
    return equal(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,43 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
// Flushes the accumulated attribute-key text as a TOKEN_ATTRIBUTE_KEY token
// and returns the tokenizer to the attributes context. The character that
// terminated the key is left in decisionBuffer (keepBuffer: false excludes
// it from the token range) so the next context can react to it.
function keyEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_KEY,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}
// Characters that terminate an attribute key.
var KEY_TERMINATORS = ['=', ' ', '\n', '\t', '/', '>'];

// True when `chars` ends the attribute key currently being read.
function isKeyBreak(chars) {
  return KEY_TERMINATORS.indexOf(chars) !== -1;
}

// Entry point for the attribute-key context: flush the key on a terminator,
// otherwise keep accumulating the buffered character into the key text.
function parseSyntax(chars, state, tokens) {
  if (isKeyBreak(chars)) {
    return keyEnd(state, tokens);
  }
  // Still inside the key: move the buffered character into the accumulated
  // key text and advance the caret.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
// Public interface of the attribute-key tokenizer context.
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange,
isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
// Tokenizer context: bare (unquoted) attribute value.

// Flushes the accumulated value as a TOKEN_ATTRIBUTE_VALUE token and returns
// to the attributes context; the terminating character stays in
// decisionBuffer for that context to process.
function valueEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}

// Whitespace, ">" or "/" terminates a bare value; anything else is folded
// into the accumulated value text.
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars) || chars === '>' || chars === '/') {
    return valueEnd(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
// Emits a TOKEN_ATTRIBUTE_VALUE_WRAPPER_END token for the closing quote
// held in decisionBuffer and returns to the attributes context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  // NOTE(review): this variant removes the key with `delete`, while the
  // wrapped-value context elsewhere assigns `undefined` — behaviorally
  // different under `in` checks; confirm which one callers rely on.
  delete state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT];
}

// Only the quote character recorded as the wrapper ends this context.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,37 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
// Emits a TOKEN_ATTRIBUTE_VALUE_WRAPPER_START token for the opening quote
// held in decisionBuffer and switches into the wrapped-value context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
}

// NOTE(review): this reads contextParams for the WRAPPED context, which this
// module never sets — presumably the attribute-value context populates it
// before this runs; confirm, otherwise this would throw on undefined.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,51 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE,
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
// Tokenizer context: inside a quoted attribute value.

// The closing quote was seen: emit the accumulated value as a
// TOKEN_ATTRIBUTE_VALUE token, then a TOKEN_ATTRIBUTE_VALUE_WRAPPER_END
// token for the quote itself (one position past the value), and return to
// the attributes context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // The quote sits immediately after the value's last character.
  var endWrapperPosition = range.endPosition + 1;
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }, {
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: endWrapperPosition,
    endPosition: endWrapperPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  state.caretPosition++;
  // Clear the per-context params (assigned undefined, not deleted).
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = undefined;
}

// Everything except the recorded wrapper quote is accumulated verbatim into
// the value text.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,63 @@
"use strict";
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require2.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT = _require2.ATTRIBUTE_VALUE_BARE_CONTEXT;
var _require3 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require3.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
// Tokenizer context: start of an attribute value. Decides, from the first
// significant character, whether the value is quoted, bare, or absent.

// A quote was seen: emit a TOKEN_ATTRIBUTE_VALUE_WRAPPER_START token for it,
// remember the quote character so the wrapped context knows how the value
// ends, and switch into the wrapped-value context.
function enterWrappedValue(state, tokens) {
  var quoteChar = state.decisionBuffer;
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
    content: quoteChar,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = {
    wrapper: quoteChar
  };
  state.caretPosition++;
}

// An unquoted value starts with this character: seed the accumulator with it
// and switch into the bare-value context.
function enterBareValue(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_BARE_CONTEXT;
  state.caretPosition++;
}

// ">" or "/" — no value follows; fall back to the attributes context without
// consuming the character (the caret is deliberately not advanced).
function handleTagEnd(state) {
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}

function parseSyntax(chars, state, tokens) {
  if (chars === '"' || chars === "'") {
    return enterWrappedValue(state, tokens);
  }
  if (chars === '>' || chars === '/') {
    return handleTagEnd(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return enterBareValue(state, tokens);
  }
  // Whitespace before the value: discard it and move on.
  state.decisionBuffer = '';
  state.caretPosition++;
}
// Public interface of the attribute-value tokenizer context.
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,69 @@
"use strict";
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT = _require2.OPEN_TAG_END_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT = _require2.ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTE_KEY_CONTEXT = _require2.ATTRIBUTE_KEY_CONTEXT;
var _require3 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_ASSIGNMENT = _require3.TOKEN_ATTRIBUTE_ASSIGNMENT;
// Tokenizer context: between attributes inside an open tag.

// ">" or "/" — the attribute list is over; carry the tag name forward into
// the open-tag-end context and clear this context's params.
function tagEnd(state) {
  var tagName = state.contextParams[ATTRIBUTES_CONTEXT].tagName;
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = OPEN_TAG_END_CONTEXT;
  state.contextParams[OPEN_TAG_END_CONTEXT] = {
    tagName: tagName
  };
  state.contextParams[ATTRIBUTES_CONTEXT] = undefined;
}

// A non-whitespace character begins an attribute key; seed the accumulator
// with it and switch into the attribute-key context.
function noneWhitespace(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_KEY_CONTEXT;
  state.caretPosition++;
}

// "=" directly in the attributes context (e.g. after a flushed key): emit a
// TOKEN_ATTRIBUTE_ASSIGNMENT token and switch into the attribute-value
// context.
function equal(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
  state.caretPosition++;
}

// Dispatch on the buffered character; whitespace between attributes is
// simply discarded.
function parseSyntax(chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens);
  }
  if (chars === '=') {
    return equal(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return noneWhitespace(state, tokens);
  }
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Tokenizer context: inside a closing tag ("</..." up to ">").

// ">" was seen: emit the whole closing tag (accumulated text plus the ">")
// as a single TOKEN_CLOSE_TAG token and return to the data context.
function closingCornerBrace(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  tokens.push({
    type: TOKEN_CLOSE_TAG,
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}

// Accumulate everything until the closing ">".
function parseSyntax(chars, state, tokens) {
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END,
TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Tokenizer context: inside an HTML comment, after "<!--".
var COMMENT_END = '-->';

// The full "-->" was matched: emit the accumulated comment body as a
// TOKEN_COMMENT_CONTENT token, then a TOKEN_COMMENT_END token covering the
// three "-->" characters right after it, and return to the data context.
function commentEnd(state, tokens) {
  var contentRange = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // "-->" occupies the three positions immediately after the content.
  var commentEndRange = {
    startPosition: contentRange.endPosition + 1,
    endPosition: contentRange.endPosition + COMMENT_END.length
  };
  tokens.push({
    type: TOKEN_COMMENT_CONTENT,
    content: state.accumulatedContent,
    startPosition: contentRange.startPosition,
    endPosition: contentRange.endPosition
  });
  tokens.push({
    type: TOKEN_COMMENT_END,
    content: state.decisionBuffer,
    startPosition: commentEndRange.startPosition,
    endPosition: commentEndRange.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}

// "-" and "--" are ambiguous prefixes of "-->": keep them buffered and wait
// for the next character before deciding.
function parseSyntax(chars, state, tokens) {
  if (chars === '-' || chars === '--') {
    state.caretPosition++;
    return;
  }
  if (chars === COMMENT_END) {
    return commentEnd(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,38 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Emits a TOKEN_COMMENT_END token (accumulated "--" plus the closing ">")
// and returns to the data context.
// NOTE(review): unlike sibling contexts, neither path here advances
// state.caretPosition — presumably the driver does; confirm.
function commentEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_COMMENT_END,
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
}

// Accumulate until ">" closes the comment terminator.
function parseSyntax(chars, state, tokens) {
  if (chars === '>') {
    return commentEnd(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,35 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
var _require3 = require('../constants/tokenizer-contexts'),
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
// Emits a TOKEN_COMMENT_START token for "<!--" and switches into the
// comment-content context.
function commentStart(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_COMMENT_START,
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = COMMENT_CONTENT_CONTEXT;
}

// Only reacts once the full "<!--" has been buffered; anything else is left
// for the driver to keep feeding.
function parseSyntax(chars, state, tokens) {
  if (chars === '<!--') {
    return commentStart(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,142 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_TEXT = _require2.TOKEN_TEXT,
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
var _require3 = require('../constants/tokenizer-contexts'),
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
// Tokenizer context: plain character data between tags. Accumulated text is
// flushed as a TOKEN_TEXT token whenever a tag, comment or doctype begins.
var COMMENT_START = '<!--';

// Builds (but does not push) a TOKEN_TEXT token for the accumulated text.
function generateTextToken(state) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  return {
    type: TOKEN_TEXT,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}

// "<x" — flush pending text and enter the open-tag-start context, keeping
// the buffered "<x" as the seed of the tag name.
function openingCornerBraceWithText(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = OPEN_TAG_START_CONTEXT;
  state.caretPosition++;
}

// "</" — flush pending text and enter the close-tag context.
function openingCornerBraceWithSlash(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = CLOSE_TAG_CONTEXT;
  state.caretPosition++;
}

// "<!DOCTYPE" (any case) — flush pending text and enter the doctype-start
// context.
function doctypeStart(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_START_CONTEXT;
  state.caretPosition++;
}

// "<!--" — flush pending text, emit a TOKEN_COMMENT_START token spanning the
// four buffered characters, and enter the comment-content context.
function commentStart(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  var commentStartRange = {
    startPosition: state.caretPosition - (COMMENT_START.length - 1),
    endPosition: state.caretPosition
  };
  tokens.push({
    type: TOKEN_COMMENT_START,
    content: state.decisionBuffer,
    startPosition: commentStartRange.startPosition,
    endPosition: commentStartRange.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = COMMENT_CONTENT_CONTEXT;
  state.caretPosition++;
}

// End of input: flush whatever text is left (accumulator plus any buffered
// characters) as a final TOKEN_TEXT token.
function handleContentEnd(state, tokens) {
  var textContent = state.accumulatedContent + state.decisionBuffer;
  if (textContent.length !== 0) {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_TEXT,
      content: textContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
}

// True for every strict prefix of "<!DOCTYPE" longer than "<!" (case-
// insensitive) — keep buffering until the decision can be made.
function isIncompleteDoctype(chars) {
  var charsUpperCase = chars.toUpperCase();
  return charsUpperCase === '<!' || charsUpperCase === '<!D' || charsUpperCase === '<!DO' || charsUpperCase === '<!DOC' || charsUpperCase === '<!DOCT' || charsUpperCase === '<!DOCTY' || charsUpperCase === '<!DOCTYP';
}
// "<" followed by a word character starts an open tag.
var OPEN_TAG_START_PATTERN = /^<\w/;

// Dispatch on the buffered prefix; order matters — ambiguous prefixes
// ("<", "<!", "<!-") are held until disambiguated.
function parseSyntax(chars, state, tokens) {
  if (OPEN_TAG_START_PATTERN.test(chars)) {
    return openingCornerBraceWithText(state, tokens);
  }
  if (chars === '</') {
    return openingCornerBraceWithSlash(state, tokens);
  }
  if (chars === '<' || chars === '<!' || chars === '<!-') {
    state.caretPosition++;
    return;
  }
  if (chars === COMMENT_START) {
    return commentStart(state, tokens);
  }
  if (isIncompleteDoctype(chars)) {
    state.caretPosition++;
    return;
  }
  if (chars.toUpperCase() === '<!DOCTYPE') {
    return doctypeStart(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax,
  handleContentEnd: handleContentEnd
};

View File

@@ -0,0 +1,40 @@
"use strict";
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Tokenizer context: bare (unquoted) doctype attribute.

// Flushes the accumulated text as a TOKEN_DOCTYPE_ATTRIBUTE token and
// returns to the doctype-attributes context; the terminator stays buffered.
function attributeEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
}

// Whitespace or ">" terminates the attribute; anything else is accumulated.
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars) || chars === '>') {
    return attributeEnd(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Emits a TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END token for the closing quote
// and returns to the doctype-attributes context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
  // Removed with `delete` here; the wrapped context assigns undefined —
  // NOTE(review): confirm callers don't distinguish the two.
  delete state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT];
}

// Only the quote recorded as the wrapper ends this context.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,37 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
// Emits a TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START token for the opening quote
// and switches into the wrapped doctype-attribute context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
}

// NOTE(review): reads contextParams for the WRAPPED context, which this
// module never sets — presumably populated by the doctype-attributes context
// beforehand; confirm.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,52 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Tokenizer context: inside a quoted doctype attribute.

// Closing quote seen: emit the accumulated text as a TOKEN_DOCTYPE_ATTRIBUTE
// token, then a TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END token for the quote one
// position past it, and return to the doctype-attributes context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // The quote sits immediately after the attribute's last character.
  var endWrapperPosition = range.endPosition + 1;
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: endWrapperPosition,
    endPosition: endWrapperPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
  state.caretPosition++;
  // Clear the per-context params (assigned undefined, not deleted).
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = undefined;
}

// Everything except the recorded wrapper quote is accumulated verbatim.
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,63 @@
"use strict";
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
var _require3 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require3.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
// Tokenizer context: between attributes inside a doctype declaration.

// A quote starts a wrapped attribute: emit the wrapper-start token, record
// the quote character for the wrapped context, and switch into it.
function wrapper(state, tokens) {
  // Local shadows the function name on purpose (transpiled output).
  var wrapper = state.decisionBuffer;
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
    content: wrapper,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = {
    wrapper: wrapper
  };
  state.caretPosition++;
}

// A non-quote, non-">", non-whitespace character starts a bare attribute.
function bare(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_BARE_CONTEXT;
  state.caretPosition++;
}

// ">" — the declaration is ending; hand over to the doctype-end context
// without consuming the character (caret deliberately not advanced).
function closingCornerBrace(state) {
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_END_CONTEXT;
}

// Dispatch; whitespace between attributes is discarded.
function parseSyntax(chars, state, tokens) {
  if (chars === '"' || chars === '\'') {
    return wrapper(state, tokens);
  }
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return bare(state, tokens);
  }
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,34 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Tokenizer context: the final ">" of a doctype declaration.

// Emits a TOKEN_DOCTYPE_END token for the buffered ">" and returns to the
// data context.
function closingCornerBrace(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  tokens.push({
    type: TOKEN_DOCTYPE_END,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}

// This context is only ever entered with ">" buffered, so every input ends
// the doctype unconditionally.
function parseSyntax(chars, state, tokens) {
  return closingCornerBrace(state, tokens);
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,55 @@
"use strict";
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Tokenizer context: the "<!DOCTYPE" keyword itself.

// Builds (but does not push) a TOKEN_DOCTYPE_START token for the
// accumulated "<!DOCTYPE" text.
function generateDoctypeStartToken(state) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  return {
    type: TOKEN_DOCTYPE_START,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}

// ">" right after the keyword — no attributes; go to the doctype-end context
// (the ">" stays buffered for it).
function closingCornerBrace(state, tokens) {
  tokens.push(generateDoctypeStartToken(state));
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_END_CONTEXT;
}

// Whitespace after the keyword — attributes may follow.
function whitespace(state, tokens) {
  tokens.push(generateDoctypeStartToken(state));
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
}

// Anything else while still inside the keyword is discarded from the buffer
// and the caret advanced (the keyword text was accumulated on entry).
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars)) {
    return whitespace(state, tokens);
  }
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
DATA_CONTEXT = _require3.DATA_CONTEXT,
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
// Tokenizer context: the ">" (or "/>") that closes an open tag. The token
// type and the follow-up context depend on the tag name: <script> and
// <style> switch into raw-content contexts instead of the data context.
var tokensMap = {
  'script': TOKEN_OPEN_TAG_END_SCRIPT,
  'style': TOKEN_OPEN_TAG_END_STYLE,
  'default': TOKEN_OPEN_TAG_END
};
var contextsMap = {
  'script': SCRIPT_CONTENT_CONTEXT,
  'style': STYLE_CONTENT_CONTEXT,
  'default': DATA_CONTEXT
};

// Emits the tag-end token (accumulated "/" if any, plus ">") and switches
// into the context selected by the tag name.
function closingCornerBrace(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  var tagName = state.contextParams[OPEN_TAG_END_CONTEXT].tagName;
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = contextsMap[tagName] || contextsMap["default"];
  state.caretPosition++;
  // Clear the per-context params (assigned undefined, not deleted).
  state.contextParams[OPEN_TAG_END_CONTEXT] = undefined;
}

// Accumulate (e.g. the "/" of a self-closing tag) until ">".
function parseSyntax(chars, state, tokens) {
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,78 @@
"use strict";
var _require = require('../helpers'),
parseOpenTagName = _require.parseOpenTagName,
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
// Tokenizer context: "<name" — the opening of a tag, while reading its name.
// <script> and <style> get dedicated token types.
var tokensMap = {
  'script': TOKEN_OPEN_TAG_START_SCRIPT,
  'style': TOKEN_OPEN_TAG_START_STYLE,
  'default': TOKEN_OPEN_TAG_START
};

// ">" or "/" directly after the name — no attributes. Emit the tag-start
// token and hand the tag name to the open-tag-end context; the terminator
// stays buffered for it.
function tagEnd(state, tokens) {
  var tagName = parseOpenTagName(state.accumulatedContent);
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.decisionBuffer = '';
  state.accumulatedContent = '';
  state.currentContext = OPEN_TAG_END_CONTEXT;
  state.contextParams[OPEN_TAG_END_CONTEXT] = {
    tagName: tagName
  };
}

// Whitespace after the name — attributes may follow. Emit the tag-start
// token and hand the tag name to the attributes context.
function whitespace(state, tokens) {
  var tagName = parseOpenTagName(state.accumulatedContent);
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  state.contextParams[ATTRIBUTES_CONTEXT] = {
    tagName: tagName
  };
  state.caretPosition++;
}

// Otherwise keep accumulating the tag name character by character.
function parseSyntax(chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens);
  }
  if (isWhitespace(chars)) {
    return whitespace(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT,
TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Tokenizer context: raw text inside a <script> element. Everything is
// accumulated verbatim until a case-insensitive "</script ...>" is matched.

// Emits the accumulated script body (if any) as TOKEN_SCRIPT_TAG_CONTENT,
// then the buffered "</script>" as TOKEN_CLOSE_TAG_SCRIPT, and returns to
// the data context.
function closingScriptTag(state, tokens) {
  if (state.accumulatedContent !== '') {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_SCRIPT_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
  tokens.push({
    type: TOKEN_CLOSE_TAG_SCRIPT,
    content: state.decisionBuffer,
    // The buffered closing tag ends at the caret and spans its own length.
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}
// A "</..." that has not yet reached its ">" — keep buffering.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// Case-insensitive "</script>", optionally with whitespace before ">".
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;

function parseSyntax(chars, state, tokens) {
  if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
    state.caretPosition++;
    return;
  }
  if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
    return closingScriptTag(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT,
TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
// Tokenizer context: raw text inside a <style> element. Mirrors the
// script-content context, matching "</style ...>" case-insensitively.

// Emits the accumulated style body (if any) as TOKEN_STYLE_TAG_CONTENT,
// then the buffered "</style>" as TOKEN_CLOSE_TAG_STYLE, and returns to the
// data context.
function closingStyleTag(state, tokens) {
  if (state.accumulatedContent !== '') {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_STYLE_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
  tokens.push({
    type: TOKEN_CLOSE_TAG_STYLE,
    content: state.decisionBuffer,
    // The buffered closing tag ends at the caret and spans its own length.
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}
// A "</..." that has not yet reached its ">" — keep buffering.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// Case-insensitive "</style>", optionally with whitespace before ">".
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;

function parseSyntax(chars, state, tokens) {
  if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
    state.caretPosition++;
    return;
  }
  if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
    return closingStyleTag(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};