mirror of
https://scm.univ-tours.fr/22107988t/rappaurio-sae501_502.git
synced 2025-08-29 18:05:57 +02:00
permet l'ajout des frameworks et des routes
This commit is contained in:
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-assignment.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-assignment.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require3.ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
equal: function equal(state, tokens, contextFactories, options) {
|
||||
var attributeValueContext = contextFactories[ATTRIBUTE_VALUE_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
content: '' + state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '=') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.equal(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeKeyContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-key.factory.js
generated
vendored
Normal file
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-key.factory.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_KEY_CONTEXT = _require3.ATTRIBUTE_KEY_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
keyEnd: function keyEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_KEY,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
var KEY_BREAK_CHARS = [' ', '\n', '\t', '=', '/', '>'];
|
||||
|
||||
if (KEY_BREAK_CHARS.indexOf(chars) !== -1) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.keyEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeKeyContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_KEY_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-bare.factory.js
generated
vendored
Normal file
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-bare.factory.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require3.ATTRIBUTE_VALUE_BARE_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
valueEnd: function valueEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
var BARE_VALUE_END_PATTERN = /\s/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (BARE_VALUE_END_PATTERN.test(chars) || chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.valueEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueBareContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_BARE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-start.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedEndContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedEndContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value.factory.js
generated
vendored
Normal file
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value.factory.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require.ATTRIBUTE_VALUE_BARE_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedStartContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT](contextFactories, Object.assign({}, options, { wrapper: state.decisionBuffer }));
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedStartContext;
|
||||
},
|
||||
bare: function bare(state, tokens, contextFactories, options) {
|
||||
var attributeValueBareContext = contextFactories[ATTRIBUTE_VALUE_BARE_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueBareContext;
|
||||
},
|
||||
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
var BARE_VALUE_PATTERN = /\S/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '"' || chars === '\'') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (BARE_VALUE_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.bare(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attributes.factory.js
generated
vendored
Normal file
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attributes.factory.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require.ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
ATTRIBUTE_KEY_CONTEXT = _require.ATTRIBUTE_KEY_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
|
||||
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagEndContext;
|
||||
},
|
||||
noneWhitespace: function noneWhitespace(state, tokens, contextFactories, options) {
|
||||
var attributeKeyContext = contextFactories[ATTRIBUTE_KEY_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeKeyContext;
|
||||
},
|
||||
equal: function equal(state, tokens, contextFactories, options) {
|
||||
var attributeAssignmentContext = contextFactories[ATTRIBUTE_ASSIGNMENT_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeAssignmentContext;
|
||||
}
|
||||
};
|
||||
|
||||
var ATTRIBUTE_KEY_PATTERN = /^\S/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '=') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.equal(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (ATTRIBUTE_KEY_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.noneWhitespace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributesContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTES_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
74
app/node_modules/hyntax/lib-es5/tokenize-contexts/close-tag.factory.js
generated
vendored
Normal file
74
app/node_modules/hyntax/lib-es5/tokenize-contexts/close-tag.factory.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG,
|
||||
TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT,
|
||||
TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
/**
|
||||
* @param withinContent — type of content withing
|
||||
* which the close tag was found
|
||||
*/
|
||||
|
||||
|
||||
function getCloseTokenType(withinContent) {
|
||||
switch (withinContent) {
|
||||
case 'script':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG_SCRIPT;
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG_STYLE;
|
||||
}
|
||||
|
||||
case 'data':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
|
||||
var tokenType = getCloseTokenType(options.withinContent);
|
||||
var dataContext = contextFactories[DATA_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: tokenType,
|
||||
content: '' + state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = dataContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function closeTagContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: CLOSE_TAG_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
55
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-content.factory.js
generated
vendored
Normal file
55
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT,
|
||||
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentEnd: function commentEnd(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var commentContentContext = contextFactories[COMMENT_END_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentContentContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '-' || chars === '--') {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (chars === '-->') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentEnd: function commentEnd(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_END,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = dataContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
54
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-start.factory.js
generated
vendored
Normal file
54
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT,
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentStart: function commentStart(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var commentContentContext = contextFactories[COMMENT_CONTENT_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_START,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentContentContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '<' || chars === '<!' || chars === '<!-') {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (chars === '<!--') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentStart(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
139
app/node_modules/hyntax/lib-es5/tokenize-contexts/data.factory.js
generated
vendored
Normal file
139
app/node_modules/hyntax/lib-es5/tokenize-contexts/data.factory.js
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_TEXT = _require2.TOKEN_TEXT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT,
|
||||
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
|
||||
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
|
||||
COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT;
|
||||
|
||||
function generateTextToken(state) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
return {
|
||||
type: TOKEN_TEXT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
};
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
openingCornerBraceWithText: function openingCornerBraceWithText(state, tokens, contextFactories) {
|
||||
var openTagStartContext = contextFactories[OPEN_TAG_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagStartContext;
|
||||
},
|
||||
openingCornerBraceWithSlash: function openingCornerBraceWithSlash(state, tokens, contextFactories) {
|
||||
var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'data' });
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = closeTagContext;
|
||||
},
|
||||
doctypeStart: function doctypeStart(state, tokens, contextFactories) {
|
||||
var doctypeStartContext = contextFactories[DOCTYPE_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeStartContext;
|
||||
},
|
||||
commentStart: function commentStart(state, tokens, contextFactories) {
|
||||
var commentStartContext = contextFactories[COMMENT_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentStartContext;
|
||||
}
|
||||
};
|
||||
|
||||
function handleDataContextContentEnd(state, tokens) {
|
||||
var textContent = '' + state.accumulatedContent + state.decisionBuffer;
|
||||
|
||||
if (textContent.length !== 0) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_TEXT,
|
||||
content: textContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Doctype opening that may still be incomplete, e.g. '<!DOCTYP'.
var INCOMPLETE_DOCTYPE_START = /<!\w*$/;
// Fully recognizable doctype opening (case-insensitive).
var COMPLETE_DOCTYPE_START = /<!DOCTYPE/i;
// '<' immediately followed by a word character starts an opening tag.
var OPEN_TAG_START_PATTERN = /^<\w/;

/**
 * Map the characters collected in the decision buffer to a syntax
 * handler for the data context. Returns a no-op while the buffer is
 * still an ambiguous prefix, a dispatching closure once the syntax is
 * recognized, and undefined when the characters are plain text.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars === '<' || chars === '<!' || chars === '<!-' || INCOMPLETE_DOCTYPE_START.test(chars)) {
    /**
     * Signals to wait for more characters in
     * the decision buffer to decide about syntax
     */
    return function () {};
  }

  var handlerName = null;

  if (chars === '<!--') {
    handlerName = 'commentStart';
  } else if (COMPLETE_DOCTYPE_START.test(chars)) {
    handlerName = 'doctypeStart';
  } else if (OPEN_TAG_START_PATTERN.test(chars)) {
    handlerName = 'openingCornerBraceWithText';
  } else if (chars === '</') {
    handlerName = 'openingCornerBraceWithSlash';
  }

  if (handlerName !== null) {
    return function (state, tokens) {
      return syntaxHandlers[handlerName](state, tokens, contextFactories, options);
    };
  }
}
|
||||
|
||||
module.exports = function dataContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DATA_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
},
|
||||
handleContentEnd: handleDataContextContentEnd
|
||||
};
|
||||
};
|
48
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-bare.factory.js
generated
vendored
Normal file
48
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-bare.factory.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
attributeEnd: function attributeEnd(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (isWhitespace(chars) || chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.attributeEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeBareContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-start.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var doctypeAttributeWrappedContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeWrappedContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var doctypeAttributeWrappedEndContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeWrappedEndContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
66
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attributes.factory.js
generated
vendored
Normal file
66
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attributes.factory.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace;
|
||||
|
||||
var _require2 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require2.DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories) {
|
||||
var doctypeAttributeWrappedStartContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT](contextFactories, { wrapper: state.decisionBuffer });
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeWrappedStartContext;
|
||||
},
|
||||
bare: function bare(state, tokens, contextFactories) {
|
||||
var doctypeAttributeBareStartContext = contextFactories[DOCTYPE_ATTRIBUTE_BARE_CONTEXT](contextFactories);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeBareStartContext;
|
||||
},
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
|
||||
var doctypeEndContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeEndContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '"' || chars === '\'') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (!isWhitespace(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.bare(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributesContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-end.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_END,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = dataContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
70
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-start.factory.js
generated
vendored
Normal file
70
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function generateDoctypeStartToken(state) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
return {
|
||||
type: TOKEN_DOCTYPE_START,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
};
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
|
||||
var doctypeEndContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push(generateDoctypeStartToken(state));
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeEndContext;
|
||||
},
|
||||
whitespace: function whitespace(state, tokens, contextFactories) {
|
||||
var attributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push(generateDoctypeStartToken(state));
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (isWhitespace(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.whitespace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
87
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-end.factory.js
generated
vendored
Normal file
87
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT,
|
||||
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
|
||||
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
|
||||
|
||||
function getTokenType(tagName) {
|
||||
switch (tagName) {
|
||||
case 'script':
|
||||
{
|
||||
return TOKEN_OPEN_TAG_END_SCRIPT;
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
return TOKEN_OPEN_TAG_END_STYLE;
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
return TOKEN_OPEN_TAG_END;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getContentContext(tagName, contextFactories, options) {
|
||||
switch (tagName) {
|
||||
case 'script':
|
||||
{
|
||||
return contextFactories[SCRIPT_CONTENT_CONTEXT](contextFactories, options);
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
return contextFactories[STYLE_CONTENT_CONTEXT](contextFactories, options);
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
return contextFactories[DATA_CONTEXT](contextFactories, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: getTokenType(options.tagName),
|
||||
content: '' + state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = getContentContext(options.tagName, contextFactories, options);
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function openTagEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: OPEN_TAG_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
188
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-start.factory.js
generated
vendored
Normal file
188
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
parseOpenTagName = _require.parseOpenTagName,
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
|
||||
TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
function handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories) {
|
||||
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: 'script' });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagEndContext;
|
||||
}
|
||||
|
||||
function handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories) {
|
||||
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: 'style' });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START_STYLE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagEndContext;
|
||||
}
|
||||
|
||||
function handleTagEndAfterOpenTagStart(state, tokens, contextFactories) {
|
||||
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: undefined });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagEndContext;
|
||||
}
|
||||
|
||||
function handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: 'script' });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
|
||||
function handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: 'style' });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START_STYLE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
|
||||
function handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: undefined });
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_OPEN_TAG_START,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
|
||||
var tagName = parseOpenTagName(state.accumulatedContent);
|
||||
|
||||
switch (tagName) {
|
||||
case 'script':
|
||||
{
|
||||
handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories, options);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories, options);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
handleTagEndAfterOpenTagStart(state, tokens, contextFactories, options);
|
||||
}
|
||||
}
|
||||
},
|
||||
whitespace: function whitespace(state, tokens, contextFactories, options) {
|
||||
var tagName = parseOpenTagName(state.accumulatedContent);
|
||||
|
||||
switch (tagName) {
|
||||
case 'script':
|
||||
{
|
||||
handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories, options);
|
||||
break;
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories, options);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
{
|
||||
handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (isWhitespace(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.whitespace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function openTagStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: OPEN_TAG_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/script-tag-content.factory.js
generated
vendored
Normal file
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/script-tag-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingScriptTag: function closingScriptTag(state, tokens, contextFactories) {
|
||||
var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'script' });
|
||||
|
||||
if (state.accumulatedContent !== '') {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_SCRIPT_TAG_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = closeTagContext;
|
||||
}
|
||||
};
|
||||
|
||||
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
|
||||
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingScriptTag(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function scriptTagContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: SCRIPT_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/style-tag-content.factory.js
generated
vendored
Normal file
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/style-tag-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingStyleTag: function closingStyleTag(state, tokens, contextFactories) {
|
||||
var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'style' });
|
||||
|
||||
if (state.accumulatedContent !== '') {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_STYLE_TAG_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = closeTagContext;
|
||||
}
|
||||
};
|
||||
|
||||
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
|
||||
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingStyleTag(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function styleTagContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: STYLE_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
Reference in New Issue
Block a user