mirror of
https://scm.univ-tours.fr/22107988t/rappaurio-sae501_502.git
synced 2025-08-29 11:35:59 +02:00
v1.0 du site web
This commit is contained in:
11
app/node_modules/hyntax/lib-es5/constants/ast-nodes.js
generated
vendored
Normal file
11
app/node_modules/hyntax/lib-es5/constants/ast-nodes.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
NODE_DOCUMENT: 'document',
|
||||
NODE_TAG: 'tag',
|
||||
NODE_TEXT: 'text',
|
||||
NODE_DOCTYPE: 'doctype',
|
||||
NODE_COMMENT: 'comment',
|
||||
NODE_SCRIPT: 'script',
|
||||
NODE_STYLE: 'style'
|
||||
};
|
29
app/node_modules/hyntax/lib-es5/constants/token-types.js
generated
vendored
Normal file
29
app/node_modules/hyntax/lib-es5/constants/token-types.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
TOKEN_TEXT: 'token:text',
|
||||
TOKEN_OPEN_TAG_START: 'token:open-tag-start',
|
||||
TOKEN_ATTRIBUTE_KEY: 'token:attribute-key',
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT: 'token:attribute-assignment',
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START: 'token:attribute-value-wrapper-start',
|
||||
TOKEN_ATTRIBUTE_VALUE: 'token:attribute-value',
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END: 'token:attribute-value-wrapper-end',
|
||||
TOKEN_OPEN_TAG_END: 'token:open-tag-end',
|
||||
TOKEN_CLOSE_TAG: 'token:close-tag',
|
||||
TOKEN_OPEN_TAG_START_SCRIPT: 'token:open-tag-start-script',
|
||||
TOKEN_SCRIPT_TAG_CONTENT: 'token:script-tag-content',
|
||||
TOKEN_OPEN_TAG_END_SCRIPT: 'token:open-tag-end-script',
|
||||
TOKEN_CLOSE_TAG_SCRIPT: 'token:close-tag-script',
|
||||
TOKEN_OPEN_TAG_START_STYLE: 'token:open-tag-start-style',
|
||||
TOKEN_STYLE_TAG_CONTENT: 'token:style-tag-content',
|
||||
TOKEN_OPEN_TAG_END_STYLE: 'token:open-tag-end-style',
|
||||
TOKEN_CLOSE_TAG_STYLE: 'token:close-tag-style',
|
||||
TOKEN_DOCTYPE_START: 'token:doctype-start',
|
||||
TOKEN_DOCTYPE_END: 'token:doctype-end',
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START: 'token:doctype-attribute-wrapper-start',
|
||||
TOKEN_DOCTYPE_ATTRIBUTE: 'token:doctype-attribute',
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END: 'token:doctype-attribute-wrapper-end',
|
||||
TOKEN_COMMENT_START: 'token:comment-start',
|
||||
TOKEN_COMMENT_CONTENT: 'token:comment-content',
|
||||
TOKEN_COMMENT_END: 'token:comment-end'
|
||||
};
|
23
app/node_modules/hyntax/lib-es5/constants/tokenizer-contexts.js
generated
vendored
Normal file
23
app/node_modules/hyntax/lib-es5/constants/tokenizer-contexts.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
DATA_CONTEXT: 'tokenizer-context:data',
|
||||
OPEN_TAG_START_CONTEXT: 'tokenizer-context:open-tag-start',
|
||||
CLOSE_TAG_CONTEXT: 'tokenizer-context:close-tag',
|
||||
ATTRIBUTES_CONTEXT: 'tokenizer-context:attributes',
|
||||
OPEN_TAG_END_CONTEXT: 'tokenizer-context:open-tag-end',
|
||||
ATTRIBUTE_KEY_CONTEXT: 'tokenizer-context:attribute-key',
|
||||
ATTRIBUTE_VALUE_CONTEXT: 'tokenizer-context:attribute-value',
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT: 'tokenizer-context:attribute-value-bare',
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT: 'tokenizer-context:attribute-value-wrapped',
|
||||
SCRIPT_CONTENT_CONTEXT: 'tokenizer-context:script-content',
|
||||
STYLE_CONTENT_CONTEXT: 'tokenizer-context:style-content',
|
||||
DOCTYPE_START_CONTEXT: 'tokenizer-context:doctype-start',
|
||||
DOCTYPE_END_CONTEXT: 'tokenizer-context:doctype-end',
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT: 'tokenizer-context:doctype-attributes',
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT: 'tokenizer-context:doctype-attribute-wrapped',
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT: 'tokenizer-context:doctype-attribute-bare',
|
||||
COMMENT_START_CONTEXT: 'tokenizer-context:comment-start',
|
||||
COMMENT_CONTENT_CONTEXT: 'tokenizer-context:comment-content',
|
||||
COMMENT_END_CONTEXT: 'tokenizer-context:comment-end'
|
||||
};
|
16
app/node_modules/hyntax/lib-es5/constants/tree-constructor-contexts.js
generated
vendored
Normal file
16
app/node_modules/hyntax/lib-es5/constants/tree-constructor-contexts.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
TAG_CONTENT_CONTEXT: 'tree-constructor-context:tag-content',
|
||||
TAG_CONTEXT: 'tree-constructor-context:tag',
|
||||
TAG_NAME_CONTEXT: 'tree-constructor-context:tag-name',
|
||||
ATTRIBUTES_CONTEXT: 'tree-constructor-context:attributes',
|
||||
ATTRIBUTE_CONTEXT: 'tree-constructor-context:attribute',
|
||||
ATTRIBUTE_VALUE_CONTEXT: 'tree-constructor-context:attribute-value',
|
||||
COMMENT_CONTEXT: 'tree-constructor-context:comment',
|
||||
DOCTYPE_CONTEXT: 'tree-constructor-context:doctype',
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT: 'tree-constructor-context:doctype-attributes',
|
||||
DOCTYPE_ATTRIBUTE_CONTEXT: 'tree-constructor-context:doctype-attribute',
|
||||
SCRIPT_TAG_CONTEXT: 'tree-constructor-context:script-tag',
|
||||
STYLE_TAG_CONTEXT: 'tree-constructor-context:style-tag'
|
||||
};
|
93
app/node_modules/hyntax/lib-es5/construct-tree.js
generated
vendored
Normal file
93
app/node_modules/hyntax/lib-es5/construct-tree.js
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
"use strict";
|
||||
|
||||
var _contextsMap;
|
||||
|
||||
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
||||
|
||||
var tag = require('./tree-constructor-context-handlers/tag');
|
||||
|
||||
var tagContent = require('./tree-constructor-context-handlers/tag-content');
|
||||
|
||||
var tagName = require('./tree-constructor-context-handlers/tag-name');
|
||||
|
||||
var attributes = require('./tree-constructor-context-handlers/attributes');
|
||||
|
||||
var attribute = require('./tree-constructor-context-handlers/attribute');
|
||||
|
||||
var attributeValue = require('./tree-constructor-context-handlers/attribute-value');
|
||||
|
||||
var comment = require('./tree-constructor-context-handlers/comment');
|
||||
|
||||
var doctype = require('./tree-constructor-context-handlers/doctype');
|
||||
|
||||
var doctypeAttributes = require('./tree-constructor-context-handlers/doctype-attributes');
|
||||
|
||||
var doctypeAttribute = require('./tree-constructor-context-handlers/doctype-attribute');
|
||||
|
||||
var scriptTag = require('./tree-constructor-context-handlers/script-tag');
|
||||
|
||||
var styleTag = require('./tree-constructor-context-handlers/style-tag');
|
||||
|
||||
var _require = require('./constants/tree-constructor-contexts'),
|
||||
TAG_CONTENT_CONTEXT = _require.TAG_CONTENT_CONTEXT,
|
||||
TAG_CONTEXT = _require.TAG_CONTEXT,
|
||||
TAG_NAME_CONTEXT = _require.TAG_NAME_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
ATTRIBUTE_CONTEXT = _require.ATTRIBUTE_CONTEXT,
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
|
||||
COMMENT_CONTEXT = _require.COMMENT_CONTEXT,
|
||||
DOCTYPE_CONTEXT = _require.DOCTYPE_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require.DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_CONTEXT,
|
||||
SCRIPT_TAG_CONTEXT = _require.SCRIPT_TAG_CONTEXT,
|
||||
STYLE_TAG_CONTEXT = _require.STYLE_TAG_CONTEXT;
|
||||
|
||||
var _require2 = require('./constants/ast-nodes'),
|
||||
NODE_DOCUMENT = _require2.NODE_DOCUMENT;
|
||||
|
||||
var contextsMap = (_contextsMap = {}, _defineProperty(_contextsMap, TAG_CONTENT_CONTEXT, tagContent), _defineProperty(_contextsMap, TAG_CONTEXT, tag), _defineProperty(_contextsMap, TAG_NAME_CONTEXT, tagName), _defineProperty(_contextsMap, ATTRIBUTES_CONTEXT, attributes), _defineProperty(_contextsMap, ATTRIBUTE_CONTEXT, attribute), _defineProperty(_contextsMap, ATTRIBUTE_VALUE_CONTEXT, attributeValue), _defineProperty(_contextsMap, COMMENT_CONTEXT, comment), _defineProperty(_contextsMap, DOCTYPE_CONTEXT, doctype), _defineProperty(_contextsMap, DOCTYPE_ATTRIBUTES_CONTEXT, doctypeAttributes), _defineProperty(_contextsMap, DOCTYPE_ATTRIBUTE_CONTEXT, doctypeAttribute), _defineProperty(_contextsMap, SCRIPT_TAG_CONTEXT, scriptTag), _defineProperty(_contextsMap, STYLE_TAG_CONTEXT, styleTag), _contextsMap);
|
||||
|
||||
function processTokens(tokens, state, positionOffset) {
|
||||
var tokenIndex = state.caretPosition - positionOffset;
|
||||
|
||||
while (tokenIndex < tokens.length) {
|
||||
var token = tokens[tokenIndex];
|
||||
var contextHandler = contextsMap[state.currentContext.type];
|
||||
state = contextHandler(token, state);
|
||||
tokenIndex = state.caretPosition - positionOffset;
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function constructTree() {
|
||||
var tokens = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
|
||||
var existingState = arguments.length > 1 ? arguments[1] : undefined;
|
||||
var state = existingState;
|
||||
|
||||
if (existingState === undefined) {
|
||||
var rootContext = {
|
||||
type: TAG_CONTENT_CONTEXT,
|
||||
parentRef: undefined,
|
||||
content: []
|
||||
};
|
||||
var rootNode = {
|
||||
nodeType: NODE_DOCUMENT,
|
||||
parentRef: undefined,
|
||||
content: {}
|
||||
};
|
||||
state = {
|
||||
caretPosition: 0,
|
||||
currentContext: rootContext,
|
||||
currentNode: rootNode,
|
||||
rootNode: rootNode
|
||||
};
|
||||
}
|
||||
|
||||
var positionOffset = state.caretPosition;
|
||||
processTokens(tokens, state, positionOffset);
|
||||
return {
|
||||
state: state,
|
||||
ast: state.rootNode
|
||||
};
|
||||
};
|
82
app/node_modules/hyntax/lib-es5/helpers.js
generated
vendored
Normal file
82
app/node_modules/hyntax/lib-es5/helpers.js
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
|
||||
var OPEN_TAG_NAME_PATTERN = /^<(\S+)/;
|
||||
var CLOSE_TAG_NAME_PATTERN = /^<\/((?:.|\n)*)>$/;
|
||||
|
||||
function prettyJSON(obj) {
|
||||
return JSON.stringify(obj, null, 2);
|
||||
}
|
||||
/**
|
||||
* Clear tree of nodes from everything
|
||||
* "parentRef" properties so the tree
|
||||
* can be easily stringified into JSON.
|
||||
*/
|
||||
|
||||
|
||||
function clearAst(ast) {
|
||||
var cleanAst = ast;
|
||||
delete cleanAst.parentRef;
|
||||
|
||||
if (Array.isArray(ast.content.children)) {
|
||||
cleanAst.content.children = ast.content.children.map(function (node) {
|
||||
return clearAst(node);
|
||||
});
|
||||
}
|
||||
|
||||
return cleanAst;
|
||||
}
|
||||
|
||||
function parseOpenTagName(openTagStartTokenContent) {
|
||||
var match = openTagStartTokenContent.match(OPEN_TAG_NAME_PATTERN);
|
||||
|
||||
if (match === null) {
|
||||
throw new Error('Unable to parse open tag name.\n' + "".concat(openTagStartTokenContent, " does not match pattern of opening tag."));
|
||||
}
|
||||
|
||||
return match[1].toLowerCase();
|
||||
}
|
||||
|
||||
function parseCloseTagName(closeTagTokenContent) {
|
||||
var match = closeTagTokenContent.match(CLOSE_TAG_NAME_PATTERN);
|
||||
|
||||
if (match === null) {
|
||||
throw new Error('Unable to parse close tag name.\n' + "".concat(closeTagTokenContent, " does not match pattern of closing tag."));
|
||||
}
|
||||
|
||||
return match[1].trim().toLowerCase();
|
||||
}
|
||||
|
||||
function calculateTokenCharactersRange(state, _ref) {
|
||||
var keepBuffer = _ref.keepBuffer;
|
||||
|
||||
if (keepBuffer === undefined) {
|
||||
throw new Error('Unable to calculate characters range for token.\n' + '"keepBuffer" parameter is not specified to decide if ' + 'the decision buffer is a part of characters range.');
|
||||
}
|
||||
|
||||
var startPosition = state.caretPosition - (state.accumulatedContent.length - 1) - state.decisionBuffer.length;
|
||||
var endPosition;
|
||||
|
||||
if (!keepBuffer) {
|
||||
endPosition = state.caretPosition - state.decisionBuffer.length;
|
||||
} else {
|
||||
endPosition = state.caretPosition;
|
||||
}
|
||||
|
||||
return {
|
||||
startPosition: startPosition,
|
||||
endPosition: endPosition
|
||||
};
|
||||
}
|
||||
|
||||
function isWhitespace(_char) {
|
||||
return _char === ' ' || _char === '\n' || _char === '\t';
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
prettyJSON: prettyJSON,
|
||||
clearAst: clearAst,
|
||||
parseOpenTagName: parseOpenTagName,
|
||||
parseCloseTagName: parseCloseTagName,
|
||||
calculateTokenCharactersRange: calculateTokenCharactersRange,
|
||||
isWhitespace: isWhitespace
|
||||
};
|
83
app/node_modules/hyntax/lib-es5/stream-tokenizer.js
generated
vendored
Normal file
83
app/node_modules/hyntax/lib-es5/stream-tokenizer.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
"use strict";
|
||||
|
||||
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
|
||||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||||
|
||||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||||
|
||||
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
|
||||
|
||||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||||
|
||||
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
|
||||
|
||||
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
|
||||
|
||||
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
|
||||
|
||||
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
|
||||
|
||||
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
|
||||
|
||||
var _require = require('stream'),
|
||||
Transform = _require.Transform;
|
||||
|
||||
var tokenize = require('./tokenize');
|
||||
|
||||
var StreamTokenizer = /*#__PURE__*/function (_Transform) {
|
||||
_inherits(StreamTokenizer, _Transform);
|
||||
|
||||
var _super = _createSuper(StreamTokenizer);
|
||||
|
||||
function StreamTokenizer(options) {
|
||||
var _this;
|
||||
|
||||
_classCallCheck(this, StreamTokenizer);
|
||||
|
||||
_this = _super.call(this, Object.assign({}, options, {
|
||||
decodeStrings: false,
|
||||
readableObjectMode: true
|
||||
}));
|
||||
_this.currentTokenizerState = undefined;
|
||||
|
||||
_this.setDefaultEncoding('utf8');
|
||||
|
||||
return _this;
|
||||
}
|
||||
|
||||
_createClass(StreamTokenizer, [{
|
||||
key: "_transform",
|
||||
value: function _transform(chunk, encoding, callback) {
|
||||
var chunkString = chunk;
|
||||
|
||||
if (Buffer.isBuffer(chunk)) {
|
||||
chunkString = chunk.toString();
|
||||
}
|
||||
|
||||
var _tokenize = tokenize(chunkString, this.currentTokenizerState, {
|
||||
isFinalChunk: false
|
||||
}),
|
||||
state = _tokenize.state,
|
||||
tokens = _tokenize.tokens;
|
||||
|
||||
this.currentTokenizerState = state;
|
||||
callback(null, tokens);
|
||||
}
|
||||
}, {
|
||||
key: "_flush",
|
||||
value: function _flush(callback) {
|
||||
var tokenizeResults = tokenize('', this.currentTokenizerState, {
|
||||
isFinalChunk: true
|
||||
});
|
||||
this.push(tokenizeResults.tokens);
|
||||
callback();
|
||||
}
|
||||
}]);
|
||||
|
||||
return StreamTokenizer;
|
||||
}(Transform);
|
||||
|
||||
module.exports = StreamTokenizer;
|
63
app/node_modules/hyntax/lib-es5/stream-tree-constructor.js
generated
vendored
Normal file
63
app/node_modules/hyntax/lib-es5/stream-tree-constructor.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
|
||||
|
||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
||||
|
||||
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
|
||||
|
||||
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
|
||||
|
||||
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
|
||||
|
||||
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
|
||||
|
||||
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
|
||||
|
||||
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
|
||||
|
||||
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
|
||||
|
||||
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
|
||||
|
||||
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
|
||||
|
||||
var _require = require('stream'),
|
||||
Transform = _require.Transform;
|
||||
|
||||
var constructTree = require('./construct-tree');
|
||||
|
||||
var StreamTreeConstructor = /*#__PURE__*/function (_Transform) {
|
||||
_inherits(StreamTreeConstructor, _Transform);
|
||||
|
||||
var _super = _createSuper(StreamTreeConstructor);
|
||||
|
||||
function StreamTreeConstructor(options) {
|
||||
var _this;
|
||||
|
||||
_classCallCheck(this, StreamTreeConstructor);
|
||||
|
||||
_this = _super.call(this, Object.assign({}, options, {
|
||||
objectMode: true,
|
||||
readableObjectMode: true
|
||||
}));
|
||||
_this.currentState = undefined;
|
||||
return _this;
|
||||
}
|
||||
|
||||
_createClass(StreamTreeConstructor, [{
|
||||
key: "_transform",
|
||||
value: function _transform(tokensChunk, encoding, callback) {
|
||||
var _constructTree = constructTree(tokensChunk, this.currentState),
|
||||
state = _constructTree.state,
|
||||
ast = _constructTree.ast;
|
||||
|
||||
this.currentState = state;
|
||||
callback(null, ast);
|
||||
}
|
||||
}]);
|
||||
|
||||
return StreamTreeConstructor;
|
||||
}(Transform);
|
||||
|
||||
module.exports = StreamTreeConstructor;
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-assignment.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-assignment.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require3.ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
equal: function equal(state, tokens, contextFactories, options) {
|
||||
var attributeValueContext = contextFactories[ATTRIBUTE_VALUE_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
content: '' + state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '=') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.equal(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeKeyContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-key.factory.js
generated
vendored
Normal file
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-key.factory.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_KEY_CONTEXT = _require3.ATTRIBUTE_KEY_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
keyEnd: function keyEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_KEY,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
var KEY_BREAK_CHARS = [' ', '\n', '\t', '=', '/', '>'];
|
||||
|
||||
if (KEY_BREAK_CHARS.indexOf(chars) !== -1) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.keyEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeKeyContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_KEY_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-bare.factory.js
generated
vendored
Normal file
49
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-bare.factory.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require3.ATTRIBUTE_VALUE_BARE_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
valueEnd: function valueEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
var BARE_VALUE_END_PATTERN = /\s/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (BARE_VALUE_END_PATTERN.test(chars) || chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.valueEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueBareContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_BARE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-start.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value-wrapped.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedEndContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedEndContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueWrappedContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value.factory.js
generated
vendored
Normal file
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attribute-value.factory.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require.ATTRIBUTE_VALUE_BARE_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var attributeValueWrappedStartContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT](contextFactories, Object.assign({}, options, { wrapper: state.decisionBuffer }));
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueWrappedStartContext;
|
||||
},
|
||||
bare: function bare(state, tokens, contextFactories, options) {
|
||||
var attributeValueBareContext = contextFactories[ATTRIBUTE_VALUE_BARE_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeValueBareContext;
|
||||
},
|
||||
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
|
||||
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
var BARE_VALUE_PATTERN = /\S/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '"' || chars === '\'') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (BARE_VALUE_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.bare(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributeValueContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTE_VALUE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attributes.factory.js
generated
vendored
Normal file
65
app/node_modules/hyntax/lib-es5/tokenize-contexts/attributes.factory.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require.ATTRIBUTE_ASSIGNMENT_CONTEXT,
|
||||
ATTRIBUTE_KEY_CONTEXT = _require.ATTRIBUTE_KEY_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
|
||||
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagEndContext;
|
||||
},
|
||||
noneWhitespace: function noneWhitespace(state, tokens, contextFactories, options) {
|
||||
var attributeKeyContext = contextFactories[ATTRIBUTE_KEY_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeKeyContext;
|
||||
},
|
||||
equal: function equal(state, tokens, contextFactories, options) {
|
||||
var attributeAssignmentContext = contextFactories[ATTRIBUTE_ASSIGNMENT_CONTEXT](contextFactories, options);
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = attributeAssignmentContext;
|
||||
}
|
||||
};
|
||||
|
||||
var ATTRIBUTE_KEY_PATTERN = /^\S/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>' || chars === '/') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '=') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.equal(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (ATTRIBUTE_KEY_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.noneWhitespace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function attributesContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: ATTRIBUTES_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
74
app/node_modules/hyntax/lib-es5/tokenize-contexts/close-tag.factory.js
generated
vendored
Normal file
74
app/node_modules/hyntax/lib-es5/tokenize-contexts/close-tag.factory.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG,
|
||||
TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT,
|
||||
TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
/**
|
||||
* @param withinContent — type of content withing
|
||||
* which the close tag was found
|
||||
*/
|
||||
|
||||
|
||||
function getCloseTokenType(withinContent) {
|
||||
switch (withinContent) {
|
||||
case 'script':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG_SCRIPT;
|
||||
}
|
||||
|
||||
case 'style':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG_STYLE;
|
||||
}
|
||||
|
||||
case 'data':
|
||||
{
|
||||
return TOKEN_CLOSE_TAG;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
|
||||
var tokenType = getCloseTokenType(options.withinContent);
|
||||
var dataContext = contextFactories[DATA_CONTEXT](contextFactories, options);
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: tokenType,
|
||||
content: '' + state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = dataContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function closeTagContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: CLOSE_TAG_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
55
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-content.factory.js
generated
vendored
Normal file
55
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT,
|
||||
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentEnd: function commentEnd(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var commentContentContext = contextFactories[COMMENT_END_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentContentContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '-' || chars === '--') {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (chars === '-->') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentEnd: function commentEnd(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_END,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = dataContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
54
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-start.factory.js
generated
vendored
Normal file
54
app/node_modules/hyntax/lib-es5/tokenize-contexts/comment-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT,
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
commentStart: function commentStart(state, tokens, contextFactories) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var commentContentContext = contextFactories[COMMENT_CONTENT_CONTEXT](contextFactories);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_START,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentContentContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '<' || chars === '<!' || chars === '<!-') {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (chars === '<!--') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentStart(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function commentStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: COMMENT_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
139
app/node_modules/hyntax/lib-es5/tokenize-contexts/data.factory.js
generated
vendored
Normal file
139
app/node_modules/hyntax/lib-es5/tokenize-contexts/data.factory.js
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_TEXT = _require2.TOKEN_TEXT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT,
|
||||
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
|
||||
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
|
||||
COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT;
|
||||
|
||||
function generateTextToken(state) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
return {
|
||||
type: TOKEN_TEXT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
};
|
||||
}
|
||||
|
||||
var syntaxHandlers = {
|
||||
openingCornerBraceWithText: function openingCornerBraceWithText(state, tokens, contextFactories) {
|
||||
var openTagStartContext = contextFactories[OPEN_TAG_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = openTagStartContext;
|
||||
},
|
||||
openingCornerBraceWithSlash: function openingCornerBraceWithSlash(state, tokens, contextFactories) {
|
||||
var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'data' });
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = closeTagContext;
|
||||
},
|
||||
doctypeStart: function doctypeStart(state, tokens, contextFactories) {
|
||||
var doctypeStartContext = contextFactories[DOCTYPE_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeStartContext;
|
||||
},
|
||||
commentStart: function commentStart(state, tokens, contextFactories) {
|
||||
var commentStartContext = contextFactories[COMMENT_START_CONTEXT](contextFactories);
|
||||
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = commentStartContext;
|
||||
}
|
||||
};
|
||||
|
||||
function handleDataContextContentEnd(state, tokens) {
|
||||
var textContent = '' + state.accumulatedContent + state.decisionBuffer;
|
||||
|
||||
if (textContent.length !== 0) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_TEXT,
|
||||
content: textContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var INCOMPLETE_DOCTYPE_START = /<!\w*$/;
|
||||
var COMPLETE_DOCTYPE_START = /<!DOCTYPE/i;
|
||||
var OPEN_TAG_START_PATTERN = /^<\w/;
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === '<' || chars === '<!' || chars === '<!-' || INCOMPLETE_DOCTYPE_START.test(chars)) {
|
||||
/**
|
||||
* Signals to wait for more characters in
|
||||
* the decision buffer to decide about syntax
|
||||
*/
|
||||
return function () {};
|
||||
}
|
||||
|
||||
if (chars === '<!--') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.commentStart(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (COMPLETE_DOCTYPE_START.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.doctypeStart(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (OPEN_TAG_START_PATTERN.test(chars)) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.openingCornerBraceWithText(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
|
||||
if (chars === '</') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.openingCornerBraceWithSlash(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function dataContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DATA_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
},
|
||||
handleContentEnd: handleDataContextContentEnd
|
||||
};
|
||||
};
|
48
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-bare.factory.js
generated
vendored
Normal file
48
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-bare.factory.js
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
attributeEnd: function attributeEnd(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (isWhitespace(chars) || chars === '>') {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.attributeEnd(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeBareContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-end.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributesContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-start.factory.js
generated
vendored
Normal file
46
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
var doctypeAttributeWrappedContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeWrappedContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attribute-wrapped.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT;
|
||||
|
||||
var syntaxHandlers = {
|
||||
wrapper: function wrapper(state, tokens, contextFactories, options) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
|
||||
var doctypeAttributeWrappedEndContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT](contextFactories, options);
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.caretPosition -= state.decisionBuffer.length;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = doctypeAttributeWrappedEndContext;
|
||||
}
|
||||
};
|
||||
|
||||
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
|
||||
if (chars === options.wrapper) {
|
||||
return function (state, tokens) {
|
||||
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributeWrappedContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
66
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attributes.factory.js
generated
vendored
Normal file
66
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-attributes.factory.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace;
|
||||
|
||||
var _require2 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require2.DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
|
||||
|
||||
/**
 * Syntax handlers for the doctype-attributes context. No token is
 * emitted at this level; each handler only re-routes the tokenizer
 * into a more specific context, rewinding the caret so the buffered
 * character is re-parsed there.
 */
var syntaxHandlers = {
  // Quote character — a wrapped attribute value is starting.
  wrapper: function wrapper(state, tokens, contextFactories) {
    var nextContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT](contextFactories, { wrapper: state.decisionBuffer });

    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.accumulatedContent = '';
    state.currentContext = nextContext;
  },
  // Non-whitespace, non-quote — a bare attribute value is starting.
  bare: function bare(state, tokens, contextFactories) {
    var nextContext = contextFactories[DOCTYPE_ATTRIBUTE_BARE_CONTEXT](contextFactories);

    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.accumulatedContent = '';
    state.currentContext = nextContext;
  },
  // ">" — the doctype declaration is ending.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    var nextContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);

    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.accumulatedContent = '';
    state.currentContext = nextContext;
  }
};
|
||||
|
||||
/**
 * Dispatch for the doctype-attributes context: quotes start a wrapped
 * value, ">" ends the doctype, any other non-whitespace character
 * starts a bare value. Whitespace yields no handler (it is consumed
 * without effect).
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var isQuote = chars === '"' || chars === '\'';

  if (isQuote) {
    return function (state, tokens) {
      return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
    };
  }

  if (chars === '>') {
    return function (state, tokens) {
      return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
    };
  }

  if (!isWhitespace(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.bare(state, tokens, contextFactories, options);
    };
  }
}
|
||||
|
||||
module.exports = function doctypeAttributesContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-end.factory.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
/**
 * Handlers for the doctype-end context. On ">" the whole buffered
 * ending — including the brace itself — becomes a TOKEN_DOCTYPE_END
 * token, and the tokenizer returns to the data context.
 */
var syntaxHandlers = {
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    // keepBuffer: the ">" in the decision buffer belongs to the token.
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
    var token = {
      type: TOKEN_DOCTYPE_END,
      content: state.accumulatedContent + state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    };

    tokens.push(token);

    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = dataContext;
  }
};
|
||||
|
||||
/**
 * The doctype-end context reacts to exactly one character: ">".
 * Anything else yields no handler.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== '>') {
    return undefined;
  }

  return function (state, tokens) {
    return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
  };
}
|
||||
|
||||
module.exports = function doctypeEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
70
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-start.factory.js
generated
vendored
Normal file
70
app/node_modules/hyntax/lib-es5/tokenize-contexts/doctype-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
/**
 * Builds a TOKEN_DOCTYPE_START token from the accumulated content;
 * the decision buffer is excluded from the character range
 * (keepBuffer: false).
 */
function generateDoctypeStartToken(state) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });
  var token = {
    type: TOKEN_DOCTYPE_START,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };

  return token;
}
|
||||
|
||||
/**
 * Handlers for the doctype-start context. Both emit the doctype-start
 * token first, then diverge: ">" jumps straight to the doctype-end
 * context, whitespace switches to doctype-attribute parsing. In both
 * cases the caret is rewound so the buffered character is re-parsed.
 */
var syntaxHandlers = {
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    var nextContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);

    tokens.push(generateDoctypeStartToken(state));

    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.accumulatedContent = '';
    state.currentContext = nextContext;
  },
  whitespace: function whitespace(state, tokens, contextFactories) {
    var nextContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories);

    tokens.push(generateDoctypeStartToken(state));

    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.accumulatedContent = '';
    state.currentContext = nextContext;
  }
};
|
||||
|
||||
/**
 * Dispatch for the doctype-start context: whitespace ends the
 * "<!DOCTYPE" keyword and moves on to attributes; ">" ends it and
 * moves straight to doctype-end handling. Other characters keep
 * accumulating as part of the keyword.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var handlerName = null;

  if (isWhitespace(chars)) {
    handlerName = 'whitespace';
  } else if (chars === '>') {
    handlerName = 'closingCornerBrace';
  }

  if (handlerName === null) {
    return undefined;
  }

  return function (state, tokens) {
    return syntaxHandlers[handlerName](state, tokens, contextFactories, options);
  };
}
|
||||
|
||||
module.exports = function doctypeStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: DOCTYPE_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
87
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-end.factory.js
generated
vendored
Normal file
87
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-end.factory.js
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT,
|
||||
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
|
||||
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
|
||||
|
||||
/**
 * Maps a tag name to the open-tag-end token type. <script> and <style>
 * get dedicated token types so their raw-text content can be treated
 * specially downstream.
 */
function getTokenType(tagName) {
  if (tagName === 'script') {
    return TOKEN_OPEN_TAG_END_SCRIPT;
  }

  if (tagName === 'style') {
    return TOKEN_OPEN_TAG_END_STYLE;
  }

  return TOKEN_OPEN_TAG_END;
}
|
||||
|
||||
/**
 * Picks the context that should parse the content following this
 * open-tag end: raw script/style content contexts for those tags,
 * otherwise the ordinary data context.
 */
function getContentContext(tagName, contextFactories, options) {
  if (tagName === 'script') {
    return contextFactories[SCRIPT_CONTENT_CONTEXT](contextFactories, options);
  }

  if (tagName === 'style') {
    return contextFactories[STYLE_CONTENT_CONTEXT](contextFactories, options);
  }

  return contextFactories[DATA_CONTEXT](contextFactories, options);
}
|
||||
|
||||
/**
 * Handlers for the open-tag-end context. On ">" the buffered ending
 * becomes a token (typed per tag name) and the tokenizer switches to
 * the matching content context.
 */
var syntaxHandlers = {
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
    // keepBuffer: the ">" in the decision buffer is part of the token.
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    var token = {
      type: getTokenType(options.tagName),
      content: state.accumulatedContent + state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    };

    tokens.push(token);

    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = getContentContext(options.tagName, contextFactories, options);
  }
};
|
||||
|
||||
/**
 * The open-tag-end context reacts to exactly one character: ">".
 * Anything else yields no handler.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== '>') {
    return undefined;
  }

  return function (state, tokens) {
    return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
  };
}
|
||||
|
||||
module.exports = function openTagEndContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: OPEN_TAG_END_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
188
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-start.factory.js
generated
vendored
Normal file
188
app/node_modules/hyntax/lib-es5/tokenize-contexts/open-tag-start.factory.js
generated
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
parseOpenTagName = _require.parseOpenTagName,
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
|
||||
TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
/**
 * Shared tail for all open-tag-start handlers: emits the accumulated
 * tag-name token (decision buffer excluded via keepBuffer: false),
 * then rewinds the caret over the decision buffer so `nextContext`
 * re-parses the terminating character, and switches to that context.
 *
 * The original code repeated this block six times with only the token
 * type and target context varying; factoring it out keeps the six
 * public handler signatures unchanged.
 */
function emitOpenTagStartToken(state, tokens, tokenType, nextContext) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });

  tokens.push({
    type: tokenType,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });

  state.accumulatedContent = '';
  state.caretPosition -= state.decisionBuffer.length;
  state.decisionBuffer = '';
  state.currentContext = nextContext;
}

/** "<script" terminated by ">"/"/" — emit token, parse the tag end. */
function handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START_SCRIPT, contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: 'script' }));
}

/** "<style" terminated by ">"/"/" — emit token, parse the tag end. */
function handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START_STYLE, contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: 'style' }));
}

/** Any other tag terminated by ">"/"/" — emit token, parse the tag end. */
function handleTagEndAfterOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START, contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, { tagName: undefined }));
}

/** "<script" followed by whitespace — emit token, parse attributes. */
function handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START_SCRIPT, contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: 'script' }));
}

/** "<style" followed by whitespace — emit token, parse attributes. */
function handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START_STYLE, contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: 'style' }));
}

/** Any other tag followed by whitespace — emit token, parse attributes. */
function handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories) {
  emitOpenTagStartToken(state, tokens, TOKEN_OPEN_TAG_START, contextFactories[ATTRIBUTES_CONTEXT](contextFactories, { tagName: undefined }));
}
|
||||
|
||||
/**
 * Dispatch table for the open-tag-start context. The parsed tag name
 * decides which specialised handler runs; script and style tags need
 * dedicated handling for their raw-text content.
 */
var syntaxHandlers = {
  tagEnd: function tagEnd(state, tokens, contextFactories, options) {
    var tagName = parseOpenTagName(state.accumulatedContent);

    if (tagName === 'script') {
      handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories, options);
    } else if (tagName === 'style') {
      handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories, options);
    } else {
      handleTagEndAfterOpenTagStart(state, tokens, contextFactories, options);
    }
  },
  whitespace: function whitespace(state, tokens, contextFactories, options) {
    var tagName = parseOpenTagName(state.accumulatedContent);

    if (tagName === 'script') {
      handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories, options);
    } else if (tagName === 'style') {
      handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories, options);
    } else {
      handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories, options);
    }
  }
};
|
||||
|
||||
/**
 * Dispatch for the open-tag-start context: ">" or "/" terminates the
 * tag name; whitespace moves into attribute parsing; anything else
 * keeps accumulating the name.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var isTagEnd = chars === '>' || chars === '/';

  if (isTagEnd) {
    return function (state, tokens) {
      return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
    };
  }

  if (isWhitespace(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.whitespace(state, tokens, contextFactories, options);
    };
  }
}
|
||||
|
||||
module.exports = function openTagStartContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: OPEN_TAG_START_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/script-tag-content.factory.js
generated
vendored
Normal file
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/script-tag-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
|
||||
|
||||
/**
 * Handlers for the script-content context. When a closing
 * "</script ...>" sequence is recognised, any buffered raw content is
 * flushed as a TOKEN_SCRIPT_TAG_CONTENT token (skipped when empty) and
 * the buffered closing tag is handed to the close-tag context.
 */
var syntaxHandlers = {
  closingScriptTag: function closingScriptTag(state, tokens, contextFactories) {
    var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'script' });
    var hasContent = state.accumulatedContent !== '';

    if (hasContent) {
      var range = calculateTokenCharactersRange(state, { keepBuffer: false });

      tokens.push({
        type: TOKEN_SCRIPT_TAG_CONTENT,
        content: state.accumulatedContent,
        startPosition: range.startPosition,
        endPosition: range.endPosition
      });
    }

    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = closeTagContext;
  }
};
|
||||
|
||||
// A "</..." sequence that has not reached ">" yet — still ambiguous.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// A complete closing script tag, e.g. "</script>" or "</script   >".
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;

/**
 * Dispatch for the script-content context: hold off (no-op handler)
 * while a potential closing tag is still incomplete, trigger the
 * closing handler once "</script...>" is fully buffered, and
 * otherwise let the character accumulate as raw script content.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var mayBecomeClosingTag = chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars);

  if (mayBecomeClosingTag) {
    // Wait for more characters before deciding.
    return function () {};
  }

  if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.closingScriptTag(state, tokens, contextFactories, options);
    };
  }
}
|
||||
|
||||
module.exports = function scriptTagContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: SCRIPT_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/style-tag-content.factory.js
generated
vendored
Normal file
61
app/node_modules/hyntax/lib-es5/tokenize-contexts/style-tag-content.factory.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
|
||||
|
||||
/**
 * Handlers for the style-content context. When a closing
 * "</style ...>" sequence is recognised, any buffered raw content is
 * flushed as a TOKEN_STYLE_TAG_CONTENT token (skipped when empty) and
 * the buffered closing tag is handed to the close-tag context.
 */
var syntaxHandlers = {
  closingStyleTag: function closingStyleTag(state, tokens, contextFactories) {
    var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'style' });
    var hasContent = state.accumulatedContent !== '';

    if (hasContent) {
      var range = calculateTokenCharactersRange(state, { keepBuffer: false });

      tokens.push({
        type: TOKEN_STYLE_TAG_CONTENT,
        content: state.accumulatedContent,
        startPosition: range.startPosition,
        endPosition: range.endPosition
      });
    }

    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = closeTagContext;
  }
};
|
||||
|
||||
// A "</..." sequence that has not reached ">" yet — still ambiguous.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// A complete closing style tag, e.g. "</style>" or "</style   >".
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;

/**
 * Dispatch for the style-content context: hold off (no-op handler)
 * while a potential closing tag is still incomplete, trigger the
 * closing handler once "</style...>" is fully buffered, and otherwise
 * let the character accumulate as raw style content.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var mayBecomeClosingTag = chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars);

  if (mayBecomeClosingTag) {
    // Wait for more characters before deciding.
    return function () {};
  }

  if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.closingStyleTag(state, tokens, contextFactories, options);
    };
  }
}
|
||||
|
||||
module.exports = function styleTagContentContextFactory(contextFactories, options) {
|
||||
return {
|
||||
factoryName: STYLE_CONTENT_CONTEXT,
|
||||
parseSyntax: function parseSyntax(chars) {
|
||||
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
|
||||
}
|
||||
};
|
||||
};
|
123
app/node_modules/hyntax/lib-es5/tokenize.js
generated
vendored
Normal file
123
app/node_modules/hyntax/lib-es5/tokenize.js
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
"use strict";
|
||||
|
||||
var _contextHandlersMap;
|
||||
|
||||
/**
 * Babel helper: defines `key` on `obj` with `value`. Keys already
 * present (own or inherited) go through Object.defineProperty with
 * enumerable/configurable/writable all true; fresh keys use plain
 * assignment. Returns `obj` for chaining.
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
    return obj;
  }

  obj[key] = value;
  return obj;
}
|
||||
|
||||
var dataContext = require('./tokenizer-context-handlers/data');
|
||||
|
||||
var openTagStartContext = require('./tokenizer-context-handlers/open-tag-start');
|
||||
|
||||
var closeTagContext = require('./tokenizer-context-handlers/close-tag');
|
||||
|
||||
var openTagEndContext = require('./tokenizer-context-handlers/open-tag-end');
|
||||
|
||||
var attributesContext = require('./tokenizer-context-handlers/attributes');
|
||||
|
||||
var attributeKeyContext = require('./tokenizer-context-handlers/attribute-key');
|
||||
|
||||
var attributeValueContext = require('./tokenizer-context-handlers/attribute-value');
|
||||
|
||||
var attributeValueBareContext = require('./tokenizer-context-handlers/attribute-value-bare');
|
||||
|
||||
var attributeValueWrappedContext = require('./tokenizer-context-handlers/attribute-value-wrapped');
|
||||
|
||||
var scriptContentContext = require('./tokenizer-context-handlers/script-tag-content');
|
||||
|
||||
var styleContentContext = require('./tokenizer-context-handlers/style-tag-content');
|
||||
|
||||
var doctypeStartContext = require('./tokenizer-context-handlers/doctype-start');
|
||||
|
||||
var doctypeEndContextFactory = require('./tokenizer-context-handlers/doctype-end');
|
||||
|
||||
var doctypeAttributesContext = require('./tokenizer-context-handlers/doctype-attributes');
|
||||
|
||||
var doctypeAttributeWrappedContext = require('./tokenizer-context-handlers/doctype-attribute-wrapped');
|
||||
|
||||
var doctypeAttributeBareEndContext = require('./tokenizer-context-handlers/doctype-attribute-bare');
|
||||
|
||||
var commentContentContext = require('./tokenizer-context-handlers/comment-content');
|
||||
|
||||
var _require = require('./constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require.DATA_CONTEXT,
|
||||
OPEN_TAG_START_CONTEXT = _require.OPEN_TAG_START_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require.CLOSE_TAG_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTE_KEY_CONTEXT = _require.ATTRIBUTE_KEY_CONTEXT,
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require.ATTRIBUTE_VALUE_BARE_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
SCRIPT_CONTENT_CONTEXT = _require.SCRIPT_CONTENT_CONTEXT,
|
||||
STYLE_CONTENT_CONTEXT = _require.STYLE_CONTENT_CONTEXT,
|
||||
DOCTYPE_START_CONTEXT = _require.DOCTYPE_START_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require.DOCTYPE_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require.DOCTYPE_ATTRIBUTES_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
COMMENT_CONTENT_CONTEXT = _require.COMMENT_CONTENT_CONTEXT;
|
||||
|
||||
// Maps each tokenizer-context name to its context-handlers module.
var contextHandlersMap = {};
contextHandlersMap[DATA_CONTEXT] = dataContext;
contextHandlersMap[OPEN_TAG_START_CONTEXT] = openTagStartContext;
contextHandlersMap[CLOSE_TAG_CONTEXT] = closeTagContext;
contextHandlersMap[ATTRIBUTES_CONTEXT] = attributesContext;
contextHandlersMap[OPEN_TAG_END_CONTEXT] = openTagEndContext;
contextHandlersMap[ATTRIBUTE_KEY_CONTEXT] = attributeKeyContext;
contextHandlersMap[ATTRIBUTE_VALUE_CONTEXT] = attributeValueContext;
contextHandlersMap[ATTRIBUTE_VALUE_BARE_CONTEXT] = attributeValueBareContext;
contextHandlersMap[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = attributeValueWrappedContext;
contextHandlersMap[SCRIPT_CONTENT_CONTEXT] = scriptContentContext;
contextHandlersMap[STYLE_CONTENT_CONTEXT] = styleContentContext;
contextHandlersMap[DOCTYPE_START_CONTEXT] = doctypeStartContext;
contextHandlersMap[DOCTYPE_END_CONTEXT] = doctypeEndContextFactory;
contextHandlersMap[DOCTYPE_ATTRIBUTES_CONTEXT] = doctypeAttributesContext;
contextHandlersMap[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = doctypeAttributeWrappedContext;
contextHandlersMap[DOCTYPE_ATTRIBUTE_BARE_CONTEXT] = doctypeAttributeBareEndContext;
contextHandlersMap[COMMENT_CONTENT_CONTEXT] = commentContentContext;
|
||||
|
||||
/**
 * Feeds `chars` into the tokenizer one character at a time. Each step
 * appends the character to the decision buffer and lets the current
 * context inspect it; contexts advance `state.caretPosition` as they
 * consume characters, which is how the loop index moves forward.
 * On the final chunk, the active context is given a chance to flush
 * whatever content is still accumulated.
 */
function tokenizeChars(chars, state, tokens, _ref) {
  var isFinalChunk = _ref.isFinalChunk;
  var positionOffset = _ref.positionOffset;
  var charIndex = state.caretPosition - positionOffset;

  while (charIndex < chars.length) {
    var context = contextHandlersMap[state.currentContext];

    state.decisionBuffer += chars[charIndex];
    context.parseSyntax(state.decisionBuffer, state, tokens);

    charIndex = state.caretPosition - positionOffset;
  }

  if (isFinalChunk) {
    var finalContext = contextHandlersMap[state.currentContext];

    // Step the caret back: it now points one past the end of `chars`
    // and must not be counted in the final token's character range.
    state.caretPosition--;

    if (finalContext.handleContentEnd !== undefined) {
      finalContext.handleContentEnd(state, tokens);
    }
  }
}
|
||||
|
||||
/**
 * Tokenizes an HTML string, optionally continuing from a previous
 * chunk's state (streaming mode).
 *
 * @param {string} [content=''] HTML chunk to tokenize.
 * @param {Object} [existingState] state returned by a previous call;
 *   omit to start fresh in the data context.
 * @param {Object} [options] `{ isFinalChunk }` — when true (the
 *   default) the active context flushes any remaining content.
 * @returns {{ state: Object, tokens: Array }} tokens found in this
 *   chunk plus the state needed to continue with the next chunk.
 */
function tokenize() {
  var content = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
  var existingState = arguments.length > 1 ? arguments[1] : undefined;

  var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
      isFinalChunk = _ref2.isFinalChunk;

  if (isFinalChunk === undefined) {
    isFinalChunk = true;
  }

  var state;

  if (existingState !== undefined) {
    state = Object.assign({}, existingState);
  } else {
    state = {
      currentContext: DATA_CONTEXT,
      contextParams: {},
      decisionBuffer: '',
      accumulatedContent: '',
      caretPosition: 0
    };
  }

  // Re-feed the unresolved decision buffer together with the new chunk
  // so tokens split across chunk boundaries are parsed correctly.
  var chars = state.decisionBuffer + content;
  var positionOffset = state.caretPosition - state.decisionBuffer.length;
  var tokens = [];

  tokenizeChars(chars, state, tokens, {
    isFinalChunk: isFinalChunk,
    positionOffset: positionOffset
  });

  return {
    state: state,
    tokens: tokens
  };
}

module.exports = tokenize;
|
35
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-assignment.js
generated
vendored
Normal file
35
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-assignment.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
|
||||
|
||||
/**
 * Emits a TOKEN_ATTRIBUTE_ASSIGNMENT token for the "=" sitting in the
 * decision buffer and switches to attribute-value parsing.
 */
function equal(state, tokens) {
  // keepBuffer: the "=" itself is the token's content.
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  var token = {
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };

  tokens.push(token);

  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '=') {
|
||||
return equal(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
43
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-key.js
generated
vendored
Normal file
43
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-key.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
/**
 * Flushes the accumulated attribute key as a TOKEN_ATTRIBUTE_KEY
 * token (decision buffer excluded from the range) and switches back
 * to the generic attributes context.
 */
function keyEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });
  var token = {
    type: TOKEN_ATTRIBUTE_KEY,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };

  tokens.push(token);

  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}
|
||||
|
||||
/**
 * True for any single character that terminates an attribute key:
 * "=", space, newline, tab, "/" or ">".
 */
function isKeyBreak(chars) {
  return ['=', ' ', '\n', '\t', '/', '>'].indexOf(chars) !== -1;
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (isKeyBreak(chars)) {
|
||||
return keyEnd(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-bare.js
generated
vendored
Normal file
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-bare.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange,
|
||||
isWhitespace = _require.isWhitespace;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
function valueEnd(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTES_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (isWhitespace(chars) || chars === '>' || chars === '/') {
|
||||
return valueEnd(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped-end.js
generated
vendored
Normal file
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped-end.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTES_CONTEXT;
|
||||
|
||||
delete state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT];
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
37
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped-start.js
generated
vendored
Normal file
37
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped-start.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
51
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped.js
generated
vendored
Normal file
51
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value-wrapped.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE,
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT,
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
var endWrapperPosition = range.endPosition + 1;
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
}, {
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: endWrapperPosition,
|
||||
endPosition: endWrapperPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTES_CONTEXT;
|
||||
state.caretPosition++;
|
||||
state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = undefined;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
63
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value.js
generated
vendored
Normal file
63
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attribute-value.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace;
|
||||
|
||||
var _require2 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require2.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
|
||||
ATTRIBUTE_VALUE_BARE_CONTEXT = _require2.ATTRIBUTE_VALUE_BARE_CONTEXT;
|
||||
|
||||
var _require3 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require3.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var wrapper = state.decisionBuffer;
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
|
||||
content: wrapper,
|
||||
startPosition: state.caretPosition,
|
||||
endPosition: state.caretPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
|
||||
state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = {
|
||||
wrapper: wrapper
|
||||
};
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function bare(state) {
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTE_VALUE_BARE_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function tagEnd(state) {
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTES_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '"' || chars === '\'') {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '>' || chars === '/') {
|
||||
return tagEnd(state, tokens);
|
||||
}
|
||||
|
||||
if (!isWhitespace(chars)) {
|
||||
return bare(state, tokens);
|
||||
}
|
||||
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
69
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attributes.js
generated
vendored
Normal file
69
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/attributes.js
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/tokenizer-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
|
||||
OPEN_TAG_END_CONTEXT = _require2.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require2.ATTRIBUTE_VALUE_CONTEXT,
|
||||
ATTRIBUTE_KEY_CONTEXT = _require2.ATTRIBUTE_KEY_CONTEXT;
|
||||
|
||||
var _require3 = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require3.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
function tagEnd(state) {
|
||||
var tagName = state.contextParams[ATTRIBUTES_CONTEXT].tagName;
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = OPEN_TAG_END_CONTEXT;
|
||||
state.contextParams[OPEN_TAG_END_CONTEXT] = {
|
||||
tagName: tagName
|
||||
};
|
||||
state.contextParams[ATTRIBUTES_CONTEXT] = undefined;
|
||||
}
|
||||
|
||||
function noneWhitespace(state) {
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTE_KEY_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function equal(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: true
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '>' || chars === '/') {
|
||||
return tagEnd(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '=') {
|
||||
return equal(state, tokens);
|
||||
}
|
||||
|
||||
if (!isWhitespace(chars)) {
|
||||
return noneWhitespace(state, tokens);
|
||||
}
|
||||
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/close-tag.js
generated
vendored
Normal file
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/close-tag.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
function closingCornerBrace(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: true
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_CLOSE_TAG,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '>') {
|
||||
return closingCornerBrace(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-content.js
generated
vendored
Normal file
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-content.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END,
|
||||
TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
var COMMENT_END = '-->';
|
||||
|
||||
function commentEnd(state, tokens) {
|
||||
var contentRange = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
var commentEndRange = {
|
||||
startPosition: contentRange.endPosition + 1,
|
||||
endPosition: contentRange.endPosition + COMMENT_END.length
|
||||
};
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: contentRange.startPosition,
|
||||
endPosition: contentRange.endPosition
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: commentEndRange.startPosition,
|
||||
endPosition: commentEndRange.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '-' || chars === '--') {
|
||||
state.caretPosition++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (chars === COMMENT_END) {
|
||||
return commentEnd(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
38
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-end.js
generated
vendored
Normal file
38
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-end.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
function commentEnd(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_END,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '>') {
|
||||
return commentEnd(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
35
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-start.js
generated
vendored
Normal file
35
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/comment-start.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
|
||||
|
||||
function commentStart(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_START,
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = COMMENT_CONTENT_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '<!--') {
|
||||
return commentStart(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
142
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/data.js
generated
vendored
Normal file
142
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/data.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_TEXT = _require2.TOKEN_TEXT,
|
||||
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
|
||||
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
|
||||
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
|
||||
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
|
||||
|
||||
var COMMENT_START = '<!--';
|
||||
|
||||
function generateTextToken(state) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
return {
|
||||
type: TOKEN_TEXT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
};
|
||||
}
|
||||
|
||||
function openingCornerBraceWithText(state, tokens) {
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = OPEN_TAG_START_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function openingCornerBraceWithSlash(state, tokens) {
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = CLOSE_TAG_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function doctypeStart(state, tokens) {
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_START_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function commentStart(state, tokens) {
|
||||
if (state.accumulatedContent.length !== 0) {
|
||||
tokens.push(generateTextToken(state));
|
||||
}
|
||||
|
||||
var commentStartRange = {
|
||||
startPosition: state.caretPosition - (COMMENT_START.length - 1),
|
||||
endPosition: state.caretPosition
|
||||
};
|
||||
tokens.push({
|
||||
type: TOKEN_COMMENT_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: commentStartRange.startPosition,
|
||||
endPosition: commentStartRange.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = COMMENT_CONTENT_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function handleContentEnd(state, tokens) {
|
||||
var textContent = state.accumulatedContent + state.decisionBuffer;
|
||||
|
||||
if (textContent.length !== 0) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_TEXT,
|
||||
content: textContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function isIncompleteDoctype(chars) {
|
||||
var charsUpperCase = chars.toUpperCase();
|
||||
return charsUpperCase === '<!' || charsUpperCase === '<!D' || charsUpperCase === '<!DO' || charsUpperCase === '<!DOC' || charsUpperCase === '<!DOCT' || charsUpperCase === '<!DOCTY' || charsUpperCase === '<!DOCTYP';
|
||||
}
|
||||
|
||||
var OPEN_TAG_START_PATTERN = /^<\w/;
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (OPEN_TAG_START_PATTERN.test(chars)) {
|
||||
return openingCornerBraceWithText(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '</') {
|
||||
return openingCornerBraceWithSlash(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '<' || chars === '<!' || chars === '<!-') {
|
||||
state.caretPosition++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (chars === COMMENT_START) {
|
||||
return commentStart(state, tokens);
|
||||
}
|
||||
|
||||
if (isIncompleteDoctype(chars)) {
|
||||
state.caretPosition++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (chars.toUpperCase() === '<!DOCTYPE') {
|
||||
return doctypeStart(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax,
|
||||
handleContentEnd: handleContentEnd
|
||||
};
|
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-bare.js
generated
vendored
Normal file
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-bare.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function attributeEnd(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (isWhitespace(chars) || chars === '>') {
|
||||
return attributeEnd(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped-end.js
generated
vendored
Normal file
40
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped-end.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
delete state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT];
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
37
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped-start.js
generated
vendored
Normal file
37
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped-start.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
'use strict';
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
52
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped.js
generated
vendored
Normal file
52
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attribute-wrapped.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
var endWrapperPosition = range.endPosition + 1;
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: endWrapperPosition,
|
||||
endPosition: endWrapperPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
state.caretPosition++;
|
||||
state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = undefined;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
|
||||
|
||||
if (chars === wrapperChar) {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
63
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attributes.js
generated
vendored
Normal file
63
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-attributes.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace;
|
||||
|
||||
var _require2 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
|
||||
DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
|
||||
|
||||
var _require3 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require3.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
function wrapper(state, tokens) {
|
||||
var wrapper = state.decisionBuffer;
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
|
||||
content: wrapper,
|
||||
startPosition: state.caretPosition,
|
||||
endPosition: state.caretPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
|
||||
state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = {
|
||||
wrapper: wrapper
|
||||
};
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function bare(state) {
|
||||
state.accumulatedContent = state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTE_BARE_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function closingCornerBrace(state) {
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_END_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '"' || chars === '\'') {
|
||||
return wrapper(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '>') {
|
||||
return closingCornerBrace(state, tokens);
|
||||
}
|
||||
|
||||
if (!isWhitespace(chars)) {
|
||||
return bare(state, tokens);
|
||||
}
|
||||
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
34
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-end.js
generated
vendored
Normal file
34
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-end.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
function closingCornerBrace(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: true
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_DOCTYPE_END,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
return closingCornerBrace(state, tokens);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
55
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-start.js
generated
vendored
Normal file
55
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/doctype-start.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function generateDoctypeStartToken(state) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
return {
|
||||
type: TOKEN_DOCTYPE_START,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
};
|
||||
}
|
||||
|
||||
function closingCornerBrace(state, tokens) {
|
||||
tokens.push(generateDoctypeStartToken(state));
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_END_CONTEXT;
|
||||
}
|
||||
|
||||
function whitespace(state, tokens) {
|
||||
tokens.push(generateDoctypeStartToken(state));
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (isWhitespace(chars)) {
|
||||
return whitespace(state, tokens);
|
||||
}
|
||||
|
||||
if (chars === '>') {
|
||||
return closingCornerBrace(state, tokens);
|
||||
}
|
||||
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/open-tag-end.js
generated
vendored
Normal file
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/open-tag-end.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT,
|
||||
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
|
||||
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
|
||||
|
||||
var tokensMap = {
|
||||
'script': TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
'style': TOKEN_OPEN_TAG_END_STYLE,
|
||||
'default': TOKEN_OPEN_TAG_END
|
||||
};
|
||||
var contextsMap = {
|
||||
'script': SCRIPT_CONTENT_CONTEXT,
|
||||
'style': STYLE_CONTENT_CONTEXT,
|
||||
'default': DATA_CONTEXT
|
||||
};
|
||||
|
||||
function closingCornerBrace(state, tokens) {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: true
|
||||
});
|
||||
var tagName = state.contextParams[OPEN_TAG_END_CONTEXT].tagName;
|
||||
tokens.push({
|
||||
type: tokensMap[tagName] || tokensMap["default"],
|
||||
content: state.accumulatedContent + state.decisionBuffer,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = contextsMap[tagName] || contextsMap["default"];
|
||||
state.caretPosition++;
|
||||
state.contextParams[OPEN_TAG_END_CONTEXT] = undefined;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '>') {
|
||||
return closingCornerBrace(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
78
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/open-tag-start.js
generated
vendored
Normal file
78
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/open-tag-start.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
parseOpenTagName = _require.parseOpenTagName,
|
||||
isWhitespace = _require.isWhitespace,
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
|
||||
TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
|
||||
|
||||
var tokensMap = {
|
||||
'script': TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
'style': TOKEN_OPEN_TAG_START_STYLE,
|
||||
'default': TOKEN_OPEN_TAG_START
|
||||
};
|
||||
|
||||
function tagEnd(state, tokens) {
|
||||
var tagName = parseOpenTagName(state.accumulatedContent);
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: tokensMap[tagName] || tokensMap["default"],
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.decisionBuffer = '';
|
||||
state.accumulatedContent = '';
|
||||
state.currentContext = OPEN_TAG_END_CONTEXT;
|
||||
state.contextParams[OPEN_TAG_END_CONTEXT] = {
|
||||
tagName: tagName
|
||||
};
|
||||
}
|
||||
|
||||
function whitespace(state, tokens) {
|
||||
var tagName = parseOpenTagName(state.accumulatedContent);
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: tokensMap[tagName] || tokensMap["default"],
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = ATTRIBUTES_CONTEXT;
|
||||
state.contextParams[ATTRIBUTES_CONTEXT] = {
|
||||
tagName: tagName
|
||||
};
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '>' || chars === '/') {
|
||||
return tagEnd(state, tokens);
|
||||
}
|
||||
|
||||
if (isWhitespace(chars)) {
|
||||
return whitespace(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/script-tag-content.js
generated
vendored
Normal file
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/script-tag-content.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT,
|
||||
TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
function closingScriptTag(state, tokens) {
|
||||
if (state.accumulatedContent !== '') {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_SCRIPT_TAG_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_CLOSE_TAG_SCRIPT,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
|
||||
endPosition: state.caretPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
|
||||
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
|
||||
state.caretPosition++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
|
||||
return closingScriptTag(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/style-tag-content.js
generated
vendored
Normal file
58
app/node_modules/hyntax/lib-es5/tokenizer-context-handlers/style-tag-content.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../helpers'),
|
||||
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT,
|
||||
TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
|
||||
|
||||
var _require3 = require('../constants/tokenizer-contexts'),
|
||||
DATA_CONTEXT = _require3.DATA_CONTEXT;
|
||||
|
||||
function closingStyleTag(state, tokens) {
|
||||
if (state.accumulatedContent !== '') {
|
||||
var range = calculateTokenCharactersRange(state, {
|
||||
keepBuffer: false
|
||||
});
|
||||
tokens.push({
|
||||
type: TOKEN_STYLE_TAG_CONTENT,
|
||||
content: state.accumulatedContent,
|
||||
startPosition: range.startPosition,
|
||||
endPosition: range.endPosition
|
||||
});
|
||||
}
|
||||
|
||||
tokens.push({
|
||||
type: TOKEN_CLOSE_TAG_STYLE,
|
||||
content: state.decisionBuffer,
|
||||
startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
|
||||
endPosition: state.caretPosition
|
||||
});
|
||||
state.accumulatedContent = '';
|
||||
state.decisionBuffer = '';
|
||||
state.currentContext = DATA_CONTEXT;
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
|
||||
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;
|
||||
|
||||
function parseSyntax(chars, state, tokens) {
|
||||
if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
|
||||
state.caretPosition++;
|
||||
return;
|
||||
}
|
||||
|
||||
if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
|
||||
return closingStyleTag(state, tokens);
|
||||
}
|
||||
|
||||
state.accumulatedContent += state.decisionBuffer;
|
||||
state.decisionBuffer = '';
|
||||
state.caretPosition++;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parseSyntax: parseSyntax
|
||||
};
|
65
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attribute-value.js
generated
vendored
Normal file
65
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attribute-value.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
TOKEN_ATTRIBUTE_VALUE = _require.TOKEN_ATTRIBUTE_VALUE,
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
|
||||
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
|
||||
|
||||
function getLastAttribute(state) {
|
||||
var attributes = state.currentNode.content.attributes;
|
||||
return attributes[attributes.length - 1];
|
||||
}
|
||||
|
||||
function handleValueEnd(state) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeValue(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
attribute.value = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeValueWrapperStart(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
attribute.startWrapper = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeValueWrapperEnd(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
attribute.endWrapper = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function attributeValue(token, state) {
|
||||
var VALUE_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE, TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
|
||||
|
||||
if (VALUE_END_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleValueEnd(state);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_ATTRIBUTE_VALUE) {
|
||||
return handleAttributeValue(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_START) {
|
||||
return handleAttributeValueWrapperStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_END) {
|
||||
return handleAttributeValueWrapperEnd(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
69
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attribute.js
generated
vendored
Normal file
69
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attribute.js
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
ATTRIBUTE_VALUE_CONTEXT = _require2.ATTRIBUTE_VALUE_CONTEXT;
|
||||
|
||||
function getLastAttribute(state) {
|
||||
var attributes = state.currentNode.content.attributes;
|
||||
return attributes[attributes.length - 1];
|
||||
}
|
||||
|
||||
function handleOpenTagEnd(state) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeKey(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
|
||||
if (attribute.key !== undefined || attribute.value !== undefined) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
attribute.key = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeAssignment(state) {
|
||||
var attribute = getLastAttribute(state);
|
||||
|
||||
if (attribute.value !== undefined) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
state.currentContext = {
|
||||
parentRef: state.currentContext,
|
||||
type: ATTRIBUTE_VALUE_CONTEXT
|
||||
};
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function attribute(token, state) {
|
||||
var OPEN_TAG_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE];
|
||||
|
||||
if (OPEN_TAG_END_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleOpenTagEnd(state);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_ATTRIBUTE_KEY) {
|
||||
return handleAttributeKey(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_ATTRIBUTE_ASSIGNMENT) {
|
||||
return handleAttributeAssignment(state);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
47
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attributes.js
generated
vendored
Normal file
47
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attributes.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
ATTRIBUTE_CONTEXT = _require2.ATTRIBUTE_CONTEXT;
|
||||
|
||||
function handlerAttributeStart(state) {
|
||||
if (state.currentNode.content.attributes === undefined) {
|
||||
state.currentNode.content.attributes = [];
|
||||
} // new empty attribute
|
||||
|
||||
|
||||
state.currentNode.content.attributes.push({});
|
||||
state.currentContext = {
|
||||
parentRef: state.currentContext,
|
||||
type: ATTRIBUTE_CONTEXT
|
||||
};
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleOpenTagEnd(state) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function attributes(token, state) {
|
||||
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
|
||||
|
||||
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handlerAttributeStart(state);
|
||||
}
|
||||
|
||||
var ATTRIBUTES_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE];
|
||||
|
||||
if (ATTRIBUTES_END_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleOpenTagEnd(state);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
43
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/comment.js
generated
vendored
Normal file
43
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/comment.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_COMMENT_START = _require.TOKEN_COMMENT_START,
|
||||
TOKEN_COMMENT_END = _require.TOKEN_COMMENT_END,
|
||||
TOKEN_COMMENT_CONTENT = _require.TOKEN_COMMENT_CONTENT;
|
||||
|
||||
function handleCommentStart(state, token) {
|
||||
state.currentNode.content.start = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleCommentContent(state, token) {
|
||||
state.currentNode.content.value = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleCommentEnd(state, token) {
|
||||
state.currentNode.content.end = token;
|
||||
state.currentNode = state.currentNode.parentRef;
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function comment(token, state) {
|
||||
if (token.type === TOKEN_COMMENT_START) {
|
||||
return handleCommentStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_COMMENT_CONTENT) {
|
||||
return handleCommentContent(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_COMMENT_END) {
|
||||
return handleCommentEnd(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
72
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype-attribute.js
generated
vendored
Normal file
72
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype-attribute.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require.TOKEN_DOCTYPE_END,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require.TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
|
||||
|
||||
function getLastAttribute(state) {
|
||||
var attributes = state.currentNode.content.attributes;
|
||||
return attributes[attributes.length - 1];
|
||||
}
|
||||
|
||||
function handleDoctypeEnd(state) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeValue(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
|
||||
if (attribute.value !== undefined) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
attribute.value = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeWrapperStart(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
|
||||
if (attribute.start !== undefined || attribute.value !== undefined) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
attribute.startWrapper = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeWrapperEnd(state, token) {
|
||||
var attribute = getLastAttribute(state);
|
||||
attribute.endWrapper = token;
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttribute(token, state) {
|
||||
if (token.type === TOKEN_DOCTYPE_END) {
|
||||
return handleDoctypeEnd(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START) {
|
||||
return handleAttributeWrapperStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END) {
|
||||
return handleAttributeWrapperEnd(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE) {
|
||||
return handleAttributeValue(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
43
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype-attributes.js
generated
vendored
Normal file
43
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype-attributes.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/tree-constructor-contexts'),
|
||||
DOCTYPE_ATTRIBUTE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_CONTEXT;
|
||||
|
||||
var _require2 = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
function handleDoctypeEnd(state) {
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttribute(state) {
|
||||
if (state.currentNode.content.attributes === undefined) {
|
||||
state.currentNode.content.attributes = [];
|
||||
} // new empty attribute
|
||||
|
||||
|
||||
state.currentNode.content.attributes.push({});
|
||||
state.currentContext = {
|
||||
type: DOCTYPE_ATTRIBUTE_CONTEXT,
|
||||
parentRef: state.currentContext
|
||||
};
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function doctypeAttributes(token, state) {
|
||||
if (token.type === TOKEN_DOCTYPE_END) {
|
||||
return handleDoctypeEnd(state, token);
|
||||
}
|
||||
|
||||
var ATTRIBUTE_START_TOKENS = [TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START, TOKEN_DOCTYPE_ATTRIBUTE];
|
||||
|
||||
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleAttribute(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
51
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype.js
generated
vendored
Normal file
51
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/doctype.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_DOCTYPE_END = _require.TOKEN_DOCTYPE_END,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE = _require.TOKEN_DOCTYPE_ATTRIBUTE,
|
||||
TOKEN_DOCTYPE_START = _require.TOKEN_DOCTYPE_START,
|
||||
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
DOCTYPE_ATTRIBUTES_CONTEXT = _require2.DOCTYPE_ATTRIBUTES_CONTEXT;
|
||||
|
||||
function handleDoctypeStart(state, token) {
|
||||
state.currentNode.content.start = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleDoctypeEnd(state, token) {
|
||||
state.currentNode.content.end = token;
|
||||
state.currentNode = state.currentNode.parentRef;
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleDoctypeAttributes(state) {
|
||||
state.currentContext = {
|
||||
parentRef: state.currentContext,
|
||||
type: DOCTYPE_ATTRIBUTES_CONTEXT
|
||||
};
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function doctype(token, state) {
|
||||
if (token.type === TOKEN_DOCTYPE_START) {
|
||||
return handleDoctypeStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_DOCTYPE_END) {
|
||||
return handleDoctypeEnd(state, token);
|
||||
}
|
||||
|
||||
var ATTRIBUTES_START_TOKENS = [TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START, TOKEN_DOCTYPE_ATTRIBUTE];
|
||||
|
||||
if (ATTRIBUTES_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleDoctypeAttributes(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
73
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/script-tag.js
generated
vendored
Normal file
73
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/script-tag.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START_SCRIPT = _require.TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
|
||||
TOKEN_CLOSE_TAG_SCRIPT = _require.TOKEN_CLOSE_TAG_SCRIPT,
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
TOKEN_SCRIPT_TAG_CONTENT = _require.TOKEN_SCRIPT_TAG_CONTENT;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT;
|
||||
|
||||
function handleOpenTagStartScript(state, token) {
|
||||
state.currentNode.content.openStart = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleAttributeStartScript(state) {
|
||||
state.currentContext = {
|
||||
parentRef: state.currentContext,
|
||||
type: ATTRIBUTES_CONTEXT
|
||||
};
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleOpenTagEndScript(state, token) {
|
||||
state.currentNode.content.openEnd = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleScriptContent(state, token) {
|
||||
state.currentNode.content.value = token;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
function handleCloseTagScript(state, token) {
|
||||
state.currentNode.content.close = token;
|
||||
state.currentNode = state.currentNode.parentRef;
|
||||
state.currentContext = state.currentContext.parentRef;
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
}
|
||||
|
||||
module.exports = function scriptTag(token, state) {
|
||||
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
|
||||
return handleOpenTagStartScript(state, token);
|
||||
}
|
||||
|
||||
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
|
||||
|
||||
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleAttributeStartScript(state);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_OPEN_TAG_END_SCRIPT) {
|
||||
return handleOpenTagEndScript(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_SCRIPT_TAG_CONTENT) {
|
||||
return handleScriptContent(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_CLOSE_TAG_SCRIPT) {
|
||||
return handleCloseTagScript(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
73
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/style-tag.js
generated
vendored
Normal file
73
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/style-tag.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START_STYLE = _require.TOKEN_OPEN_TAG_START_STYLE,
|
||||
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
|
||||
TOKEN_CLOSE_TAG_STYLE = _require.TOKEN_CLOSE_TAG_STYLE,
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
|
||||
TOKEN_STYLE_TAG_CONTENT = _require.TOKEN_STYLE_TAG_CONTENT;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT;
|
||||
|
||||
/**
 * Records the '<style' opening token on the current node and consumes it.
 */
function handleOpenTagStartStyle(state, token) {
  var content = state.currentNode.content;

  content.openStart = token;
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
/**
 * Enters a nested attributes context. The caret is deliberately not
 * advanced, so the attribute token is reprocessed by the new context.
 */
function handleAttributeStartStyle(state) {
  var attributesContext = {
    parentRef: state.currentContext,
    type: ATTRIBUTES_CONTEXT
  };

  state.currentContext = attributesContext;

  return state;
}
|
||||
|
||||
/**
 * Records the '>' token that closes the style open tag and consumes it.
 */
function handleOpenTagEndStyle(state, token) {
  var content = state.currentNode.content;

  content.openEnd = token;
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
/**
 * Records the style element's raw content token on the current node
 * and consumes the token.
 */
function handleStyleContent(state, token) {
  var content = state.currentNode.content;

  content.value = token;
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
/**
 * Records the closing tag token on the current style node, then pops
 * both the current node and the current context back to their parents.
 */
function handleCloseTagStyle(state, token) {
  var styleNode = state.currentNode;

  styleNode.content.close = token;
  state.currentNode = styleNode.parentRef;
  state.currentContext = state.currentContext.parentRef;
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
module.exports = function styleTag(token, state) {
|
||||
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
|
||||
return handleOpenTagStartStyle(state, token);
|
||||
}
|
||||
|
||||
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
|
||||
|
||||
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleAttributeStartStyle(state);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_OPEN_TAG_END_STYLE) {
|
||||
return handleOpenTagEndStyle(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_STYLE_TAG_CONTENT) {
|
||||
return handleStyleContent(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_CLOSE_TAG_STYLE) {
|
||||
return handleCloseTagStyle(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
184
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag-content.js
generated
vendored
Normal file
184
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag-content.js
generated
vendored
Normal file
@@ -0,0 +1,184 @@
|
||||
"use strict";
|
||||
|
||||
var parseCloseTagName = require('../helpers').parseCloseTagName;
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START,
|
||||
TOKEN_CLOSE_TAG = _require.TOKEN_CLOSE_TAG,
|
||||
TOKEN_COMMENT_START = _require.TOKEN_COMMENT_START,
|
||||
TOKEN_DOCTYPE_START = _require.TOKEN_DOCTYPE_START,
|
||||
TOKEN_TEXT = _require.TOKEN_TEXT,
|
||||
TOKEN_OPEN_TAG_START_SCRIPT = _require.TOKEN_OPEN_TAG_START_SCRIPT,
|
||||
TOKEN_OPEN_TAG_START_STYLE = _require.TOKEN_OPEN_TAG_START_STYLE;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
TAG_CONTEXT = _require2.TAG_CONTEXT,
|
||||
COMMENT_CONTEXT = _require2.COMMENT_CONTEXT,
|
||||
DOCTYPE_CONTEXT = _require2.DOCTYPE_CONTEXT,
|
||||
SCRIPT_TAG_CONTEXT = _require2.SCRIPT_TAG_CONTEXT,
|
||||
STYLE_TAG_CONTEXT = _require2.STYLE_TAG_CONTEXT;
|
||||
|
||||
var _require3 = require('../constants/ast-nodes'),
|
||||
NODE_TAG = _require3.NODE_TAG,
|
||||
NODE_TEXT = _require3.NODE_TEXT,
|
||||
NODE_DOCTYPE = _require3.NODE_DOCTYPE,
|
||||
NODE_COMMENT = _require3.NODE_COMMENT,
|
||||
NODE_SCRIPT = _require3.NODE_SCRIPT,
|
||||
NODE_STYLE = _require3.NODE_STYLE;
|
||||
|
||||
/**
 * Appends a new tag node to the current node's children, makes it the
 * current node and enters the tag context. The caret is not advanced,
 * so the open-tag-start token is reprocessed in the new context.
 */
function handleOpenTagStart(state) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  var tagNode = {
    nodeType: NODE_TAG,
    parentRef: parent,
    content: {}
  };

  parent.content.children.push(tagNode);
  state.currentNode = tagNode;
  state.currentContext = {
    parentRef: state.currentContext,
    type: TAG_CONTEXT
  };

  return state;
}
|
||||
|
||||
/**
 * Handles a close-tag token inside tag content. If the close tag's name
 * matches the current node, pop back to the parent context without
 * consuming the token (the parent context records it); otherwise the
 * unmatched close tag is simply skipped.
 */
function handleCloseTag(state, token) {
  var closingName = parseCloseTagName(token.content);

  if (closingName === state.currentNode.content.name) {
    state.currentContext = state.currentContext.parentRef;
    return state;
  }

  // Unmatched close tag — ignore it and move on.
  state.caretPosition++;
  return state;
}
|
||||
|
||||
/**
 * Appends a new comment node to the current node's children, makes it
 * the current node and enters the comment context. The caret is not
 * advanced, so the comment-start token is reprocessed in that context.
 */
function handleCommentStart(state) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  var commentNode = {
    nodeType: NODE_COMMENT,
    parentRef: parent,
    content: {}
  };

  parent.content.children.push(commentNode);
  state.currentNode = commentNode;
  state.currentContext = {
    parentRef: state.currentContext,
    type: COMMENT_CONTEXT
  };

  return state;
}
|
||||
|
||||
/**
 * Appends a new doctype node to the current node's children, makes it
 * the current node and enters the doctype context. The caret is not
 * advanced, so the doctype-start token is reprocessed in that context.
 */
function handleDoctypeStart(state) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  var doctypeNode = {
    nodeType: NODE_DOCTYPE,
    parentRef: parent,
    content: {}
  };

  parent.content.children.push(doctypeNode);
  state.currentNode = doctypeNode;
  state.currentContext = {
    parentRef: state.currentContext,
    type: DOCTYPE_CONTEXT
  };

  return state;
}
|
||||
|
||||
/**
 * Appends a text node holding the token to the current node's children
 * and consumes the token. Text nodes are leaves, so neither the current
 * node nor the current context changes.
 */
function handleText(state, token) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  parent.content.children.push({
    nodeType: NODE_TEXT,
    parentRef: parent,
    content: {
      value: token
    }
  });
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
/**
 * Appends a new script node to the current node's children, makes it
 * the current node and enters the script-tag context. The caret is not
 * advanced, so the token is reprocessed in that context.
 */
function handleOpenTagStartScript(state) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  var scriptNode = {
    nodeType: NODE_SCRIPT,
    parentRef: parent,
    content: {}
  };

  parent.content.children.push(scriptNode);
  state.currentNode = scriptNode;
  state.currentContext = {
    type: SCRIPT_TAG_CONTEXT,
    parentRef: state.currentContext
  };

  return state;
}
|
||||
|
||||
/**
 * Appends a new style node to the current node's children, makes it
 * the current node and enters the style-tag context. The caret is not
 * advanced, so the token is reprocessed in that context.
 */
function handleOpenTagStartStyle(state) {
  var parent = state.currentNode;

  if (parent.content.children === undefined) {
    parent.content.children = [];
  }

  var styleNode = {
    nodeType: NODE_STYLE,
    parentRef: parent,
    content: {}
  };

  parent.content.children.push(styleNode);
  state.currentNode = styleNode;
  state.currentContext = {
    type: STYLE_TAG_CONTEXT,
    parentRef: state.currentContext
  };

  return state;
}
|
||||
|
||||
module.exports = function tagContent(token, state) {
|
||||
if (token.type === TOKEN_OPEN_TAG_START) {
|
||||
return handleOpenTagStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_TEXT) {
|
||||
return handleText(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_CLOSE_TAG) {
|
||||
return handleCloseTag(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_COMMENT_START) {
|
||||
return handleCommentStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_DOCTYPE_START) {
|
||||
return handleDoctypeStart(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
|
||||
return handleOpenTagStartScript(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
|
||||
return handleOpenTagStartStyle(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
27
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag-name.js
generated
vendored
Normal file
27
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag-name.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* Parser for 'tag-name' context.
|
||||
* Parses tag name from 'open-tag-start' (<div)
|
||||
* token and save the tag name as self content.
|
||||
* Ignores tokens others than 'open-tag-start'.
|
||||
*/
|
||||
var parseOpenTagName = require('../helpers').parseOpenTagName;
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START;
|
||||
|
||||
/**
 * Extracts the tag name from an 'open-tag-start' token and stores it on
 * the current node, then pops back to the parent context. The caret is
 * advanced by the caller (tagName), not here.
 */
function handleTagOpenStart(state, token) {
  var name = parseOpenTagName(token.content);

  state.currentNode.content.name = name;
  state.currentContext = state.currentContext.parentRef;

  return state;
}
|
||||
|
||||
module.exports = function tagName(token, state) {
|
||||
if (token.type === TOKEN_OPEN_TAG_START) {
|
||||
handleTagOpenStart(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
83
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag.js
generated
vendored
Normal file
83
app/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/tag.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
"use strict";
|
||||
|
||||
var _require = require('../constants/token-types'),
|
||||
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START,
|
||||
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
|
||||
TOKEN_CLOSE_TAG = _require.TOKEN_CLOSE_TAG,
|
||||
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
|
||||
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT;
|
||||
|
||||
var _require2 = require('../constants/tree-constructor-contexts'),
|
||||
TAG_NAME_CONTEXT = _require2.TAG_NAME_CONTEXT,
|
||||
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
|
||||
TAG_CONTENT_CONTEXT = _require2.TAG_CONTENT_CONTEXT;
|
||||
|
||||
/**
 * Records the open-tag-start token on the current node and enters the
 * tag-name context. The caret is not advanced, so the same token is
 * reprocessed there to extract the name.
 */
function handleOpenTagStart(state, token) {
  var content = state.currentNode.content;

  content.openStart = token;
  state.currentContext = {
    parentRef: state.currentContext,
    type: TAG_NAME_CONTEXT
  };

  return state;
}
|
||||
|
||||
/**
 * Enters a nested attributes context. The caret is deliberately not
 * advanced, so the attribute token is reprocessed by the new context.
 */
function handleAttributeStart(state) {
  var attributesContext = {
    parentRef: state.currentContext,
    type: ATTRIBUTES_CONTEXT
  };

  state.currentContext = attributesContext;

  return state;
}
|
||||
|
||||
/**
 * Records the token that ends the open tag. HTML void elements are
 * marked self-closing and closed immediately (node and context pop back
 * to their parents); all other tags enter the tag-content context.
 */
function handleOpenTagEnd(state, token) {
  var SELF_CLOSING_TAGS = ['area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'meta', 'param', 'source', 'track', 'wbr'];
  var tagNode = state.currentNode;

  tagNode.content.openEnd = token;

  var isVoidElement = SELF_CLOSING_TAGS.indexOf(tagNode.content.name) !== -1;

  if (isVoidElement) {
    tagNode.content.selfClosing = true;
    state.currentNode = tagNode.parentRef;
    state.currentContext = state.currentContext.parentRef;
  } else {
    tagNode.content.selfClosing = false;
    state.currentContext = {
      parentRef: state.currentContext,
      type: TAG_CONTENT_CONTEXT
    };
  }

  state.caretPosition++;
  return state;
}
|
||||
|
||||
/**
 * Records the close-tag token on the current tag node, then pops both
 * the current node and the current context back to their parents.
 */
function handleCloseTag(state, token) {
  var tagNode = state.currentNode;

  tagNode.content.close = token;
  state.currentNode = tagNode.parentRef;
  state.currentContext = state.currentContext.parentRef;
  state.caretPosition += 1;

  return state;
}
|
||||
|
||||
module.exports = function tag(token, state) {
|
||||
if (token.type === TOKEN_OPEN_TAG_START) {
|
||||
return handleOpenTagStart(state, token);
|
||||
}
|
||||
|
||||
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
|
||||
|
||||
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
|
||||
return handleAttributeStart(state);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_OPEN_TAG_END) {
|
||||
return handleOpenTagEnd(state, token);
|
||||
}
|
||||
|
||||
if (token.type === TOKEN_CLOSE_TAG) {
|
||||
return handleCloseTag(state, token);
|
||||
}
|
||||
|
||||
state.caretPosition++;
|
||||
return state;
|
||||
};
|
Reference in New Issue
Block a user