v1.0 du site web

This commit is contained in:
22107988t
2023-09-25 13:27:24 +02:00
parent 20cb812095
commit a94f68f22a
2787 changed files with 864804 additions and 0 deletions

3
app/node_modules/hyntax/.babelrc generated vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"presets": ["@babel/preset-env"]
}

25
app/node_modules/hyntax/.github/workflows/test.yml generated vendored Normal file
View File

@@ -0,0 +1,25 @@
name: Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [10.x, 12.x, 14.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: npm ci
- run: npm test

View File

@@ -0,0 +1,7 @@
<component name="ProjectDictionaryState">
<dictionary name="harmash">
<words>
<w>dirname</w>
</words>
</dictionary>
</component>

4
app/node_modules/hyntax/.idea/encodings.xml generated vendored Normal file
View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding" addBOMForNewFiles="with NO BOM" />
</project>

14
app/node_modules/hyntax/.idea/html-parser.iml generated vendored Normal file
View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
<excludeFolder url="file://$MODULE_DIR$/temp" />
<excludeFolder url="file://$MODULE_DIR$/tmp" />
<excludeFolder url="file://$MODULE_DIR$/workspace/lib-es5" />
<excludeFolder url="file://$MODULE_DIR$/workspace/tmp" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="Eslint" enabled="true" level="ERROR" enabled_by_default="true" />
</profile>
</component>

6
app/node_modules/hyntax/.idea/jsLibraryMappings.xml generated vendored Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptLibraryMappings">
<includedPredefinedLibrary name="Node.js Core" />
</component>
</project>

6
app/node_modules/hyntax/.idea/misc.xml generated vendored Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
</project>

8
app/node_modules/hyntax/.idea/modules.xml generated vendored Normal file
View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/html-parser.iml" filepath="$PROJECT_DIR$/.idea/html-parser.iml" />
</modules>
</component>
</project>

6
app/node_modules/hyntax/.idea/vcs.xml generated vendored Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/workspace" vcs="Git" />
</component>
</project>

4
app/node_modules/hyntax/.idea/watcherTasks.xml generated vendored Normal file
View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectTasksOptions" suppressed-tasks="Babel" />
</project>

426
app/node_modules/hyntax/.idea/workspace.xml generated vendored Normal file
View File

@@ -0,0 +1,426 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="8464cc48-1658-4b17-b7e4-c7fb8b0e07c7" name="Default" comment="kkkkkkkk" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="ElmWorkspace">
<elmProjects />
<settings elmCompilerPath="" elmFormatPath="" elmTestPath="" isElmFormatOnSaveEnabled="false" />
</component>
<component name="FavoritesManager">
<favorites_list name="blog" />
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/workspace" />
</component>
<component name="JsFlowSettings">
<service-enabled>true</service-enabled>
<exe-path />
<annotation-enable>false</annotation-enable>
<other-services-enabled>true</other-services-enabled>
<auto-save>true</auto-save>
</component>
<component name="ProjectId" id="1OS45M2DkrtVXEAMjYHZcBUG5oE" />
<component name="ProjectInspectionProfilesVisibleTreeState">
<entry key="Project Default">
<profile-state>
<expanded-state>
<State>
<id />
</State>
<State>
<id>BashSupport</id>
</State>
<State>
<id>CSS</id>
</State>
<State>
<id>Code style issuesCSS</id>
</State>
<State>
<id>Dockerfile issues</id>
</State>
<State>
<id>File Watchers</id>
</State>
<State>
<id>General</id>
</State>
<State>
<id>GeneralJavaScript</id>
</State>
<State>
<id>HTML</id>
</State>
<State>
<id>Invalid elementsCSS</id>
</State>
<State>
<id>JavaScript</id>
</State>
<State>
<id>Probable bugsCSS</id>
</State>
<State>
<id>Probable bugsJavaScript</id>
</State>
<State>
<id>RELAX NG</id>
</State>
<State>
<id>TypeScript</id>
</State>
<State>
<id>XML</id>
</State>
<State>
<id>docker-compose</id>
</State>
</expanded-state>
</profile-state>
</entry>
</component>
<component name="ProjectLevelVcsManager" settingsEditedManually="true">
<ConfirmationsSetting value="1" id="Add" />
</component>
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showExcludedFiles" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent">
<property name="HbShouldOpenHtmlAsHb" value="" />
<property name="JavaScriptWeakerCompletionTypeGuess" value="true" />
<property name="SearchEverywhereHistoryKey" value="wrap&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;softWra&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;soft wrap&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;soft wra&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;soft wraps&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;soft w&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;wraps&#9;ACTION&#9;EditorToggleUseSoftWraps&#10;WRAP&#9;ACTION&#9;EditorGutterToggleGlobalSoftWraps" />
<property name="WebServerToolWindowFactoryState" value="false" />
<property name="add_unversioned_files" value="true" />
<property name="javascript.nodejs.core.library.configured.version" value="6.11.1" />
<property name="js.last.introduce.type" value="CONST" />
<property name="js.linters.configure.manually.selectedeslint" value="true" />
<property name="jsx.switch.disabled" value="true" />
<property name="last_opened_file_path" value="$PROJECT_DIR$/workspace/lib/constants" />
<property name="node.js.detected.package.eslint" value="true" />
<property name="node.js.detected.package.standard" value="true" />
<property name="node.js.detected.package.stylelint" value="true" />
<property name="node.js.detected.package.tslint" value="true" />
<property name="node.js.path.for.package.eslint" value="node" />
<property name="node.js.path.for.package.standard" value="project" />
<property name="node.js.path.for.package.stylelint" value="project" />
<property name="node.js.path.for.package.tslint" value="project" />
<property name="node.js.selected.package.eslint" value="(autodetect)" />
<property name="node.js.selected.package.standard" value="$PROJECT_DIR$/workspace/node_modules/eslint" />
<property name="node.js.selected.package.stylelint" value="" />
<property name="node.js.selected.package.tslint" value="(autodetect)" />
<property name="nodejs_interpreter_path" value="$USER_HOME$/.nvm/versions/node/v6.11.1/bin/node" />
<property name="nodejs_package_manager_path" value="npm" />
<property name="settings.editor.selected.configurable" value="editor.preferences.fonts.default" />
<property name="ts.external.directory.path" value="$APPLICATION_HOME_DIR$/plugins/JavaScriptLanguage/jsLanguageServicesImpl/external" />
</component>
<component name="RecentsManager">
<key name="CopyFile.RECENT_KEYS">
<recent name="$PROJECT_DIR$/workspace/lib/constants" />
<recent name="$PROJECT_DIR$/workspace/lib/tokenizer-context-handlers" />
<recent name="$PROJECT_DIR$/workspace/tests/tokenizer/stubs/inputs" />
<recent name="$PROJECT_DIR$/workspace/tests/tokenizer/stubs/outputs" />
<recent name="$PROJECT_DIR$/workspace/lib/parse-contexts" />
</key>
<key name="MoveFile.RECENT_KEYS">
<recent name="$PROJECT_DIR$/workspace/compare/" />
<recent name="$PROJECT_DIR$/workspace/compare" />
<recent name="$PROJECT_DIR$/workspace/lib/tokenize-contexts/" />
<recent name="$PROJECT_DIR$/workspace/lib/tokenize-contexts/factories" />
<recent name="$PROJECT_DIR$/workspace/lib/tokenize-contexts/general-tag-attributes" />
</key>
</component>
<component name="RunManager">
<configuration name="Profile" type="NodeJSConfigurationType" path-to-js-file="workspace/compare/tokenizer.compare.js" working-dir="$PROJECT_DIR$">
<EXTENSION ID="com.jetbrains.nodejs.run.NodeJSProfilingRunConfigurationExtension">
<profiling do-profile="true" v8-profiler-path="$PROJECT_DIR$/workspace/node_modules/v8-profiler" />
</EXTENSION>
<method v="2" />
</configuration>
<configuration default="true" type="NodeJSConfigurationType">
<EXTENSION ID="com.jetbrains.nodejs.run.NodeJSProfilingRunConfigurationExtension">
<profiling v8-profiler-path="$PROJECT_DIR$/workspace/node_modules/v8-profiler" />
</EXTENSION>
<method v="2" />
</configuration>
</component>
<component name="ServiceViewManager">
<option name="viewStates">
<list>
<serviceView>
<treeState>
<expand />
<select />
</treeState>
</serviceView>
</list>
</option>
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="8464cc48-1658-4b17-b7e4-c7fb8b0e07c7" name="Default" comment="" />
<created>1501054391368</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1501054391368</updated>
<workItem from="1501054392448" duration="275000" />
<workItem from="1501054738648" duration="2553000" />
<workItem from="1501093715595" duration="1082000" />
<workItem from="1501097111888" duration="7163000" />
<workItem from="1501142602914" duration="2412000" />
<workItem from="1501146272817" duration="1098000" />
<workItem from="1501147435009" duration="3700000" />
<workItem from="1501233879414" duration="9531000" />
<workItem from="1501487783122" duration="15009000" />
<workItem from="1501611716163" duration="15484000" />
<workItem from="1501831364248" duration="691000" />
<workItem from="1502266033089" duration="8513000" />
<workItem from="1502354989838" duration="1711000" />
<workItem from="1502435573197" duration="7334000" />
<workItem from="1502868630324" duration="3544000" />
<workItem from="1502874348435" duration="4309000" />
<workItem from="1502956358879" duration="2309000" />
<workItem from="1502959968847" duration="1935000" />
<workItem from="1502963342266" duration="2306000" />
<workItem from="1503645863856" duration="15078000" />
<workItem from="1503911064686" duration="7526000" />
<workItem from="1503933526820" duration="2651000" />
<workItem from="1504078111137" duration="5817000" />
<workItem from="1504084224705" duration="26000" />
<workItem from="1504084262907" duration="3799000" />
<workItem from="1504508901606" duration="536000" />
<workItem from="1504510423485" duration="6080000" />
<workItem from="1504519569644" duration="2269000" />
<workItem from="1504545851091" duration="11961000" />
<workItem from="1504632755969" duration="15110000" />
<workItem from="1504769413916" duration="25999000" />
<workItem from="1504865673122" duration="7757000" />
<workItem from="1504948843980" duration="9380000" />
<workItem from="1505200574259" duration="8778000" />
<workItem from="1505289814805" duration="1465000" />
<workItem from="1505292300900" duration="8981000" />
<workItem from="1505307842397" duration="629000" />
<workItem from="1505385507963" duration="819000" />
<workItem from="1505461523772" duration="2474000" />
<workItem from="1505470522853" duration="5636000" />
<workItem from="1505483463744" duration="153000" />
<workItem from="1505485783725" duration="18224000" />
<workItem from="1505892241258" duration="39000" />
<workItem from="1505892283123" duration="5481000" />
<workItem from="1505898673027" duration="110000" />
<workItem from="1505898792662" duration="1212000" />
<workItem from="1505978905993" duration="5221000" />
<workItem from="1506065028073" duration="7026000" />
<workItem from="1506094114158" duration="3000" />
<workItem from="1506094134263" duration="1000" />
<workItem from="1506094138206" duration="2000" />
<workItem from="1506094158098" duration="1000" />
<workItem from="1506094226775" duration="5000" />
<workItem from="1506497805199" duration="3893000" />
<workItem from="1506929130024" duration="6710000" />
<workItem from="1507373828710" duration="19510000" />
<workItem from="1508225903706" duration="3465000" />
<workItem from="1508230992774" duration="716000" />
<workItem from="1508233969423" duration="334000" />
<workItem from="1508235932546" duration="631000" />
<workItem from="1508237906883" duration="6492000" />
<workItem from="1508313346029" duration="509000" />
<workItem from="1508411030132" duration="173000" />
<workItem from="1508442275187" duration="28000" />
<workItem from="1508490522033" duration="1437000" />
<workItem from="1509005577610" duration="7434000" />
<workItem from="1509092134320" duration="1306000" />
<workItem from="1509110101418" duration="488000" />
<workItem from="1509620149897" duration="4936000" />
<workItem from="1509792481611" duration="664000" />
<workItem from="1510081552791" duration="4497000" />
<workItem from="1510254063615" duration="2903000" />
<workItem from="1510660026633" duration="2877000" />
<workItem from="1510667093071" duration="27986000" />
<workItem from="1510833659002" duration="2521000" />
<workItem from="1510908353631" duration="88000" />
<workItem from="1510908458841" duration="169000" />
<workItem from="1510908647943" duration="184000" />
<workItem from="1510908913029" duration="333000" />
<workItem from="1510909661830" duration="134000" />
<workItem from="1510910167589" duration="13051000" />
<workItem from="1511097263564" duration="8947000" />
<workItem from="1511175806836" duration="9000" />
<workItem from="1511176384235" duration="39000" />
<workItem from="1511176737283" duration="216000" />
<workItem from="1511176975904" duration="33000" />
<workItem from="1511177162137" duration="159000" />
<workItem from="1511177910436" duration="15000" />
<workItem from="1511178145334" duration="85000" />
<workItem from="1511178417992" duration="11000" />
<workItem from="1511179106421" duration="79000" />
<workItem from="1511179211776" duration="30000" />
<workItem from="1511179258620" duration="25000" />
<workItem from="1511179384961" duration="10000" />
<workItem from="1511180067806" duration="37000" />
<workItem from="1511180612975" duration="26000" />
<workItem from="1511180659864" duration="5000" />
<workItem from="1511184770490" duration="48000" />
<workItem from="1511185159206" duration="4000" />
<workItem from="1511185370368" duration="21000" />
<workItem from="1511185403257" duration="133000" />
<workItem from="1511255100821" duration="725000" />
<workItem from="1511262255380" duration="521000" />
<workItem from="1511264846777" duration="543000" />
<workItem from="1511341429608" duration="3020000" />
<workItem from="1512720871763" duration="2075000" />
<workItem from="1512723665821" duration="68000" />
<workItem from="1512723990984" duration="475000" />
<workItem from="1512724925296" duration="3663000" />
<workItem from="1512737512380" duration="2241000" />
<workItem from="1512746776821" duration="111000" />
<workItem from="1512747335220" duration="117000" />
<workItem from="1512980932201" duration="3771000" />
<workItem from="1513007637582" duration="634000" />
<workItem from="1513008845022" duration="385000" />
<workItem from="1513075020647" duration="1624000" />
<workItem from="1513077341040" duration="23000" />
<workItem from="1513077583625" duration="366000" />
<workItem from="1513084696819" duration="1637000" />
<workItem from="1513161358203" duration="865000" />
<workItem from="1513176100534" duration="803000" />
<workItem from="1513245564416" duration="31000" />
<workItem from="1513326754780" duration="246000" />
<workItem from="1513447029451" duration="7000" />
<workItem from="1513447054887" duration="24000" />
<workItem from="1513448000421" duration="2000" />
<workItem from="1513451758692" duration="332000" />
<workItem from="1513542041667" duration="353000" />
<workItem from="1513592336074" duration="94000" />
<workItem from="1513592590306" duration="12872000" />
<workItem from="1513611302296" duration="23000" />
<workItem from="1513611367778" duration="6167000" />
<workItem from="1513698692350" duration="3399000" />
<workItem from="1513786296938" duration="905000" />
<workItem from="1513848302395" duration="1147000" />
<workItem from="1513851803230" duration="16259000" />
<workItem from="1513872606382" duration="6556000" />
<workItem from="1513940677572" duration="1220000" />
<workItem from="1513952110120" duration="180000" />
<workItem from="1513952336170" duration="372000" />
<workItem from="1513952889567" duration="47000" />
<workItem from="1513953009783" duration="484000" />
<workItem from="1513953566619" duration="134000" />
<workItem from="1513953704322" duration="128000" />
<workItem from="1513954126222" duration="356000" />
<workItem from="1513954518116" duration="405000" />
<workItem from="1513954957480" duration="921000" />
<workItem from="1513956121584" duration="454000" />
<workItem from="1513958729589" duration="1710000" />
<workItem from="1514028041730" duration="680000" />
<workItem from="1514363735136" duration="10512000" />
<workItem from="1514374527474" duration="269000" />
<workItem from="1514375083971" duration="181000" />
<workItem from="1514375276170" duration="33000" />
<workItem from="1514375325619" duration="3267000" />
<workItem from="1514385260805" duration="31000" />
<workItem from="1514385305577" duration="1741000" />
<workItem from="1514387855997" duration="58000" />
<workItem from="1514387980041" duration="131000" />
<workItem from="1514389284332" duration="841000" />
<workItem from="1514454994717" duration="15000" />
<workItem from="1516705678208" duration="56000" />
<workItem from="1516709653053" duration="15000" />
<workItem from="1516784387922" duration="2503000" />
<workItem from="1517586811725" duration="3000" />
<workItem from="1517587624774" duration="11000" />
<workItem from="1525246309780" duration="550000" />
<workItem from="1529917454206" duration="313000" />
<workItem from="1537171139908" duration="1017000" />
<workItem from="1550049620754" duration="958000" />
<workItem from="1550054475074" duration="1539000" />
<workItem from="1551972000086" duration="939000" />
<workItem from="1552306169689" duration="4388000" />
<workItem from="1552314476019" duration="1525000" />
<workItem from="1552316963165" duration="1699000" />
<workItem from="1552407340644" duration="1626000" />
<workItem from="1552409294088" duration="463000" />
<workItem from="1561365019466" duration="1653000" />
<workItem from="1562835291853" duration="775000" />
<workItem from="1563442060403" duration="763000" />
<workItem from="1563785782383" duration="378000" />
<workItem from="1563957480845" duration="469000" />
<workItem from="1563958036361" duration="8000" />
<workItem from="1563958095402" duration="5000" />
<workItem from="1563958105604" duration="5000" />
<workItem from="1563958143328" duration="453000" />
<workItem from="1563958646446" duration="5000" />
<workItem from="1563958709110" duration="79000" />
<workItem from="1563958812687" duration="27000" />
<workItem from="1563958843084" duration="1002000" />
<workItem from="1563959881612" duration="2000" />
<workItem from="1563959902400" duration="6000" />
<workItem from="1563959945625" duration="8000" />
<workItem from="1563959967339" duration="13000" />
<workItem from="1563959989491" duration="644000" />
<workItem from="1563960833718" duration="1135000" />
<workItem from="1563962137841" duration="113000" />
<workItem from="1563962327462" duration="7000" />
<workItem from="1563962351804" duration="285000" />
<workItem from="1563962797068" duration="40000" />
<workItem from="1563962862455" duration="44000" />
<workItem from="1563962929112" duration="4389000" />
<workItem from="1563967498927" duration="118000" />
<workItem from="1563967744195" duration="45000" />
<workItem from="1563974028336" duration="423000" />
<workItem from="1563976566744" duration="311000" />
<workItem from="1563977461934" duration="1141000" />
<workItem from="1564047910982" duration="3814000" />
<workItem from="1564394124148" duration="5189000" />
<workItem from="1564492645197" duration="295000" />
<workItem from="1566634663875" duration="334000" />
<workItem from="1566675500200" duration="425000" />
<workItem from="1567707431285" duration="65000" />
<workItem from="1570180842801" duration="140000" />
<workItem from="1570181013875" duration="10000" />
<workItem from="1570181096316" duration="165000" />
<workItem from="1570181351179" duration="29000" />
<workItem from="1570182160220" duration="22000" />
<workItem from="1575018458563" duration="373000" />
<workItem from="1579509683660" duration="1176000" />
</task>
<servers />
</component>
<component name="TodoView">
<todo-panel id="selected-file">
<is-autoscroll-to-source value="true" />
</todo-panel>
<todo-panel id="all">
<are-packages-shown value="true" />
<is-autoscroll-to-source value="true" />
</todo-panel>
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="1" />
</component>
<component name="Vcs.Log.Tabs.Properties">
<option name="TAB_STATES">
<map>
<entry key="MAIN">
<value>
<State>
<option name="COLUMN_ORDER" />
</State>
</value>
</entry>
</map>
</option>
</component>
<component name="WindowStateProjectService">
<state x="505" y="233" width="670" height="676" key="search.everywhere.popup" timestamp="1579510663877">
<screen x="0" y="0" width="1680" height="1050" />
</state>
<state x="505" y="233" width="670" height="676" key="search.everywhere.popup/0.0.1680.1050@0.0.1680.1050" timestamp="1579510663877" />
</component>
</project>

6
app/node_modules/hyntax/.jump/data.json generated vendored Normal file
View File

@@ -0,0 +1,6 @@
{
"master": {
"name": "master",
"lastSwitch": 1599423922416
}
}

View File

@@ -0,0 +1 @@
{}

View File

@@ -0,0 +1 @@
{}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
{"uuid":"322f0f72-58f3-4cca-b5ae-8bfc428945e3","parent":null,"pid":74874,"argv":["/Users/harmash/.nvm/versions/node/v12.13.0/bin/node","/Users/harmash/.nvm/versions/node/v12.13.0/bin/npm","test"],"execArgv":[],"cwd":"/Users/harmash/dev/html-parser/workspace","time":1579510292273,"ppid":74873,"root":"b3d47a4b-58cb-4631-81c0-3428d9c9c8f5","coverageFilename":"/Users/harmash/dev/html-parser/workspace/.nyc_output/322f0f72-58f3-4cca-b5ae-8bfc428945e3.json","files":[]}

View File

@@ -0,0 +1 @@
{"uuid":"cf246f76-4f28-46ff-8241-f7c75d760127","parent":"322f0f72-58f3-4cca-b5ae-8bfc428945e3","pid":74877,"argv":["/Users/harmash/.nvm/versions/node/v12.13.0/bin/node","/Users/harmash/dev/html-parser/workspace/node_modules/.bin/tap-spec"],"execArgv":[],"cwd":"/Users/harmash/dev/html-parser/workspace","time":1579510292648,"ppid":74875,"root":"b3d47a4b-58cb-4631-81c0-3428d9c9c8f5","coverageFilename":"/Users/harmash/dev/html-parser/workspace/.nyc_output/cf246f76-4f28-46ff-8241-f7c75d760127.json","files":[]}

View File

@@ -0,0 +1 @@
{"uuid":"e27b0405-4162-49b2-83bf-adbdb30caee8","parent":"322f0f72-58f3-4cca-b5ae-8bfc428945e3","pid":74876,"argv":["/Users/harmash/.nvm/versions/node/v12.13.0/bin/node","/Users/harmash/dev/html-parser/workspace/node_modules/.bin/tape","./tests/**/*.test.js"],"execArgv":[],"cwd":"/Users/harmash/dev/html-parser/workspace","time":1579510292648,"ppid":74875,"root":"b3d47a4b-58cb-4631-81c0-3428d9c9c8f5","coverageFilename":"/Users/harmash/dev/html-parser/workspace/.nyc_output/e27b0405-4162-49b2-83bf-adbdb30caee8.json","files":["/Users/harmash/dev/html-parser/workspace/lib/stream-tokenizer.js","/Users/harmash/dev/html-parser/workspace/lib/tokenize.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/data.js","/Users/harmash/dev/html-parser/workspace/lib/helpers.js","/Users/harmash/dev/html-parser/workspace/lib/constants/token-types.js","/Users/harmash/dev/html-parser/workspace/lib/constants/tokenizer-contexts.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/open-tag-start.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/close-tag.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/open-tag-end.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/attributes.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/attribute-key.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/attribute-value.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/attribute-value-bare.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/attribute-value-wrapped.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/script-tag-content.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/style-tag-content.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/doctype-start.js","/Users/harmash/dev/html-parser/wo
rkspace/lib/tokenizer-context-handlers/doctype-end.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/doctype-attributes.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/doctype-attribute-wrapped.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/doctype-attribute-bare.js","/Users/harmash/dev/html-parser/workspace/lib/tokenizer-context-handlers/comment-content.js","/Users/harmash/dev/html-parser/workspace/lib/construct-tree.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/tag.js","/Users/harmash/dev/html-parser/workspace/lib/constants/tree-constructor-contexts.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/tag-content.js","/Users/harmash/dev/html-parser/workspace/lib/constants/ast-nodes.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/tag-name.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/attributes.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/attribute.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/attribute-value.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/comment.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/doctype.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/doctype-attributes.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/doctype-attribute.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/script-tag.js","/Users/harmash/dev/html-parser/workspace/lib/tree-constructor-context-handlers/style-tag.js","/Users/harmash/dev/html-parser/workspace/lib/stream-tree-constructor.js"]}

File diff suppressed because one or more lines are too long

8
app/node_modules/hyntax/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,8 @@
language: node_js
node_js:
- "8"
before_install:
- "npm install -g npm@'>=5.3.0'"
after_script:
- "npm run coverage"
- "npx nyc report --reporter=text-lcov | npx coveralls"

19
app/node_modules/hyntax/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,19 @@
MIT License
Copyright (c) 2017-present Nikolay Garmash
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

463
app/node_modules/hyntax/README.md generated vendored Normal file
View File

@@ -0,0 +1,463 @@
<p align="center">
<img src="./logo.png" alt="Hyntax project logo — lego bricks in the shape of a capital letter H" width="150">
</p>
# Hyntax
Straightforward HTML parser for JavaScript. [Live Demo](https://astexplorer.net/#/gist/6bf7f78077333cff124e619aebfb5b42/latest).
- **Simple.** API is straightforward, output is clear.
- **Forgiving.** Just like a browser, normally parses invalid HTML.
- **Supports streaming.** Can process HTML while it's still being loaded.
- **No dependencies.**
## Table Of Contents
- [Usage](#usage)
- [TypeScript Typings](#typescript-typings)
- [Streaming](#streaming)
- [Tokens](#tokens)
- [AST Format](#ast-format)
- [API Reference](#api-reference)
- [Types Reference](#types-reference)
## Usage
```bash
npm install hyntax
```
```javascript
const { tokenize, constructTree } = require('hyntax')
const util = require('util')
const inputHTML = `
<html>
<body>
<input type="text" placeholder="Don't type">
<button>Don't press</button>
</body>
</html>
`
const { tokens } = tokenize(inputHTML)
const { ast } = constructTree(tokens)
console.log(JSON.stringify(tokens, null, 2))
console.log(util.inspect(ast, { showHidden: false, depth: null }))
```
## TypeScript Typings
Hyntax is written in JavaScript but has [integrated TypeScript typings](./index.d.ts) to help you navigate around its data structures. There is also [Types Reference](#types-reference) which covers most common types.
## Streaming
Use `StreamTokenizer` and `StreamTreeConstructor` classes to parse HTML chunk by chunk while it's still being loaded from the network or read from the disk.
```javascript
const { StreamTokenizer, StreamTreeConstructor } = require('hyntax')
const http = require('http')
const util = require('util')
http.get('http://info.cern.ch', (res) => {
const streamTokenizer = new StreamTokenizer()
const streamTreeConstructor = new StreamTreeConstructor()
let resultTokens = []
let resultAst
res.pipe(streamTokenizer).pipe(streamTreeConstructor)
streamTokenizer
.on('data', (tokens) => {
resultTokens = resultTokens.concat(tokens)
})
.on('end', () => {
console.log(JSON.stringify(resultTokens, null, 2))
})
streamTreeConstructor
.on('data', (ast) => {
resultAst = ast
})
.on('end', () => {
console.log(util.inspect(resultAst, { showHidden: false, depth: null }))
})
}).on('error', (err) => {
throw err;
})
```
## Tokens
Here are all kinds of tokens which Hyntax will extract out of HTML string.
![Overview of all possible tokens](./tokens-list.png)
Each token conforms to [Tokenizer.Token](#TokenizerToken) interface.
## AST Format
Resulting syntax tree will have at least one top-level [Document Node](#ast-node-types) with optional children nodes nested within.
<!-- You can play around with the [AST Explorer](https://astexplorer.net) to see how AST looks like. -->
```javascript
{
nodeType: TreeConstructor.NodeTypes.Document,
content: {
children: [
{
nodeType: TreeConstructor.NodeTypes.AnyNodeType,
content: {}
},
{
nodeType: TreeConstructor.NodeTypes.AnyNodeType,
content: {}
}
]
}
}
```
The content of each node is specific to the node's type; all of them are described in the [AST Node Types](#ast-node-types) reference.
## API Reference
### Tokenizer
Hyntax has its tokenizer as a separate module. You can use generated tokens on their own or pass them further to a tree constructor to build an AST.
#### Interface
```typescript
tokenize(html: String): Tokenizer.Result
```
#### Arguments
- `html`
HTML string to process
Required.
Type: string.
#### Returns [Tokenizer.Result](#TokenizerResult)
### Tree Constructor
After you've got an array of tokens, you can pass them into the tree constructor to build an AST.
#### Interface
```typescript
constructTree(tokens: Tokenizer.AnyToken[]): TreeConstructor.Result
```
#### Arguments
- `tokens`
Array of tokens received from the tokenizer.
Required.
Type: [Tokenizer.AnyToken[]](#tokenizeranytoken)
#### Returns [TreeConstructor.Result](#TreeConstructorResult)
## Types Reference
#### Tokenizer.Result
```typescript
interface Result {
state: Tokenizer.State
tokens: Tokenizer.AnyToken[]
}
```
- `state`
The current state of tokenizer. It can be persisted and passed to the next tokenizer call if the input is coming in chunks.
- `tokens`
Array of resulting tokens.
Type: [Tokenizer.AnyToken[]](#tokenizeranytoken)
#### TreeConstructor.Result
```typescript
interface Result {
state: State
ast: AST
}
```
- `state`
The current state of the tree constructor. Can be persisted and passed to the next tree constructor call in case tokens are coming in chunks.
- `ast`
Resulting AST.
Type: [TreeConstructor.AST](#treeconstructorast)
#### Tokenizer.Token
Generic Token, other interfaces use it to create a specific Token type.
```typescript
interface Token<T extends TokenTypes.AnyTokenType> {
type: T
content: string
startPosition: number
endPosition: number
}
```
- `type`
One of the [Token types](#TokenizerTokenTypesAnyTokenType).
- `content`
Piece of original HTML string which was recognized as a token.
- `startPosition`
Index of a character in the input HTML string where the token starts.
- `endPosition`
Index of a character in the input HTML string where the token ends.
#### Tokenizer.TokenTypes.AnyTokenType
Shortcut type of all possible tokens.
```typescript
type AnyTokenType =
| Text
| OpenTagStart
| AttributeKey
| AttributeAssigment
| AttributeValueWrapperStart
| AttributeValue
| AttributeValueWrapperEnd
| OpenTagEnd
| CloseTag
| OpenTagStartScript
| ScriptTagContent
| OpenTagEndScript
| CloseTagScript
| OpenTagStartStyle
| StyleTagContent
| OpenTagEndStyle
| CloseTagStyle
| DoctypeStart
| DoctypeEnd
| DoctypeAttributeWrapperStart
| DoctypeAttribute
| DoctypeAttributeWrapperEnd
| CommentStart
| CommentContent
| CommentEnd
```
#### Tokenizer.AnyToken
Shortcut to reference any possible token.
```typescript
type AnyToken = Token<TokenTypes.AnyTokenType>
```
#### TreeConstructor.AST
Just an alias to DocumentNode. AST always has one top-level DocumentNode. See [AST Node Types](#ast-node-types)
```typescript
type AST = TreeConstructor.DocumentNode
```
### AST Node Types
There are 7 possible types of Node. Each type has specific content.
```typescript
type DocumentNode = Node<NodeTypes.Document, NodeContents.Document>
```
```typescript
type DoctypeNode = Node<NodeTypes.Doctype, NodeContents.Doctype>
```
```typescript
type TextNode = Node<NodeTypes.Text, NodeContents.Text>
```
```typescript
type TagNode = Node<NodeTypes.Tag, NodeContents.Tag>
```
```typescript
type CommentNode = Node<NodeTypes.Comment, NodeContents.Comment>
```
```typescript
type ScriptNode = Node<NodeTypes.Script, NodeContents.Script>
```
```typescript
type StyleNode = Node<NodeTypes.Style, NodeContents.Style>
```
Interfaces for each content type:
- [Document](#TreeConstructorNodeContentsDocument)
- [Doctype](#TreeConstructorNodeContentsDoctype)
- [Text](#TreeConstructorNodeContentsText)
- [Tag](#TreeConstructorNodeContentsTag)
- [Comment](#TreeConstructorNodeContentsComment)
- [Script](#TreeConstructorNodeContentsScript)
- [Style](#TreeConstructorNodeContentsStyle)
#### TreeConstructor.Node
Generic Node, other interfaces use it to create specific Nodes by providing type of Node and type of the content inside the Node.
```typescript
interface Node<T extends NodeTypes.AnyNodeType, C extends NodeContents.AnyNodeContent> {
nodeType: T
content: C
}
```
#### TreeConstructor.NodeTypes.AnyNodeType
Shortcut type of all possible Node types.
```typescript
type AnyNodeType =
| Document
| Doctype
| Tag
| Text
| Comment
| Script
| Style
```
### Node Content Types
#### TreeConstructor.NodeTypes.AnyNodeContent
Shortcut type of all possible types of content inside a Node.
```typescript
type AnyNodeContent =
| Document
| Doctype
| Text
| Tag
| Comment
| Script
| Style
```
#### TreeConstructor.NodeContents.Document
```typescript
interface Document {
children: AnyNode[]
}
```
#### TreeConstructor.NodeContents.Doctype
```typescript
interface Doctype {
start: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeStart>
attributes?: DoctypeAttribute[]
end: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeEnd>
}
```
#### TreeConstructor.NodeContents.Text
```typescript
interface Text {
value: Tokenizer.Token<Tokenizer.TokenTypes.Text>
}
```
#### TreeConstructor.NodeContents.Tag
```typescript
interface Tag {
name: string
selfClosing: boolean
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStart>
attributes?: TagAttribute[]
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEnd>
children?: AnyNode[]
close?: Tokenizer.Token<Tokenizer.TokenTypes.CloseTag>
}
```
#### TreeConstructor.NodeContents.Comment
```typescript
interface Comment {
start: Tokenizer.Token<Tokenizer.TokenTypes.CommentStart>
value: Tokenizer.Token<Tokenizer.TokenTypes.CommentContent>
end: Tokenizer.Token<Tokenizer.TokenTypes.CommentEnd>
}
```
#### TreeConstructor.NodeContents.Script
```typescript
interface Script {
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStartScript>
attributes?: TagAttribute[]
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEndScript>
value: Tokenizer.Token<Tokenizer.TokenTypes.ScriptTagContent>
close: Tokenizer.Token<Tokenizer.TokenTypes.CloseTagScript>
}
```
#### TreeConstructor.NodeContents.Style
```typescript
interface Style {
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStartStyle>,
attributes?: TagAttribute[],
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEndStyle>,
value: Tokenizer.Token<Tokenizer.TokenTypes.StyleTagContent>,
close: Tokenizer.Token<Tokenizer.TokenTypes.CloseTagStyle>
}
```
#### TreeConstructor.DoctypeAttribute
```typescript
interface DoctypeAttribute {
startWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttributeWrapperStart>,
value: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttribute>,
endWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttributeWrapperEnd>
}
```
#### TreeConstructor.TagAttribute
```typescript
interface TagAttribute {
key?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeKey>,
startWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValueWrapperStart>,
value?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValue>,
endWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValueWrapperEnd>
}
```

17
app/node_modules/hyntax/generate-toc.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
#!/usr/bin/env node
const fs = require('fs')
const remark = require('remark')
const toc = require('remark-toc')
const readme = fs.readFileSync('./README.md').toString()
remark()
.use(toc, { tight: true, maxDepth: 2 })
.process(readme, function (err, file) {
if (err) {
throw err
}
fs.writeFileSync('./README.md', file)
})

5
app/node_modules/hyntax/hooks/install-hooks.sh generated vendored Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -v
ln -s ../../hooks/pre-commit ./.git/hooks/pre-commit

5
app/node_modules/hyntax/hooks/pre-commit generated vendored Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -v
npx eslint index.js ./lib/**

298
app/node_modules/hyntax/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,298 @@
import { Transform } from 'stream'
declare function tokenize(
html: string,
existingState?: Tokenizer.State,
options?: Tokenizer.Options
): Tokenizer.Result
declare function constructTree(
tokens: Tokenizer.AnyToken[],
existingState?: TreeConstructor.State
): TreeConstructor.Result
declare class StreamTokenizer extends Transform {}
declare class StreamTreeConstructor extends Transform {}
export namespace Tokenizer {
namespace ContextTypes {
type Data = 'tokenizer-context:data'
type OpenTagStart = 'tokenizer-context:open-tag-start'
type CloseTag = 'tokenizer-context:close-tag'
type Attributes = 'tokenizer-context:attributes'
type OpenTagEnd = 'tokenizer-context:open-tag-end'
type AttributeKey = 'tokenizer-context:attribute-key'
type AttributeValue = 'tokenizer-context:attribute-value'
type AttributeValueBare = 'tokenizer-context:attribute-value-bare'
type AttributeValueWrapped = 'tokenizer-context:attribute-value-wrapped'
type ScriptContent = 'tokenizer-context:script-content'
type StyleContent = 'tokenizer-context:style-content'
type DoctypeStart = 'tokenizer-context:doctype-start'
type DoctypeEnd = 'tokenizer-context:doctype-end'
type DoctypeAttributes = 'tokenizer-context:doctype-attributes'
type DoctypeAttributeWrapped = 'tokenizer-context:doctype-attribute-wrapped'
type DoctypeAttributeBare = 'tokenizer-context:doctype-attribute-bare'
type CommentStart = 'tokenizer-context:comment-start'
type CommentContent = 'tokenizer-context:comment-content'
type CommentEnd = 'tokenizer-context:comment-end'
type AnyContextType =
| Data
| OpenTagStart
| CloseTag
| Attributes
| OpenTagEnd
| AttributeKey
| AttributeValue
| AttributeValueBare
| AttributeValueWrapped
| ScriptContent
| StyleContent
| DoctypeStart
| DoctypeEnd
| DoctypeAttributes
| DoctypeAttributeWrapped
| DoctypeAttributeBare
| CommentStart
| CommentContent
| CommentEnd
}
namespace TokenTypes {
type Text = 'token:text'
type OpenTagStart = 'token:open-tag-start'
type AttributeKey = 'token:attribute-key'
type AttributeAssigment = 'token:attribute-assignment'
type AttributeValueWrapperStart = 'token:attribute-value-wrapper-start'
type AttributeValue = 'token:attribute-value'
type AttributeValueWrapperEnd = 'token:attribute-value-wrapper-end'
type OpenTagEnd = 'token:open-tag-end'
type CloseTag = 'token:close-tag'
type OpenTagStartScript = 'token:open-tag-start-script'
type ScriptTagContent = 'token:script-tag-content'
type OpenTagEndScript = 'token:open-tag-end-script'
type CloseTagScript = 'token:close-tag-script'
type OpenTagStartStyle = 'token:open-tag-start-style'
type StyleTagContent = 'token:style-tag-content'
type OpenTagEndStyle = 'token:open-tag-end-style'
type CloseTagStyle = 'token:close-tag-style'
type DoctypeStart = 'token:doctype-start'
type DoctypeEnd = 'token:doctype-end'
type DoctypeAttributeWrapperStart = 'token:doctype-attribute-wrapper-start'
type DoctypeAttribute = 'token:doctype-attribute'
type DoctypeAttributeWrapperEnd = 'token:doctype-attribute-wrapper-end'
type CommentStart = 'token:comment-start'
type CommentContent = 'token:comment-content'
type CommentEnd = 'token:comment-end'
type AnyTokenType =
| Text
| OpenTagStart
| AttributeKey
| AttributeAssigment
| AttributeValueWrapperStart
| AttributeValue
| AttributeValueWrapperEnd
| OpenTagEnd
| CloseTag
| OpenTagStartScript
| ScriptTagContent
| OpenTagEndScript
| CloseTagScript
| OpenTagStartStyle
| StyleTagContent
| OpenTagEndStyle
| CloseTagStyle
| DoctypeStart
| DoctypeEnd
| DoctypeAttributeWrapperStart
| DoctypeAttribute
| DoctypeAttributeWrapperEnd
| CommentStart
| CommentContent
| CommentEnd
}
interface Options {
isFinalChunk: boolean
}
interface State {
currentContext: string
contextParams: ContextParams
decisionBuffer: string
accumulatedContent: string
caretPosition: number
}
interface Result {
state: State
tokens: AnyToken[]
}
type AnyToken = Token<TokenTypes.AnyTokenType>
interface Token<T extends TokenTypes.AnyTokenType> {
type: T
content: string
startPosition: number
endPosition: number
}
type ContextParams = {
[C in ContextTypes.AnyContextType]?: {
wrapper?: '"' | '\'',
tagName?: string
}
}
}
export namespace TreeConstructor {
namespace NodeTypes {
type Document = 'document'
type Doctype = 'doctype'
type Tag = 'tag'
type Text = 'text'
type Comment = 'comment'
type Script = 'script'
type Style = 'style'
type AnyNodeType =
| Document
| Doctype
| Tag
| Text
| Comment
| Script
| Style
}
namespace ContextTypes {
type TagContent = 'tree-constructor-context:tag-content'
type Tag = 'tree-constructor-context:tag'
type TagName = 'tree-constructor-context:tag-name'
type Attributes = 'tree-constructor-context:attributes'
type Attribute = 'tree-constructor-context:attribute'
type AttributeValue = 'tree-constructor-context:attribute-value'
type Comment = 'tree-constructor-context:comment'
type Doctype = 'tree-constructor-context:doctype'
type DoctypeAttributes = 'tree-constructor-context:doctype-attributes'
type DoctypeAttribute = 'tree-constructor-context:doctype-attribute'
type ScriptTag = 'tree-constructor-context:script-tag'
type StyleTag = 'tree-constructor-context:style-tag'
type AnyContextType =
| TagContent
| Tag
| TagName
| Attributes
| Attribute
| AttributeValue
| Comment
| Doctype
| DoctypeAttributes
| DoctypeAttribute
| ScriptTag
| StyleTag
}
namespace NodeContents {
interface Document {
children: AnyNode[]
}
interface Doctype {
start: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeStart>
attributes?: DoctypeAttribute[]
end: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeEnd>
}
interface Text {
value: Tokenizer.Token<Tokenizer.TokenTypes.Text>
}
interface Tag {
name: string
selfClosing: boolean
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStart>
attributes?: TagAttribute[]
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEnd>
children?: AnyNode[]
close?: Tokenizer.Token<Tokenizer.TokenTypes.CloseTag>
}
interface Comment {
start: Tokenizer.Token<Tokenizer.TokenTypes.CommentStart>
value: Tokenizer.Token<Tokenizer.TokenTypes.CommentContent>
end: Tokenizer.Token<Tokenizer.TokenTypes.CommentEnd>
}
interface Script {
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStartScript>
attributes?: TagAttribute[]
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEndScript>
value: Tokenizer.Token<Tokenizer.TokenTypes.ScriptTagContent>
close: Tokenizer.Token<Tokenizer.TokenTypes.CloseTagScript>
}
interface Style {
openStart: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagStartStyle>,
attributes?: TagAttribute[],
openEnd: Tokenizer.Token<Tokenizer.TokenTypes.OpenTagEndStyle>,
value: Tokenizer.Token<Tokenizer.TokenTypes.StyleTagContent>,
close: Tokenizer.Token<Tokenizer.TokenTypes.CloseTagStyle>
}
type AnyNodeContent =
| Document
| Doctype
| Text
| Tag
| Comment
| Script
| Style
}
interface State {
caretPosition: number
currentContext: ContextTypes.AnyContextType
currentNode: NodeTypes.AnyNodeType
rootNode: NodeTypes.Document
}
interface Result {
state: State
ast: AST
}
type AST = DocumentNode
interface Node<T extends NodeTypes.AnyNodeType, C extends NodeContents.AnyNodeContent> {
nodeType: T
content: C
}
type AnyNode = Node<NodeTypes.AnyNodeType, NodeContents.AnyNodeContent>
type DocumentNode = Node<NodeTypes.Document, NodeContents.Document>
type DoctypeNode = Node<NodeTypes.Doctype, NodeContents.Doctype>
type TextNode = Node<NodeTypes.Text, NodeContents.Text>
type TagNode = Node<NodeTypes.Tag, NodeContents.Tag>
type CommentNode = Node<NodeTypes.Comment, NodeContents.Comment>
type ScriptNode = Node<NodeTypes.Script, NodeContents.Script>
type StyleNode = Node<NodeTypes.Style, NodeContents.Style>
interface DoctypeAttribute {
startWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttributeWrapperStart>,
value: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttribute>,
endWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.DoctypeAttributeWrapperEnd>
}
interface TagAttribute {
key?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeKey>,
startWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValueWrapperStart>,
value?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValue>,
endWrapper?: Tokenizer.Token<Tokenizer.TokenTypes.AttributeValueWrapperEnd>
}
}

18
app/node_modules/hyntax/index.es5.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
"use strict";
var tokenize = require('./lib/tokenize');
var constructTree = require('./lib/construct-tree');
var StreamTokenizer = require('./lib/stream-tokenizer');
var StreamTreeConstructor = require('./lib/stream-tree-constructor'); // Need to be separate exports
// in order to be properly bundled
// and recognised by Rollup as named
// exports
module.exports.tokenize = tokenize;
module.exports.constructTree = constructTree;
module.exports.StreamTokenizer = StreamTokenizer;
module.exports.StreamTreeConstructor = StreamTreeConstructor;

13
app/node_modules/hyntax/index.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
const tokenize = require('./lib/tokenize')
const constructTree = require('./lib/construct-tree')
const StreamTokenizer = require('./lib/stream-tokenizer')
const StreamTreeConstructor = require('./lib/stream-tree-constructor')
// Need to be separate exports
// in order to be properly bundled
// and recognised by Rollup as named
// exports
module.exports.tokenize = tokenize
module.exports.constructTree = constructTree
module.exports.StreamTokenizer = StreamTokenizer
module.exports.StreamTreeConstructor = StreamTreeConstructor

11
app/node_modules/hyntax/lib-es5/constants/ast-nodes.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
"use strict";
module.exports = {
NODE_DOCUMENT: 'document',
NODE_TAG: 'tag',
NODE_TEXT: 'text',
NODE_DOCTYPE: 'doctype',
NODE_COMMENT: 'comment',
NODE_SCRIPT: 'script',
NODE_STYLE: 'style'
};

View File

@@ -0,0 +1,29 @@
"use strict";
module.exports = {
TOKEN_TEXT: 'token:text',
TOKEN_OPEN_TAG_START: 'token:open-tag-start',
TOKEN_ATTRIBUTE_KEY: 'token:attribute-key',
TOKEN_ATTRIBUTE_ASSIGNMENT: 'token:attribute-assignment',
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START: 'token:attribute-value-wrapper-start',
TOKEN_ATTRIBUTE_VALUE: 'token:attribute-value',
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END: 'token:attribute-value-wrapper-end',
TOKEN_OPEN_TAG_END: 'token:open-tag-end',
TOKEN_CLOSE_TAG: 'token:close-tag',
TOKEN_OPEN_TAG_START_SCRIPT: 'token:open-tag-start-script',
TOKEN_SCRIPT_TAG_CONTENT: 'token:script-tag-content',
TOKEN_OPEN_TAG_END_SCRIPT: 'token:open-tag-end-script',
TOKEN_CLOSE_TAG_SCRIPT: 'token:close-tag-script',
TOKEN_OPEN_TAG_START_STYLE: 'token:open-tag-start-style',
TOKEN_STYLE_TAG_CONTENT: 'token:style-tag-content',
TOKEN_OPEN_TAG_END_STYLE: 'token:open-tag-end-style',
TOKEN_CLOSE_TAG_STYLE: 'token:close-tag-style',
TOKEN_DOCTYPE_START: 'token:doctype-start',
TOKEN_DOCTYPE_END: 'token:doctype-end',
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START: 'token:doctype-attribute-wrapper-start',
TOKEN_DOCTYPE_ATTRIBUTE: 'token:doctype-attribute',
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END: 'token:doctype-attribute-wrapper-end',
TOKEN_COMMENT_START: 'token:comment-start',
TOKEN_COMMENT_CONTENT: 'token:comment-content',
TOKEN_COMMENT_END: 'token:comment-end'
};

View File

@@ -0,0 +1,23 @@
"use strict";
module.exports = {
DATA_CONTEXT: 'tokenizer-context:data',
OPEN_TAG_START_CONTEXT: 'tokenizer-context:open-tag-start',
CLOSE_TAG_CONTEXT: 'tokenizer-context:close-tag',
ATTRIBUTES_CONTEXT: 'tokenizer-context:attributes',
OPEN_TAG_END_CONTEXT: 'tokenizer-context:open-tag-end',
ATTRIBUTE_KEY_CONTEXT: 'tokenizer-context:attribute-key',
ATTRIBUTE_VALUE_CONTEXT: 'tokenizer-context:attribute-value',
ATTRIBUTE_VALUE_BARE_CONTEXT: 'tokenizer-context:attribute-value-bare',
ATTRIBUTE_VALUE_WRAPPED_CONTEXT: 'tokenizer-context:attribute-value-wrapped',
SCRIPT_CONTENT_CONTEXT: 'tokenizer-context:script-content',
STYLE_CONTENT_CONTEXT: 'tokenizer-context:style-content',
DOCTYPE_START_CONTEXT: 'tokenizer-context:doctype-start',
DOCTYPE_END_CONTEXT: 'tokenizer-context:doctype-end',
DOCTYPE_ATTRIBUTES_CONTEXT: 'tokenizer-context:doctype-attributes',
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT: 'tokenizer-context:doctype-attribute-wrapped',
DOCTYPE_ATTRIBUTE_BARE_CONTEXT: 'tokenizer-context:doctype-attribute-bare',
COMMENT_START_CONTEXT: 'tokenizer-context:comment-start',
COMMENT_CONTENT_CONTEXT: 'tokenizer-context:comment-content',
COMMENT_END_CONTEXT: 'tokenizer-context:comment-end'
};

View File

@@ -0,0 +1,16 @@
"use strict";
module.exports = {
TAG_CONTENT_CONTEXT: 'tree-constructor-context:tag-content',
TAG_CONTEXT: 'tree-constructor-context:tag',
TAG_NAME_CONTEXT: 'tree-constructor-context:tag-name',
ATTRIBUTES_CONTEXT: 'tree-constructor-context:attributes',
ATTRIBUTE_CONTEXT: 'tree-constructor-context:attribute',
ATTRIBUTE_VALUE_CONTEXT: 'tree-constructor-context:attribute-value',
COMMENT_CONTEXT: 'tree-constructor-context:comment',
DOCTYPE_CONTEXT: 'tree-constructor-context:doctype',
DOCTYPE_ATTRIBUTES_CONTEXT: 'tree-constructor-context:doctype-attributes',
DOCTYPE_ATTRIBUTE_CONTEXT: 'tree-constructor-context:doctype-attribute',
SCRIPT_TAG_CONTEXT: 'tree-constructor-context:script-tag',
STYLE_TAG_CONTEXT: 'tree-constructor-context:style-tag'
};

93
app/node_modules/hyntax/lib-es5/construct-tree.js generated vendored Normal file
View File

@@ -0,0 +1,93 @@
"use strict";
var _contextsMap;
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var tag = require('./tree-constructor-context-handlers/tag');
var tagContent = require('./tree-constructor-context-handlers/tag-content');
var tagName = require('./tree-constructor-context-handlers/tag-name');
var attributes = require('./tree-constructor-context-handlers/attributes');
var attribute = require('./tree-constructor-context-handlers/attribute');
var attributeValue = require('./tree-constructor-context-handlers/attribute-value');
var comment = require('./tree-constructor-context-handlers/comment');
var doctype = require('./tree-constructor-context-handlers/doctype');
var doctypeAttributes = require('./tree-constructor-context-handlers/doctype-attributes');
var doctypeAttribute = require('./tree-constructor-context-handlers/doctype-attribute');
var scriptTag = require('./tree-constructor-context-handlers/script-tag');
var styleTag = require('./tree-constructor-context-handlers/style-tag');
var _require = require('./constants/tree-constructor-contexts'),
TAG_CONTENT_CONTEXT = _require.TAG_CONTENT_CONTEXT,
TAG_CONTEXT = _require.TAG_CONTEXT,
TAG_NAME_CONTEXT = _require.TAG_NAME_CONTEXT,
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
ATTRIBUTE_CONTEXT = _require.ATTRIBUTE_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
COMMENT_CONTEXT = _require.COMMENT_CONTEXT,
DOCTYPE_CONTEXT = _require.DOCTYPE_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require.DOCTYPE_ATTRIBUTES_CONTEXT,
DOCTYPE_ATTRIBUTE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_CONTEXT,
SCRIPT_TAG_CONTEXT = _require.SCRIPT_TAG_CONTEXT,
STYLE_TAG_CONTEXT = _require.STYLE_TAG_CONTEXT;
var _require2 = require('./constants/ast-nodes'),
NODE_DOCUMENT = _require2.NODE_DOCUMENT;
var contextsMap = (_contextsMap = {}, _defineProperty(_contextsMap, TAG_CONTENT_CONTEXT, tagContent), _defineProperty(_contextsMap, TAG_CONTEXT, tag), _defineProperty(_contextsMap, TAG_NAME_CONTEXT, tagName), _defineProperty(_contextsMap, ATTRIBUTES_CONTEXT, attributes), _defineProperty(_contextsMap, ATTRIBUTE_CONTEXT, attribute), _defineProperty(_contextsMap, ATTRIBUTE_VALUE_CONTEXT, attributeValue), _defineProperty(_contextsMap, COMMENT_CONTEXT, comment), _defineProperty(_contextsMap, DOCTYPE_CONTEXT, doctype), _defineProperty(_contextsMap, DOCTYPE_ATTRIBUTES_CONTEXT, doctypeAttributes), _defineProperty(_contextsMap, DOCTYPE_ATTRIBUTE_CONTEXT, doctypeAttribute), _defineProperty(_contextsMap, SCRIPT_TAG_CONTEXT, scriptTag), _defineProperty(_contextsMap, STYLE_TAG_CONTEXT, styleTag), _contextsMap);
function processTokens(tokens, state, positionOffset) {
var tokenIndex = state.caretPosition - positionOffset;
while (tokenIndex < tokens.length) {
var token = tokens[tokenIndex];
var contextHandler = contextsMap[state.currentContext.type];
state = contextHandler(token, state);
tokenIndex = state.caretPosition - positionOffset;
}
return state;
}
module.exports = function constructTree() {
var tokens = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
var existingState = arguments.length > 1 ? arguments[1] : undefined;
var state = existingState;
if (existingState === undefined) {
var rootContext = {
type: TAG_CONTENT_CONTEXT,
parentRef: undefined,
content: []
};
var rootNode = {
nodeType: NODE_DOCUMENT,
parentRef: undefined,
content: {}
};
state = {
caretPosition: 0,
currentContext: rootContext,
currentNode: rootNode,
rootNode: rootNode
};
}
var positionOffset = state.caretPosition;
processTokens(tokens, state, positionOffset);
return {
state: state,
ast: state.rootNode
};
};

82
app/node_modules/hyntax/lib-es5/helpers.js generated vendored Normal file
View File

@@ -0,0 +1,82 @@
"use strict";
var OPEN_TAG_NAME_PATTERN = /^<(\S+)/;
var CLOSE_TAG_NAME_PATTERN = /^<\/((?:.|\n)*)>$/;
function prettyJSON(obj) {
return JSON.stringify(obj, null, 2);
}
/**
* Clear tree of nodes from everything
* "parentRef" properties so the tree
* can be easily stringified into JSON.
*/
function clearAst(ast) {
var cleanAst = ast;
delete cleanAst.parentRef;
if (Array.isArray(ast.content.children)) {
cleanAst.content.children = ast.content.children.map(function (node) {
return clearAst(node);
});
}
return cleanAst;
}
function parseOpenTagName(openTagStartTokenContent) {
var match = openTagStartTokenContent.match(OPEN_TAG_NAME_PATTERN);
if (match === null) {
throw new Error('Unable to parse open tag name.\n' + "".concat(openTagStartTokenContent, " does not match pattern of opening tag."));
}
return match[1].toLowerCase();
}
function parseCloseTagName(closeTagTokenContent) {
var match = closeTagTokenContent.match(CLOSE_TAG_NAME_PATTERN);
if (match === null) {
throw new Error('Unable to parse close tag name.\n' + "".concat(closeTagTokenContent, " does not match pattern of closing tag."));
}
return match[1].trim().toLowerCase();
}
function calculateTokenCharactersRange(state, _ref) {
var keepBuffer = _ref.keepBuffer;
if (keepBuffer === undefined) {
throw new Error('Unable to calculate characters range for token.\n' + '"keepBuffer" parameter is not specified to decide if ' + 'the decision buffer is a part of characters range.');
}
var startPosition = state.caretPosition - (state.accumulatedContent.length - 1) - state.decisionBuffer.length;
var endPosition;
if (!keepBuffer) {
endPosition = state.caretPosition - state.decisionBuffer.length;
} else {
endPosition = state.caretPosition;
}
return {
startPosition: startPosition,
endPosition: endPosition
};
}
function isWhitespace(_char) {
return _char === ' ' || _char === '\n' || _char === '\t';
}
module.exports = {
prettyJSON: prettyJSON,
clearAst: clearAst,
parseOpenTagName: parseOpenTagName,
parseCloseTagName: parseCloseTagName,
calculateTokenCharactersRange: calculateTokenCharactersRange,
isWhitespace: isWhitespace
};

83
app/node_modules/hyntax/lib-es5/stream-tokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,83 @@
"use strict";
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
/* Transpiled (Babel, ES5 target) build output. The `_`-prefixed functions are
   injected Babel runtime helpers that emulate ES2015 class inheritance;
   additional helpers used below (_typeof, _classCallCheck, _createClass,
   _inherits) are defined earlier in this file. */
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
var _require = require('stream'),
  Transform = _require.Transform;
var tokenize = require('./tokenize');
/**
 * Transform stream that tokenizes incoming HTML chunks.
 *
 * Writable side accepts strings or Buffers; readable side emits arrays of
 * token objects (readableObjectMode is enabled in the constructor).
 * Intermediate tokenizer state is carried across chunks via
 * `currentTokenizerState` so tokens split over chunk boundaries are handled.
 */
var StreamTokenizer = /*#__PURE__*/function (_Transform) {
  _inherits(StreamTokenizer, _Transform);
  var _super = _createSuper(StreamTokenizer);
  function StreamTokenizer(options) {
    var _this;
    _classCallCheck(this, StreamTokenizer);
    // decodeStrings: false keeps written strings as strings (no Buffer
    // round-trip); readableObjectMode lets _transform push token arrays.
    _this = _super.call(this, Object.assign({}, options, {
      decodeStrings: false,
      readableObjectMode: true
    }));
    // Tokenizer resumes from this state on every subsequent chunk.
    _this.currentTokenizerState = undefined;
    _this.setDefaultEncoding('utf8');
    return _this;
  }
  _createClass(StreamTokenizer, [{
    key: "_transform",
    // Tokenizes one chunk (isFinalChunk: false), saves the tokenizer state
    // for the next chunk and pushes the produced tokens downstream.
    value: function _transform(chunk, encoding, callback) {
      var chunkString = chunk;
      if (Buffer.isBuffer(chunk)) {
        chunkString = chunk.toString();
      }
      var _tokenize = tokenize(chunkString, this.currentTokenizerState, {
        isFinalChunk: false
      }),
        state = _tokenize.state,
        tokens = _tokenize.tokens;
      this.currentTokenizerState = state;
      callback(null, tokens);
    }
  }, {
    key: "_flush",
    // Final pass with an empty chunk (isFinalChunk: true) so the tokenizer
    // can flush any token still buffered in its state.
    value: function _flush(callback) {
      var tokenizeResults = tokenize('', this.currentTokenizerState, {
        isFinalChunk: true
      });
      this.push(tokenizeResults.tokens);
      callback();
    }
  }]);
  return StreamTokenizer;
}(Transform);
module.exports = StreamTokenizer;

View File

@@ -0,0 +1,63 @@
"use strict";
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }
function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); }
function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; }
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }
function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }
var _require = require('stream'),
Transform = _require.Transform;
var constructTree = require('./construct-tree');
var StreamTreeConstructor = /*#__PURE__*/function (_Transform) {
_inherits(StreamTreeConstructor, _Transform);
var _super = _createSuper(StreamTreeConstructor);
function StreamTreeConstructor(options) {
var _this;
_classCallCheck(this, StreamTreeConstructor);
_this = _super.call(this, Object.assign({}, options, {
objectMode: true,
readableObjectMode: true
}));
_this.currentState = undefined;
return _this;
}
_createClass(StreamTreeConstructor, [{
key: "_transform",
value: function _transform(tokensChunk, encoding, callback) {
var _constructTree = constructTree(tokensChunk, this.currentState),
state = _constructTree.state,
ast = _constructTree.ast;
this.currentState = state;
callback(null, ast);
}
}]);
return StreamTreeConstructor;
}(Transform);
module.exports = StreamTreeConstructor;

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require3.ATTRIBUTE_ASSIGNMENT_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
var syntaxHandlers = {
equal: function equal(state, tokens, contextFactories, options) {
var attributeValueContext = contextFactories[ATTRIBUTE_VALUE_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
tokens.push({
type: TOKEN_ATTRIBUTE_ASSIGNMENT,
content: '' + state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = attributeValueContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '=') {
return function (state, tokens) {
return syntaxHandlers.equal(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeKeyContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_ASSIGNMENT_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,49 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_KEY_CONTEXT = _require3.ATTRIBUTE_KEY_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
var syntaxHandlers = {
keyEnd: function keyEnd(state, tokens, contextFactories, options) {
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
tokens.push({
type: TOKEN_ATTRIBUTE_KEY,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributesContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
var KEY_BREAK_CHARS = [' ', '\n', '\t', '=', '/', '>'];
if (KEY_BREAK_CHARS.indexOf(chars) !== -1) {
return function (state, tokens) {
return syntaxHandlers.keyEnd(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeKeyContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_KEY_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,49 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_BARE_CONTEXT = _require3.ATTRIBUTE_VALUE_BARE_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
var syntaxHandlers = {
valueEnd: function valueEnd(state, tokens, contextFactories, options) {
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributesContext;
}
};
var BARE_VALUE_END_PATTERN = /\s/;
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (BARE_VALUE_END_PATTERN.test(chars) || chars === '>' || chars === '/') {
return function (state, tokens) {
return syntaxHandlers.valueEnd(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeValueBareContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_VALUE_BARE_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
var syntaxHandlers = {
wrapper: function wrapper(state, tokens, contextFactories, options) {
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = attributesContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === options.wrapper) {
return function (state, tokens) {
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeValueWrappedEndContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
var syntaxHandlers = {
wrapper: function wrapper(state, tokens, contextFactories, options) {
var attributeValueWrappedContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = attributeValueWrappedContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === options.wrapper) {
return function (state, tokens) {
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeValueWrappedStartContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,47 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT;
var syntaxHandlers = {
wrapper: function wrapper(state, tokens, contextFactories, options) {
var attributeValueWrappedEndContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_END_CONTEXT](contextFactories, options);
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
tokens.push({
type: TOKEN_ATTRIBUTE_VALUE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributeValueWrappedEndContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === options.wrapper) {
return function (state, tokens) {
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeValueWrappedContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,65 @@
'use strict';
var _require = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT = _require.ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT = _require.ATTRIBUTE_VALUE_BARE_CONTEXT;
var syntaxHandlers = {
wrapper: function wrapper(state, tokens, contextFactories, options) {
var attributeValueWrappedStartContext = contextFactories[ATTRIBUTE_VALUE_WRAPPED_START_CONTEXT](contextFactories, Object.assign({}, options, { wrapper: state.decisionBuffer }));
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributeValueWrappedStartContext;
},
bare: function bare(state, tokens, contextFactories, options) {
var attributeValueBareContext = contextFactories[ATTRIBUTE_VALUE_BARE_CONTEXT](contextFactories, options);
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributeValueBareContext;
},
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
var attributesContext = contextFactories[ATTRIBUTES_CONTEXT](contextFactories, options);
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributesContext;
}
};
var BARE_VALUE_PATTERN = /\S/;
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '"' || chars === '\'') {
return function (state, tokens) {
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
};
}
if (chars === '>' || chars === '/') {
return function (state, tokens) {
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
};
}
if (BARE_VALUE_PATTERN.test(chars)) {
return function (state, tokens) {
return syntaxHandlers.bare(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributeValueContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTE_VALUE_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,65 @@
'use strict';
var _require = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT = _require.OPEN_TAG_END_CONTEXT,
ATTRIBUTE_ASSIGNMENT_CONTEXT = _require.ATTRIBUTE_ASSIGNMENT_CONTEXT,
ATTRIBUTE_KEY_CONTEXT = _require.ATTRIBUTE_KEY_CONTEXT;
var syntaxHandlers = {
tagEnd: function tagEnd(state, tokens, contextFactories, options) {
var openTagEndContext = contextFactories[OPEN_TAG_END_CONTEXT](contextFactories, options);
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = openTagEndContext;
},
noneWhitespace: function noneWhitespace(state, tokens, contextFactories, options) {
var attributeKeyContext = contextFactories[ATTRIBUTE_KEY_CONTEXT](contextFactories, options);
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributeKeyContext;
},
equal: function equal(state, tokens, contextFactories, options) {
var attributeAssignmentContext = contextFactories[ATTRIBUTE_ASSIGNMENT_CONTEXT](contextFactories, options);
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = attributeAssignmentContext;
}
};
var ATTRIBUTE_KEY_PATTERN = /^\S/;
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '>' || chars === '/') {
return function (state, tokens) {
return syntaxHandlers.tagEnd(state, tokens, contextFactories, options);
};
}
if (chars === '=') {
return function (state, tokens) {
return syntaxHandlers.equal(state, tokens, contextFactories, options);
};
}
if (ATTRIBUTE_KEY_PATTERN.test(chars)) {
return function (state, tokens) {
return syntaxHandlers.noneWhitespace(state, tokens, contextFactories, options);
};
}
}
module.exports = function attributesContextFactory(contextFactories, options) {
return {
factoryName: ATTRIBUTES_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,74 @@
'use strict';

var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG,
  TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT,
  TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
  CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
  DATA_CONTEXT = _require3.DATA_CONTEXT;
/**
 * Maps the type of content within which the close tag was found to the
 * corresponding close-tag token type.
 *
 * @param withinContent — 'script' | 'style' | 'data'
 *   (any other value yields undefined)
 */
function getCloseTokenType(withinContent) {
  switch (withinContent) {
    case 'script':
      {
        return TOKEN_CLOSE_TAG_SCRIPT;
      }
    case 'style':
      {
        return TOKEN_CLOSE_TAG_STYLE;
      }
    case 'data':
      {
        return TOKEN_CLOSE_TAG;
      }
  }
}
var syntaxHandlers = {
  // Emits the whole buffered close tag (accumulated content plus the '>'
  // still in the decision buffer) and returns to the data context.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
    var tokenType = getCloseTokenType(options.withinContent);
    var dataContext = contextFactories[DATA_CONTEXT](contextFactories, options);
    // keepBuffer: true — the '>' in the decision buffer is part of this token
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    tokens.push({
      type: tokenType,
      content: '' + state.accumulatedContent + state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = dataContext;
  }
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars === '>') {
    return function (state, tokens) {
      return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
    };
  }
}
// Tokenizer context for a close tag ('</...>'); the token type emitted
// depends on options.withinContent (see getCloseTokenType).
module.exports = function closeTagContextFactory(contextFactories, options) {
  return {
    factoryName: CLOSE_TAG_CONTEXT,
    parseSyntax: function parseSyntax(chars) {
      return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
    }
  };
};

View File

@@ -0,0 +1,55 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
var _require3 = require('../constants/tokenizer-contexts'),
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT,
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT;
var syntaxHandlers = {
commentEnd: function commentEnd(state, tokens, contextFactories) {
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
var commentContentContext = contextFactories[COMMENT_END_CONTEXT](contextFactories);
tokens.push({
type: TOKEN_COMMENT_CONTENT,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = commentContentContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '-' || chars === '--') {
/**
* Signals to wait for more characters in
* the decision buffer to decide about syntax
*/
return function () {};
}
if (chars === '-->') {
return function (state, tokens) {
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
};
}
}
module.exports = function commentContentContextFactory(contextFactories, options) {
return {
factoryName: COMMENT_CONTENT_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
var _require3 = require('../constants/tokenizer-contexts'),
COMMENT_END_CONTEXT = _require3.COMMENT_END_CONTEXT,
DATA_CONTEXT = _require3.DATA_CONTEXT;
var syntaxHandlers = {
commentEnd: function commentEnd(state, tokens, contextFactories) {
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
tokens.push({
type: TOKEN_COMMENT_END,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = dataContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '>') {
return function (state, tokens) {
return syntaxHandlers.commentEnd(state, tokens, contextFactories, options);
};
}
}
module.exports = function commentEndContextFactory(contextFactories, options) {
return {
factoryName: COMMENT_END_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,54 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
var _require3 = require('../constants/tokenizer-contexts'),
COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT,
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
var syntaxHandlers = {
commentStart: function commentStart(state, tokens, contextFactories) {
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
var commentContentContext = contextFactories[COMMENT_CONTENT_CONTEXT](contextFactories);
tokens.push({
type: TOKEN_COMMENT_START,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = commentContentContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === '<' || chars === '<!' || chars === '<!-') {
/**
* Signals to wait for more characters in
* the decision buffer to decide about syntax
*/
return function () {};
}
if (chars === '<!--') {
return function (state, tokens) {
return syntaxHandlers.commentStart(state, tokens, contextFactories, options);
};
}
}
module.exports = function commentStartContextFactory(contextFactories, options) {
return {
factoryName: COMMENT_START_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,139 @@
'use strict';

var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_TEXT = _require2.TOKEN_TEXT;
var _require3 = require('../constants/tokenizer-contexts'),
  DATA_CONTEXT = _require3.DATA_CONTEXT,
  OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
  CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
  DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
  COMMENT_START_CONTEXT = _require3.COMMENT_START_CONTEXT;
// Builds a text token from the content accumulated so far (the decision
// buffer is excluded — keepBuffer: false).
function generateTextToken(state) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });
  return {
    type: TOKEN_TEXT,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}
// Each handler flushes any pending text token, rewinds the caret over the
// decision buffer so the markup prefix is re-processed, and switches the
// tokenizer to the corresponding context.
var syntaxHandlers = {
  // '<' followed by a word character: an open tag begins.
  openingCornerBraceWithText: function openingCornerBraceWithText(state, tokens, contextFactories) {
    var openTagStartContext = contextFactories[OPEN_TAG_START_CONTEXT](contextFactories);
    if (state.accumulatedContent.length !== 0) {
      tokens.push(generateTextToken(state));
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = openTagStartContext;
  },
  // '</': a close tag begins (withinContent: 'data' selects the plain
  // close-tag token type).
  openingCornerBraceWithSlash: function openingCornerBraceWithSlash(state, tokens, contextFactories) {
    var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'data' });
    if (state.accumulatedContent.length !== 0) {
      tokens.push(generateTextToken(state));
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = closeTagContext;
  },
  // '<!DOCTYPE' (case-insensitive): a doctype begins.
  doctypeStart: function doctypeStart(state, tokens, contextFactories) {
    var doctypeStartContext = contextFactories[DOCTYPE_START_CONTEXT](contextFactories);
    if (state.accumulatedContent.length !== 0) {
      tokens.push(generateTextToken(state));
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeStartContext;
  },
  // '<!--': a comment begins.
  commentStart: function commentStart(state, tokens, contextFactories) {
    var commentStartContext = contextFactories[COMMENT_START_CONTEXT](contextFactories);
    if (state.accumulatedContent.length !== 0) {
      tokens.push(generateTextToken(state));
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = commentStartContext;
  }
};
// Called at end of input: flushes any remaining text (accumulated content
// plus whatever is still sitting in the decision buffer) as a final token.
function handleDataContextContentEnd(state, tokens) {
  var textContent = '' + state.accumulatedContent + state.decisionBuffer;
  if (textContent.length !== 0) {
    var range = calculateTokenCharactersRange(state, { keepBuffer: false });
    tokens.push({
      type: TOKEN_TEXT,
      content: textContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
}
var INCOMPLETE_DOCTYPE_START = /<!\w*$/;
var COMPLETE_DOCTYPE_START = /<!DOCTYPE/i;
var OPEN_TAG_START_PATTERN = /^<\w/;
// NOTE: the order of these checks is significant — partial prefixes
// ('<', '<!', '<!-', '<!DOC…') must be recognized before the complete
// '<!--' / '<!DOCTYPE' forms, and those before the generic '<' + word case.
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars === '<' || chars === '<!' || chars === '<!-' || INCOMPLETE_DOCTYPE_START.test(chars)) {
    /**
     * Signals to wait for more characters in
     * the decision buffer to decide about syntax
     */
    return function () {};
  }
  if (chars === '<!--') {
    return function (state, tokens) {
      return syntaxHandlers.commentStart(state, tokens, contextFactories, options);
    };
  }
  if (COMPLETE_DOCTYPE_START.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.doctypeStart(state, tokens, contextFactories, options);
    };
  }
  if (OPEN_TAG_START_PATTERN.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.openingCornerBraceWithText(state, tokens, contextFactories, options);
    };
  }
  if (chars === '</') {
    return function (state, tokens) {
      return syntaxHandlers.openingCornerBraceWithSlash(state, tokens, contextFactories, options);
    };
  }
}
// Top-level tokenizer context: plain text between markup constructs.
module.exports = function dataContextFactory(contextFactories, options) {
  return {
    factoryName: DATA_CONTEXT,
    parseSyntax: function parseSyntax(chars) {
      return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
    },
    handleContentEnd: handleDataContextContentEnd
  };
};

View File

@@ -0,0 +1,48 @@
'use strict';
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
var syntaxHandlers = {
attributeEnd: function attributeEnd(state, tokens, contextFactories, options) {
var range = calculateTokenCharactersRange(state, { keepBuffer: false });
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE,
content: state.accumulatedContent,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.caretPosition -= state.decisionBuffer.length;
state.decisionBuffer = '';
state.currentContext = doctypeAttributesContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (isWhitespace(chars) || chars === '>') {
return function (state, tokens) {
return syntaxHandlers.attributeEnd(state, tokens, contextFactories, options);
};
}
}
module.exports = function doctypeAttributeBareContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
var syntaxHandlers = {
wrapper: function wrapper(state, tokens, contextFactories, options) {
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
var doctypeAttributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories, options);
tokens.push({
type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = doctypeAttributesContext;
}
};
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
if (chars === options.wrapper) {
return function (state, tokens) {
return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
};
}
}
module.exports = function doctypeAttributeWrappedEndContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,46 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
/**
 * Syntax handlers for the "doctype attribute wrapped start" context.
 */
var syntaxHandlers = {
  // Opening wrapper (quote) character reached: emit a wrapper-start
  // token containing the wrapper character and move into the wrapped
  // attribute-value context.
  wrapper: function wrapper(state, tokens, contextFactories, options) {
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    var doctypeAttributeWrappedContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT](contextFactories, options);
    tokens.push({
      type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
      content: state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
    // Clear both buffers and hand control to the next context.
    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = doctypeAttributeWrappedContext;
  }
};
/**
 * Reacts only to the wrapper character configured in `options.wrapper`;
 * returns a thunk invoking the `wrapper` handler, or undefined.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== options.wrapper) {
    return undefined;
  }
  return function (state, tokens) {
    return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
  };
}
module.exports = function doctypeAttributeWrappedStartContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,47 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT;
/**
 * Syntax handlers for the "doctype attribute (wrapped)" context.
 */
var syntaxHandlers = {
  // Closing wrapper character reached: emit the accumulated attribute
  // content as a TOKEN_DOCTYPE_ATTRIBUTE (buffer excluded), rewind the
  // caret so the buffered wrapper character is re-parsed by the
  // wrapped-end context, and switch to that context.
  wrapper: function wrapper(state, tokens, contextFactories, options) {
    var range = calculateTokenCharactersRange(state, { keepBuffer: false });
    var doctypeAttributeWrappedEndContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_END_CONTEXT](contextFactories, options);
    tokens.push({
      type: TOKEN_DOCTYPE_ATTRIBUTE,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeAttributeWrappedEndContext;
  }
};
/**
 * Reacts only to the wrapper character configured in `options.wrapper`;
 * everything else stays in the accumulated attribute content.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== options.wrapper) {
    return undefined;
  }
  return function (state, tokens) {
    return syntaxHandlers.wrapper(state, tokens, contextFactories, options);
  };
}
module.exports = function doctypeAttributeWrappedContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,66 @@
'use strict';
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/tokenizer-contexts'),
DOCTYPE_ATTRIBUTES_CONTEXT = _require2.DOCTYPE_ATTRIBUTES_CONTEXT,
DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
/**
 * Syntax handlers for the "doctype attributes" dispatch context.
 * None of them emit tokens directly; each resets the buffers, rewinds
 * the caret so the buffered character(s) are re-parsed by the next
 * context, and switches `state.currentContext`.
 */
var syntaxHandlers = {
  // Quote character seen: enter the wrapped-attribute-start context,
  // remembering which quote character acts as the wrapper.
  wrapper: function wrapper(state, tokens, contextFactories) {
    var doctypeAttributeWrappedStartContext = contextFactories[DOCTYPE_ATTRIBUTE_WRAPPED_START_CONTEXT](contextFactories, { wrapper: state.decisionBuffer });
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeAttributeWrappedStartContext;
  },
  // Any non-whitespace, non-quote, non-">" character: an unquoted
  // (bare) attribute begins.
  bare: function bare(state, tokens, contextFactories) {
    var doctypeAttributeBareStartContext = contextFactories[DOCTYPE_ATTRIBUTE_BARE_CONTEXT](contextFactories);
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeAttributeBareStartContext;
  },
  // ">" seen: the doctype is ending; let the doctype-end context
  // re-parse it and emit the end token.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    var doctypeEndContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeEndContext;
  }
};
/**
 * Dispatches the buffered character to one of the handlers:
 * a quote starts a wrapped attribute, ">" ends the doctype, and any
 * other non-whitespace character starts a bare attribute.
 * Whitespace yields undefined (ignored).
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  // Late-bound thunk: the handler is looked up on `syntaxHandlers`
  // at invocation time, exactly as the inline closures did.
  var thunk = function (handlerName) {
    return function (state, tokens) {
      return syntaxHandlers[handlerName](state, tokens, contextFactories, options);
    };
  };
  if (chars === '"' || chars === "'") {
    return thunk('wrapper');
  }
  if (chars === '>') {
    return thunk('closingCornerBrace');
  }
  if (!isWhitespace(chars)) {
    return thunk('bare');
  }
}
module.exports = function doctypeAttributesContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_ATTRIBUTES_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,47 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
DATA_CONTEXT = _require3.DATA_CONTEXT;
/**
 * Syntax handlers for the "doctype end" context.
 */
var syntaxHandlers = {
  // ">" reached: emit the doctype-end token (accumulated content plus
  // the buffered ">") and return to the top-level data context.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    var dataContext = contextFactories[DATA_CONTEXT](contextFactories);
    tokens.push({
      type: TOKEN_DOCTYPE_END,
      content: state.accumulatedContent + state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = dataContext;
  }
};
/**
 * Reacts only to ">", which closes the doctype; returns a thunk
 * invoking the `closingCornerBrace` handler, or undefined.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== '>') {
    return undefined;
  }
  return function (state, tokens) {
    return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
  };
}
module.exports = function doctypeEndContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_END_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,70 @@
'use strict';
var _require = require('../helpers'),
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
var _require3 = require('../constants/tokenizer-contexts'),
DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
/**
 * Builds a TOKEN_DOCTYPE_START token from the accumulated content
 * (e.g. "<!DOCTYPE"), excluding the decision buffer from the range.
 */
function generateDoctypeStartToken(state) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });
  return {
    type: TOKEN_DOCTYPE_START,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}
/**
 * Syntax handlers for the "doctype start" context. Both emit the
 * doctype-start token, rewind the caret so the buffered character is
 * re-parsed by the next context, and switch contexts.
 */
var syntaxHandlers = {
  // ">" right after the doctype name: no attributes, go straight to
  // the doctype-end context.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories) {
    var doctypeEndContext = contextFactories[DOCTYPE_END_CONTEXT](contextFactories);
    tokens.push(generateDoctypeStartToken(state));
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = doctypeEndContext;
  },
  // Whitespace after the doctype name: attributes may follow.
  whitespace: function whitespace(state, tokens, contextFactories) {
    var attributesContext = contextFactories[DOCTYPE_ATTRIBUTES_CONTEXT](contextFactories);
    tokens.push(generateDoctypeStartToken(state));
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = attributesContext;
  }
};
/**
 * Dispatches on the buffered character: whitespace moves to attribute
 * parsing, ">" ends the doctype; anything else keeps accumulating.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  // Late-bound thunk so the handler is resolved at invocation time.
  var thunk = function (handlerName) {
    return function (state, tokens) {
      return syntaxHandlers[handlerName](state, tokens, contextFactories, options);
    };
  };
  if (isWhitespace(chars)) {
    return thunk('whitespace');
  }
  if (chars === '>') {
    return thunk('closingCornerBrace');
  }
}
module.exports = function doctypeStartContextFactory(contextFactories, options) {
return {
factoryName: DOCTYPE_START_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,87 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
DATA_CONTEXT = _require3.DATA_CONTEXT,
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
/**
 * Maps a tag name to the open-tag-end token type:
 * "script" and "style" get dedicated token types, everything else the
 * generic TOKEN_OPEN_TAG_END.
 */
function getTokenType(tagName) {
  if (tagName === 'script') {
    return TOKEN_OPEN_TAG_END_SCRIPT;
  }
  if (tagName === 'style') {
    return TOKEN_OPEN_TAG_END_STYLE;
  }
  return TOKEN_OPEN_TAG_END;
}
/**
 * Instantiates the content context that follows the open tag's ">":
 * raw script/style content contexts for those tags, otherwise the
 * regular data context.
 */
function getContentContext(tagName, contextFactories, options) {
  var contextName = DATA_CONTEXT;
  if (tagName === 'script') {
    contextName = SCRIPT_CONTENT_CONTEXT;
  } else if (tagName === 'style') {
    contextName = STYLE_CONTENT_CONTEXT;
  }
  return contextFactories[contextName](contextFactories, options);
}
/**
 * Syntax handlers for the "open tag end" context.
 */
var syntaxHandlers = {
  // ">" reached: emit the tag-end token (type depends on the tag name
  // supplied via `options.tagName`) including the buffered ">", then
  // move to the appropriate content context.
  closingCornerBrace: function closingCornerBrace(state, tokens, contextFactories, options) {
    var range = calculateTokenCharactersRange(state, { keepBuffer: true });
    tokens.push({
      type: getTokenType(options.tagName),
      content: '' + state.accumulatedContent + state.decisionBuffer,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
    state.accumulatedContent = '';
    state.decisionBuffer = '';
    state.currentContext = getContentContext(options.tagName, contextFactories, options);
  }
};
/**
 * Reacts only to ">", which closes the open tag; returns a thunk
 * invoking the `closingCornerBrace` handler, or undefined.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  if (chars !== '>') {
    return undefined;
  }
  return function (state, tokens) {
    return syntaxHandlers.closingCornerBrace(state, tokens, contextFactories, options);
  };
}
module.exports = function openTagEndContextFactory(contextFactories, options) {
return {
factoryName: OPEN_TAG_END_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,188 @@
'use strict';
var _require = require('../helpers'),
parseOpenTagName = _require.parseOpenTagName,
isWhitespace = _require.isWhitespace,
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
/**
 * Shared implementation behind the six open-tag-start transitions.
 * The original file repeated this body six times, varying only the
 * token type, the target tag name and the next context; factoring it
 * out removes the duplication while keeping each public handler's
 * name, signature and behavior unchanged.
 *
 * Steps (order preserved from the originals):
 *  1. create the next context (factory first, then range calculation);
 *  2. emit the open-tag-start token from the accumulated content,
 *     excluding the decision buffer;
 *  3. reset the buffers and rewind the caret so the buffered
 *     character(s) are re-parsed by the next context.
 */
function finishOpenTagStart(state, tokens, contextFactories, tokenType, nextContextName, tagName) {
  var nextContext = contextFactories[nextContextName](contextFactories, { tagName: tagName });
  var range = calculateTokenCharactersRange(state, { keepBuffer: false });
  tokens.push({
    type: tokenType,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.caretPosition -= state.decisionBuffer.length;
  state.decisionBuffer = '';
  state.currentContext = nextContext;
}
// "<script" directly followed by ">" or "/".
function handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START_SCRIPT, OPEN_TAG_END_CONTEXT, 'script');
}
// "<style" directly followed by ">" or "/".
function handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START_STYLE, OPEN_TAG_END_CONTEXT, 'style');
}
// Any other tag directly followed by ">" or "/".
function handleTagEndAfterOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START, OPEN_TAG_END_CONTEXT, undefined);
}
// "<script" followed by whitespace: attributes may follow.
function handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START_SCRIPT, ATTRIBUTES_CONTEXT, 'script');
}
// "<style" followed by whitespace: attributes may follow.
function handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START_STYLE, ATTRIBUTES_CONTEXT, 'style');
}
// Any other tag followed by whitespace: attributes may follow.
function handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories) {
  finishOpenTagStart(state, tokens, contextFactories, TOKEN_OPEN_TAG_START, ATTRIBUTES_CONTEXT, undefined);
}
/**
 * Syntax handlers for the "open tag start" context: parse the tag name
 * out of the accumulated content and route to the matching
 * script/style/generic transition.
 */
var syntaxHandlers = {
  // ">" or "/" ends the tag-name portion immediately.
  tagEnd: function tagEnd(state, tokens, contextFactories, options) {
    var tagName = parseOpenTagName(state.accumulatedContent);
    switch (tagName) {
      case 'script':
        {
          handleTagEndAfterScriptOpenTagStart(state, tokens, contextFactories, options);
          break;
        }
      case 'style':
        {
          handleTagEndAfterStyleOpenTagStart(state, tokens, contextFactories, options);
          break;
        }
      default:
        {
          handleTagEndAfterOpenTagStart(state, tokens, contextFactories, options);
        }
    }
  },
  // Whitespace after the tag name: attributes may follow.
  whitespace: function whitespace(state, tokens, contextFactories, options) {
    var tagName = parseOpenTagName(state.accumulatedContent);
    switch (tagName) {
      case 'script':
        {
          handleWhitespaceAfterScriptOpenTagStart(state, tokens, contextFactories, options);
          break;
        }
      case 'style':
        {
          handleWhitespaceAfterStyleOpenTagStart(state, tokens, contextFactories, options);
          break;
        }
      default:
        {
          handleWhitespaceAfterOpenTagStart(state, tokens, contextFactories, options);
        }
    }
  }
};
/**
 * Dispatches on the buffered character: ">" or "/" terminates the tag
 * name, whitespace switches to attribute parsing; anything else keeps
 * accumulating the tag name.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  // Late-bound thunk so the handler is resolved at invocation time.
  var thunk = function (handlerName) {
    return function (state, tokens) {
      return syntaxHandlers[handlerName](state, tokens, contextFactories, options);
    };
  };
  if (chars === '>' || chars === '/') {
    return thunk('tagEnd');
  }
  if (isWhitespace(chars)) {
    return thunk('whitespace');
  }
}
module.exports = function openTagStartContextFactory(contextFactories, options) {
return {
factoryName: OPEN_TAG_START_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,61 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT;
var _require3 = require('../constants/tokenizer-contexts'),
SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
/**
 * Syntax handlers for the raw "script tag content" context.
 */
var syntaxHandlers = {
  // A complete "</script ...>" was buffered: emit the accumulated raw
  // content (if any) as a single token, rewind the caret so the
  // buffered closing tag is re-parsed by the close-tag context, and
  // switch to that context.
  closingScriptTag: function closingScriptTag(state, tokens, contextFactories) {
    var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'script' });
    if (state.accumulatedContent !== '') {
      var range = calculateTokenCharactersRange(state, { keepBuffer: false });
      tokens.push({
        type: TOKEN_SCRIPT_TAG_CONTENT,
        content: state.accumulatedContent,
        startPosition: range.startPosition,
        endPosition: range.endPosition
      });
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = closeTagContext;
  }
};
// Matches a "</..." sequence that has not yet been closed with ">".
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// Matches a complete closing </script> tag (case-insensitive).
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;
/**
 * While the buffer could still become a closing tag, returns a no-op
 * handler so the tokenizer keeps buffering; once a full closing
 * </script> tag is buffered, returns a thunk for `closingScriptTag`.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var mayBecomeClosingTag = chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars);
  if (mayBecomeClosingTag) {
    // Wait for more characters before deciding.
    return function () {};
  }
  if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.closingScriptTag(state, tokens, contextFactories, options);
    };
  }
}
module.exports = function scriptTagContentContextFactory(contextFactories, options) {
return {
factoryName: SCRIPT_CONTENT_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

View File

@@ -0,0 +1,61 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT;
var _require3 = require('../constants/tokenizer-contexts'),
STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT,
CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT;
/**
 * Syntax handlers for the raw "style tag content" context.
 */
var syntaxHandlers = {
  // A complete "</style ...>" was buffered: emit the accumulated raw
  // content (if any) as a single token, rewind the caret so the
  // buffered closing tag is re-parsed by the close-tag context, and
  // switch to that context.
  closingStyleTag: function closingStyleTag(state, tokens, contextFactories) {
    var closeTagContext = contextFactories[CLOSE_TAG_CONTEXT](contextFactories, { withinContent: 'style' });
    if (state.accumulatedContent !== '') {
      var range = calculateTokenCharactersRange(state, { keepBuffer: false });
      tokens.push({
        type: TOKEN_STYLE_TAG_CONTENT,
        content: state.accumulatedContent,
        startPosition: range.startPosition,
        endPosition: range.endPosition
      });
    }
    state.accumulatedContent = '';
    state.caretPosition -= state.decisionBuffer.length;
    state.decisionBuffer = '';
    state.currentContext = closeTagContext;
  }
};
// Matches a "</..." sequence that has not yet been closed with ">".
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
// Matches a complete closing </style> tag (case-insensitive).
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;
/**
 * While the buffer could still become a closing tag, returns a no-op
 * handler so the tokenizer keeps buffering; once a full closing
 * </style> tag is buffered, returns a thunk for `closingStyleTag`.
 */
function _parseSyntax(chars, syntaxHandlers, contextFactories, options) {
  var mayBecomeClosingTag = chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars);
  if (mayBecomeClosingTag) {
    // Wait for more characters before deciding.
    return function () {};
  }
  if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
    return function (state, tokens) {
      return syntaxHandlers.closingStyleTag(state, tokens, contextFactories, options);
    };
  }
}
module.exports = function styleTagContentContextFactory(contextFactories, options) {
return {
factoryName: STYLE_CONTENT_CONTEXT,
parseSyntax: function parseSyntax(chars) {
return _parseSyntax(chars, syntaxHandlers, contextFactories, options);
}
};
};

123
app/node_modules/hyntax/lib-es5/tokenize.js generated vendored Normal file
View File

@@ -0,0 +1,123 @@
"use strict";
var _contextHandlersMap;
/**
 * Babel helper: assigns `value` to `obj[key]` and returns `obj`.
 * When the key already exists it goes through Object.defineProperty
 * with explicit enumerable/configurable/writable descriptors;
 * otherwise a plain assignment is used.
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
var dataContext = require('./tokenizer-context-handlers/data');
var openTagStartContext = require('./tokenizer-context-handlers/open-tag-start');
var closeTagContext = require('./tokenizer-context-handlers/close-tag');
var openTagEndContext = require('./tokenizer-context-handlers/open-tag-end');
var attributesContext = require('./tokenizer-context-handlers/attributes');
var attributeKeyContext = require('./tokenizer-context-handlers/attribute-key');
var attributeValueContext = require('./tokenizer-context-handlers/attribute-value');
var attributeValueBareContext = require('./tokenizer-context-handlers/attribute-value-bare');
var attributeValueWrappedContext = require('./tokenizer-context-handlers/attribute-value-wrapped');
var scriptContentContext = require('./tokenizer-context-handlers/script-tag-content');
var styleContentContext = require('./tokenizer-context-handlers/style-tag-content');
var doctypeStartContext = require('./tokenizer-context-handlers/doctype-start');
var doctypeEndContextFactory = require('./tokenizer-context-handlers/doctype-end');
var doctypeAttributesContext = require('./tokenizer-context-handlers/doctype-attributes');
var doctypeAttributeWrappedContext = require('./tokenizer-context-handlers/doctype-attribute-wrapped');
var doctypeAttributeBareEndContext = require('./tokenizer-context-handlers/doctype-attribute-bare');
var commentContentContext = require('./tokenizer-context-handlers/comment-content');
var _require = require('./constants/tokenizer-contexts'),
DATA_CONTEXT = _require.DATA_CONTEXT,
OPEN_TAG_START_CONTEXT = _require.OPEN_TAG_START_CONTEXT,
CLOSE_TAG_CONTEXT = _require.CLOSE_TAG_CONTEXT,
ATTRIBUTES_CONTEXT = _require.ATTRIBUTES_CONTEXT,
OPEN_TAG_END_CONTEXT = _require.OPEN_TAG_END_CONTEXT,
ATTRIBUTE_KEY_CONTEXT = _require.ATTRIBUTE_KEY_CONTEXT,
ATTRIBUTE_VALUE_CONTEXT = _require.ATTRIBUTE_VALUE_CONTEXT,
ATTRIBUTE_VALUE_BARE_CONTEXT = _require.ATTRIBUTE_VALUE_BARE_CONTEXT,
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
SCRIPT_CONTENT_CONTEXT = _require.SCRIPT_CONTENT_CONTEXT,
STYLE_CONTENT_CONTEXT = _require.STYLE_CONTENT_CONTEXT,
DOCTYPE_START_CONTEXT = _require.DOCTYPE_START_CONTEXT,
DOCTYPE_END_CONTEXT = _require.DOCTYPE_END_CONTEXT,
DOCTYPE_ATTRIBUTES_CONTEXT = _require.DOCTYPE_ATTRIBUTES_CONTEXT,
DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
COMMENT_CONTENT_CONTEXT = _require.COMMENT_CONTENT_CONTEXT;
var contextHandlersMap = (_contextHandlersMap = {}, _defineProperty(_contextHandlersMap, DATA_CONTEXT, dataContext), _defineProperty(_contextHandlersMap, OPEN_TAG_START_CONTEXT, openTagStartContext), _defineProperty(_contextHandlersMap, CLOSE_TAG_CONTEXT, closeTagContext), _defineProperty(_contextHandlersMap, ATTRIBUTES_CONTEXT, attributesContext), _defineProperty(_contextHandlersMap, OPEN_TAG_END_CONTEXT, openTagEndContext), _defineProperty(_contextHandlersMap, ATTRIBUTE_KEY_CONTEXT, attributeKeyContext), _defineProperty(_contextHandlersMap, ATTRIBUTE_VALUE_CONTEXT, attributeValueContext), _defineProperty(_contextHandlersMap, ATTRIBUTE_VALUE_BARE_CONTEXT, attributeValueBareContext), _defineProperty(_contextHandlersMap, ATTRIBUTE_VALUE_WRAPPED_CONTEXT, attributeValueWrappedContext), _defineProperty(_contextHandlersMap, SCRIPT_CONTENT_CONTEXT, scriptContentContext), _defineProperty(_contextHandlersMap, STYLE_CONTENT_CONTEXT, styleContentContext), _defineProperty(_contextHandlersMap, DOCTYPE_START_CONTEXT, doctypeStartContext), _defineProperty(_contextHandlersMap, DOCTYPE_END_CONTEXT, doctypeEndContextFactory), _defineProperty(_contextHandlersMap, DOCTYPE_ATTRIBUTES_CONTEXT, doctypeAttributesContext), _defineProperty(_contextHandlersMap, DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT, doctypeAttributeWrappedContext), _defineProperty(_contextHandlersMap, DOCTYPE_ATTRIBUTE_BARE_CONTEXT, doctypeAttributeBareEndContext), _defineProperty(_contextHandlersMap, COMMENT_CONTENT_CONTEXT, commentContentContext), _contextHandlersMap);
/**
 * Feeds `chars` character by character into the handler of the
 * current tokenizer context.
 *
 * `positionOffset` converts the absolute `state.caretPosition` into an
 * index inside this chunk. The loop does not advance the index itself:
 * context handlers move `state.caretPosition` forward (or rewind it so
 * buffered characters are re-parsed by a new context), and the index
 * is recomputed after every call.
 * On the final chunk the current context may flush leftover content
 * via its optional `handleContentEnd` hook.
 */
function tokenizeChars(chars, state, tokens, _ref) {
  var isFinalChunk = _ref.isFinalChunk,
      positionOffset = _ref.positionOffset;
  var charIndex = state.caretPosition - positionOffset;
  while (charIndex < chars.length) {
    var context = contextHandlersMap[state.currentContext];
    state.decisionBuffer += chars[charIndex];
    context.parseSyntax(state.decisionBuffer, state, tokens);
    charIndex = state.caretPosition - positionOffset;
  }
  if (isFinalChunk) {
    var _context = contextHandlersMap[state.currentContext]; // Move the caret back, as at this point
    // it in the position outside of chars array,
    // and it should not be taken into account
    // when calculating characters range
    state.caretPosition--;
    if (_context.handleContentEnd !== undefined) {
      _context.handleContentEnd(state, tokens);
    }
  }
}
/**
 * Tokenizes an HTML `content` string, optionally continuing from a
 * previous call's state (streaming use).
 *
 * @param {string} [content='']     chunk of HTML to tokenize
 * @param {Object} [existingState]  state returned by a previous call;
 *                                  omit to start from scratch
 * @param {Object} [options]        `{ isFinalChunk }`, defaults to true
 * @returns {{ state: Object, tokens: Array }} updated state (pass it
 *          to the next call) and the tokens produced by THIS chunk only
 */
function tokenize() {
  var content = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : '';
  var existingState = arguments.length > 1 ? arguments[1] : undefined;
  var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
      isFinalChunk = _ref2.isFinalChunk;
  isFinalChunk = isFinalChunk === undefined ? true : isFinalChunk;
  var state;
  if (existingState !== undefined) {
    // Shallow copy so the caller's state object is not mutated.
    state = Object.assign({}, existingState);
  } else {
    state = {
      currentContext: DATA_CONTEXT,
      contextParams: {},
      decisionBuffer: '',
      accumulatedContent: '',
      caretPosition: 0
    };
  }
  // Re-prepend any undecided characters from the previous chunk and
  // shift the position offset back accordingly.
  var chars = state.decisionBuffer + content;
  var tokens = [];
  var positionOffset = state.caretPosition - state.decisionBuffer.length;
  tokenizeChars(chars, state, tokens, {
    isFinalChunk: isFinalChunk,
    positionOffset: positionOffset
  });
  return {
    state: state,
    tokens: tokens
  };
}
module.exports = tokenize;

View File

@@ -0,0 +1,35 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_ASSIGNMENT = _require2.TOKEN_ATTRIBUTE_ASSIGNMENT;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_CONTEXT = _require3.ATTRIBUTE_VALUE_CONTEXT;
/**
 * "=" reached between an attribute key and its value: emit a
 * TOKEN_ATTRIBUTE_ASSIGNMENT token containing the buffered "=" and
 * switch to the attribute-value context.
 */
function equal(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
}
/**
 * Context entry point: reacts only to "=", which marks an attribute
 * assignment; any other character is ignored.
 */
function parseSyntax(chars, state, tokens) {
  if (chars !== '=') {
    return undefined;
  }
  return equal(state, tokens);
}
// Context-handler module interface expected by contextHandlersMap.
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,43 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_KEY = _require2.TOKEN_ATTRIBUTE_KEY;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
/**
 * Finalizes an attribute key: emits a TOKEN_ATTRIBUTE_KEY token from
 * the accumulated content (buffer excluded) and returns to the
 * attributes dispatch context, which re-parses the break character.
 */
function keyEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_KEY,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}
/**
 * True when `chars` terminates an attribute key: assignment ("="),
 * whitespace, self-closing slash, or the end of the open tag (">").
 * Fix: "\r" is now treated as whitespace too — previously a carriage
 * return (valid HTML ASCII whitespace) was accumulated into the key
 * token, unlike the sibling attribute-value handler which breaks on
 * all whitespace via isWhitespace.
 */
function isKeyBreak(chars) {
  return chars === '=' || chars === ' ' || chars === '\n' || chars === '\t' || chars === '\r' || chars === '/' || chars === '>';
}
/**
 * Accumulates characters into the attribute key until a key-break
 * character appears, then finalizes the key via `keyEnd`.
 */
function parseSyntax(chars, state, tokens) {
  if (isKeyBreak(chars)) {
    return keyEnd(state, tokens);
  }
  // Not a break: move the buffered character into the accumulated
  // key content and advance the caret.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange,
isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
/**
 * Finalizes a bare (unquoted) attribute value: emits a
 * TOKEN_ATTRIBUTE_VALUE token from the accumulated content (buffer
 * excluded) and returns to the attributes dispatch context.
 */
function valueEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}
/**
 * Accumulates characters into a bare attribute value until whitespace,
 * ">" or "/" ends it, then finalizes via `valueEnd`.
 */
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars) || chars === '>' || chars === '/') {
    return valueEnd(state, tokens);
  }
  // Not a terminator: move the buffered character into the value
  // content and advance the caret.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
/**
 * Closing quote of a wrapped attribute value reached: emits a
 * TOKEN_ATTRIBUTE_VALUE_WRAPPER_END token containing the quote,
 * returns to the attributes context and removes the remembered
 * wrapper character from `state.contextParams`.
 */
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  delete state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT];
}
/**
 * Reacts only to the wrapper (quote) character stored in
 * `state.contextParams` for the wrapped-value context; everything
 * else is ignored.
 */
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,37 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
/**
 * Opening quote of a wrapped attribute value reached: emits a
 * TOKEN_ATTRIBUTE_VALUE_WRAPPER_START token containing the quote and
 * moves to the wrapped-value context.
 */
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
}
/**
 * Reacts only to the wrapper (quote) character stored in
 * `state.contextParams` for the wrapped-value context; everything
 * else is ignored.
 */
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,51 @@
"use strict";
// Tokenizer context: inside a quoted ("wrapped") attribute value.
// Characters accumulate until the quote that opened the value appears
// again; then a TOKEN_ATTRIBUTE_VALUE token (the accumulated content)
// and a TOKEN_ATTRIBUTE_VALUE_WRAPPER_END token (the closing quote) are
// emitted and control returns to the attributes context.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_ATTRIBUTE_VALUE = _require2.TOKEN_ATTRIBUTE_VALUE,
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require2.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
  ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT,
  ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require3.ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
// Closing quote found: emit value + end-wrapper tokens, reset buffers
// and go back to the attributes context.
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // The closing quote sits one position past the value's range.
  var endWrapperPosition = range.endPosition + 1;
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  }, {
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: endWrapperPosition,
    endPosition: endWrapperPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  state.caretPosition++;
  // Clear the wrapper info so the next wrapped value starts fresh.
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = undefined;
}
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
  // Not the closing quote: fold the character into the value.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,63 @@
"use strict";
// Tokenizer context: start of an attribute value (right after "=").
// Decides between a quoted ("wrapped") value, a bare value, or an
// immediate tag end.
var _require = require('../helpers'),
  isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/tokenizer-contexts'),
  ATTRIBUTE_VALUE_WRAPPED_CONTEXT = _require2.ATTRIBUTE_VALUE_WRAPPED_CONTEXT,
  ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
  ATTRIBUTE_VALUE_BARE_CONTEXT = _require2.ATTRIBUTE_VALUE_BARE_CONTEXT;
var _require3 = require('../constants/token-types'),
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require3.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START;
// Opening quote: emit the start-wrapper token, remember which quote
// character was used, and switch to the wrapped-value context.
function wrapper(state, tokens) {
  var wrapper = state.decisionBuffer;
  tokens.push({
    type: TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
    content: wrapper,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_WRAPPED_CONTEXT;
  state.contextParams[ATTRIBUTE_VALUE_WRAPPED_CONTEXT] = {
    wrapper: wrapper
  };
  state.caretPosition++;
}
// Unquoted value: keep the character and continue in the bare-value context.
function bare(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_BARE_CONTEXT;
  state.caretPosition++;
}
// ">" or "/" before any value: hand back to the attributes context.
// The caret is NOT advanced, so the same character is re-parsed there.
function tagEnd(state) {
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
  if (chars === '"' || chars === '\'') {
    return wrapper(state, tokens);
  }
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return bare(state, tokens);
  }
  // Whitespace between "=" and the value: skip it.
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,69 @@
"use strict";
// Tokenizer context: between attributes inside an open tag. Routes to
// tag end (">" / "/"), an attribute key (any other non-whitespace), or
// emits a TOKEN_ATTRIBUTE_ASSIGNMENT token for "=".
var _require = require('../helpers'),
  isWhitespace = _require.isWhitespace,
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/tokenizer-contexts'),
  ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
  OPEN_TAG_END_CONTEXT = _require2.OPEN_TAG_END_CONTEXT,
  ATTRIBUTE_VALUE_CONTEXT = _require2.ATTRIBUTE_VALUE_CONTEXT,
  ATTRIBUTE_KEY_CONTEXT = _require2.ATTRIBUTE_KEY_CONTEXT;
var _require3 = require('../constants/token-types'),
  TOKEN_ATTRIBUTE_ASSIGNMENT = _require3.TOKEN_ATTRIBUTE_ASSIGNMENT;
// ">" or "/": carry the tag name over to the open-tag-end context and
// clear this context's params. The caret is not advanced, so the same
// character is re-parsed there.
function tagEnd(state) {
  var tagName = state.contextParams[ATTRIBUTES_CONTEXT].tagName;
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = OPEN_TAG_END_CONTEXT;
  state.contextParams[OPEN_TAG_END_CONTEXT] = {
    tagName: tagName
  };
  state.contextParams[ATTRIBUTES_CONTEXT] = undefined;
}
// Non-whitespace: start reading an attribute key.
function noneWhitespace(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_KEY_CONTEXT;
  state.caretPosition++;
}
// "=": emit the assignment token and switch to the attribute-value context.
function equal(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  tokens.push({
    type: TOKEN_ATTRIBUTE_ASSIGNMENT,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTE_VALUE_CONTEXT;
  state.caretPosition++;
}
function parseSyntax(chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens);
  }
  if (chars === '=') {
    return equal(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return noneWhitespace(state, tokens);
  }
  // Whitespace between attributes: skip.
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_CLOSE_TAG = _require2.TOKEN_CLOSE_TAG;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
function closingCornerBrace(state, tokens) {
var range = calculateTokenCharactersRange(state, {
keepBuffer: true
});
tokens.push({
type: TOKEN_CLOSE_TAG,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = DATA_CONTEXT;
state.caretPosition++;
}
function parseSyntax(chars, state, tokens) {
if (chars === '>') {
return closingCornerBrace(state, tokens);
}
state.accumulatedContent += state.decisionBuffer;
state.decisionBuffer = '';
state.caretPosition++;
}
module.exports = {
parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
// Tokenizer context: inside an HTML comment. Accumulates content until
// the full "-->" sequence arrives, then emits TOKEN_COMMENT_CONTENT
// followed by TOKEN_COMMENT_END and returns to the data context.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END,
  TOKEN_COMMENT_CONTENT = _require2.TOKEN_COMMENT_CONTENT;
var _require3 = require('../constants/tokenizer-contexts'),
  DATA_CONTEXT = _require3.DATA_CONTEXT;
var COMMENT_END = '-->';
function commentEnd(state, tokens) {
  var contentRange = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // The "-->" token immediately follows the content's range.
  var commentEndRange = {
    startPosition: contentRange.endPosition + 1,
    endPosition: contentRange.endPosition + COMMENT_END.length
  };
  tokens.push({
    type: TOKEN_COMMENT_CONTENT,
    content: state.accumulatedContent,
    startPosition: contentRange.startPosition,
    endPosition: contentRange.endPosition
  });
  tokens.push({
    type: TOKEN_COMMENT_END,
    content: state.decisionBuffer,
    startPosition: commentEndRange.startPosition,
    endPosition: commentEndRange.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}
function parseSyntax(chars, state, tokens) {
  // A lone "-" or "--" may be the start of "-->"; keep buffering.
  if (chars === '-' || chars === '--') {
    state.caretPosition++;
    return;
  }
  if (chars === COMMENT_END) {
    return commentEnd(state, tokens);
  }
  // Anything else is comment text: fold it into the content.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,38 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_END = _require2.TOKEN_COMMENT_END;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
function commentEnd(state, tokens) {
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
tokens.push({
type: TOKEN_COMMENT_END,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = DATA_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
if (chars === '>') {
return commentEnd(state, tokens);
}
state.accumulatedContent += state.decisionBuffer;
state.decisionBuffer = '';
}
module.exports = {
parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,35 @@
'use strict';
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
var _require3 = require('../constants/tokenizer-contexts'),
COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
function commentStart(state, tokens) {
var range = calculateTokenCharactersRange(state, { keepBuffer: true });
tokens.push({
type: TOKEN_COMMENT_START,
content: state.accumulatedContent + state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = COMMENT_CONTENT_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
if (chars === '<!--') {
return commentStart(state, tokens);
}
}
module.exports = {
parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,142 @@
"use strict";
// Tokenizer context: plain text ("data") between tags. Buffers
// characters until a construct start is recognized — an open tag, a
// close tag, a comment, or a doctype — emitting a TOKEN_TEXT token for
// any text accumulated so far before switching context.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_TEXT = _require2.TOKEN_TEXT,
  TOKEN_COMMENT_START = _require2.TOKEN_COMMENT_START;
var _require3 = require('../constants/tokenizer-contexts'),
  OPEN_TAG_START_CONTEXT = _require3.OPEN_TAG_START_CONTEXT,
  CLOSE_TAG_CONTEXT = _require3.CLOSE_TAG_CONTEXT,
  DOCTYPE_START_CONTEXT = _require3.DOCTYPE_START_CONTEXT,
  COMMENT_CONTENT_CONTEXT = _require3.COMMENT_CONTENT_CONTEXT;
var COMMENT_START = '<!--';
// Build a TOKEN_TEXT token from the accumulated content (decision
// buffer excluded).
function generateTextToken(state) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  return {
    type: TOKEN_TEXT,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}
// "<x": flush pending text and enter the open-tag-start context.
function openingCornerBraceWithText(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = OPEN_TAG_START_CONTEXT;
  state.caretPosition++;
}
// "</": flush pending text and enter the close-tag context.
function openingCornerBraceWithSlash(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = CLOSE_TAG_CONTEXT;
  state.caretPosition++;
}
// "<!DOCTYPE": flush pending text and enter the doctype-start context.
function doctypeStart(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_START_CONTEXT;
  state.caretPosition++;
}
// "<!--": flush pending text, emit the comment-start token directly and
// enter the comment-content context.
function commentStart(state, tokens) {
  if (state.accumulatedContent.length !== 0) {
    tokens.push(generateTextToken(state));
  }
  // The buffer holds the whole "<!--"; derive its start position from
  // its length.
  var commentStartRange = {
    startPosition: state.caretPosition - (COMMENT_START.length - 1),
    endPosition: state.caretPosition
  };
  tokens.push({
    type: TOKEN_COMMENT_START,
    content: state.decisionBuffer,
    startPosition: commentStartRange.startPosition,
    endPosition: commentStartRange.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = COMMENT_CONTENT_CONTEXT;
  state.caretPosition++;
}
// Called when input ends while still in data context: emit whatever
// text is left (accumulated content plus buffer) as a final TOKEN_TEXT.
function handleContentEnd(state, tokens) {
  var textContent = state.accumulatedContent + state.decisionBuffer;
  if (textContent.length !== 0) {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_TEXT,
      content: textContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
}
// True while the buffer is a proper prefix of "<!DOCTYPE" (case-insensitive).
function isIncompleteDoctype(chars) {
  var charsUpperCase = chars.toUpperCase();
  return charsUpperCase === '<!' || charsUpperCase === '<!D' || charsUpperCase === '<!DO' || charsUpperCase === '<!DOC' || charsUpperCase === '<!DOCT' || charsUpperCase === '<!DOCTY' || charsUpperCase === '<!DOCTYP';
}
var OPEN_TAG_START_PATTERN = /^<\w/;
function parseSyntax(chars, state, tokens) {
  if (OPEN_TAG_START_PATTERN.test(chars)) {
    return openingCornerBraceWithText(state, tokens);
  }
  if (chars === '</') {
    return openingCornerBraceWithSlash(state, tokens);
  }
  // Possible start of a tag, doctype or comment: keep buffering.
  if (chars === '<' || chars === '<!' || chars === '<!-') {
    state.caretPosition++;
    return;
  }
  if (chars === COMMENT_START) {
    return commentStart(state, tokens);
  }
  if (isIncompleteDoctype(chars)) {
    state.caretPosition++;
    return;
  }
  if (chars.toUpperCase() === '<!DOCTYPE') {
    return doctypeStart(state, tokens);
  }
  // Plain text: move the buffer into the accumulated content.
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax,
  handleContentEnd: handleContentEnd
};

View File

@@ -0,0 +1,40 @@
"use strict";
// Tokenizer context: a bare (unquoted) doctype attribute. Accumulates
// characters until whitespace or ">", then emits TOKEN_DOCTYPE_ATTRIBUTE
// and returns to the doctype-attributes context.
var _require = require('../helpers'),
  isWhitespace = _require.isWhitespace,
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
  DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Emit the attribute token. The caret is not advanced, so the
// terminating character is re-parsed in the doctype-attributes context.
function attributeEnd(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars) || chars === '>') {
    return attributeEnd(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,40 @@
'use strict';
// Emits the closing-quote token of a wrapped doctype attribute and
// returns to the doctype-attributes context. The quote character to
// match is read from this context's params.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
var _require3 = require('../constants/tokenizer-contexts'),
  DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
  DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
  // Drop the wrapper info now that the wrapped attribute is closed.
  delete state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT];
}
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,37 @@
'use strict';
// Emits the opening-quote token of a wrapped doctype attribute and
// moves into the wrapped-attribute context. The quote character to
// match is read from the wrapped-attribute context params.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
var _require3 = require('../constants/tokenizer-contexts'),
  DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, { keepBuffer: true });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
    content: state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,52 @@
"use strict";
// Tokenizer context: inside a quoted doctype attribute. Accumulates
// characters until the matching quote appears, then emits the attribute
// token followed by the closing-wrapper token and returns to the
// doctype-attributes context.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
  TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE;
var _require3 = require('../constants/tokenizer-contexts'),
  DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require3.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
  DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
function wrapper(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  // The closing quote sits one position past the attribute's range.
  var endWrapperPosition = range.endPosition + 1;
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END,
    content: state.decisionBuffer,
    startPosition: endWrapperPosition,
    endPosition: endWrapperPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
  state.caretPosition++;
  // Clear the wrapper info so the next wrapped attribute starts fresh.
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = undefined;
}
function parseSyntax(chars, state, tokens) {
  var wrapperChar = state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT].wrapper;
  if (chars === wrapperChar) {
    return wrapper(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,63 @@
"use strict";
// Tokenizer context: between doctype attributes. Routes to a wrapped
// (quoted) attribute, a bare attribute, or the doctype end.
var _require = require('../helpers'),
  isWhitespace = _require.isWhitespace;
var _require2 = require('../constants/tokenizer-contexts'),
  DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT,
  DOCTYPE_ATTRIBUTE_BARE_CONTEXT = _require2.DOCTYPE_ATTRIBUTE_BARE_CONTEXT,
  DOCTYPE_END_CONTEXT = _require2.DOCTYPE_END_CONTEXT;
var _require3 = require('../constants/token-types'),
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require3.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
// Opening quote: emit the start-wrapper token, remember which quote
// character was used, and switch to the wrapped-attribute context.
function wrapper(state, tokens) {
  var wrapper = state.decisionBuffer;
  tokens.push({
    type: TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
    content: wrapper,
    startPosition: state.caretPosition,
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT;
  state.contextParams[DOCTYPE_ATTRIBUTE_WRAPPED_CONTEXT] = {
    wrapper: wrapper
  };
  state.caretPosition++;
}
// Non-whitespace, non-quote: start a bare attribute.
function bare(state) {
  state.accumulatedContent = state.decisionBuffer;
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTE_BARE_CONTEXT;
  state.caretPosition++;
}
// ">": hand over to the doctype-end context. The caret is not advanced,
// so the same character is re-parsed there.
function closingCornerBrace(state) {
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_END_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
  if (chars === '"' || chars === '\'') {
    return wrapper(state, tokens);
  }
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  if (!isWhitespace(chars)) {
    return bare(state, tokens);
  }
  // Whitespace between attributes: skip.
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,34 @@
"use strict";
var _require = require('../helpers'),
calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END;
var _require3 = require('../constants/tokenizer-contexts'),
DATA_CONTEXT = _require3.DATA_CONTEXT;
function closingCornerBrace(state, tokens) {
var range = calculateTokenCharactersRange(state, {
keepBuffer: true
});
tokens.push({
type: TOKEN_DOCTYPE_END,
content: state.decisionBuffer,
startPosition: range.startPosition,
endPosition: range.endPosition
});
state.accumulatedContent = '';
state.decisionBuffer = '';
state.currentContext = DATA_CONTEXT;
state.caretPosition++;
}
function parseSyntax(chars, state, tokens) {
return closingCornerBrace(state, tokens);
}
module.exports = {
parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,55 @@
"use strict";
// Tokenizer context: the "<!DOCTYPE" keyword. Emits TOKEN_DOCTYPE_START
// once the keyword is terminated by whitespace (attributes follow) or
// ">" (doctype ends immediately).
var _require = require('../helpers'),
  isWhitespace = _require.isWhitespace,
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_DOCTYPE_START = _require2.TOKEN_DOCTYPE_START;
var _require3 = require('../constants/tokenizer-contexts'),
  DOCTYPE_END_CONTEXT = _require3.DOCTYPE_END_CONTEXT,
  DOCTYPE_ATTRIBUTES_CONTEXT = _require3.DOCTYPE_ATTRIBUTES_CONTEXT;
// Build the TOKEN_DOCTYPE_START token from the accumulated keyword.
function generateDoctypeStartToken(state) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  return {
    type: TOKEN_DOCTYPE_START,
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  };
}
// ">": no attributes; go straight to the doctype-end context. The caret
// is not advanced, so the ">" itself is re-parsed there.
function closingCornerBrace(state, tokens) {
  tokens.push(generateDoctypeStartToken(state));
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_END_CONTEXT;
}
// Whitespace: attributes follow.
function whitespace(state, tokens) {
  tokens.push(generateDoctypeStartToken(state));
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DOCTYPE_ATTRIBUTES_CONTEXT;
}
function parseSyntax(chars, state, tokens) {
  if (isWhitespace(chars)) {
    return whitespace(state, tokens);
  }
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  // NOTE(review): the buffer is dropped here; keyword accumulation
  // appears to happen in the tokenizer loop — confirm against it.
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
// Tokenizer context: the ">" that closes an open tag. Emits an
// open-tag-end token whose type depends on the tag name (script/style
// get dedicated token types) and switches to the matching content
// context.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_OPEN_TAG_END = _require2.TOKEN_OPEN_TAG_END,
  TOKEN_OPEN_TAG_END_SCRIPT = _require2.TOKEN_OPEN_TAG_END_SCRIPT,
  TOKEN_OPEN_TAG_END_STYLE = _require2.TOKEN_OPEN_TAG_END_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
  OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
  DATA_CONTEXT = _require3.DATA_CONTEXT,
  SCRIPT_CONTENT_CONTEXT = _require3.SCRIPT_CONTENT_CONTEXT,
  STYLE_CONTENT_CONTEXT = _require3.STYLE_CONTENT_CONTEXT;
// Token type emitted per tag name.
var tokensMap = {
  'script': TOKEN_OPEN_TAG_END_SCRIPT,
  'style': TOKEN_OPEN_TAG_END_STYLE,
  'default': TOKEN_OPEN_TAG_END
};
// Context to continue in per tag name (script/style content is handled
// by dedicated raw-content contexts).
var contextsMap = {
  'script': SCRIPT_CONTENT_CONTEXT,
  'style': STYLE_CONTENT_CONTEXT,
  'default': DATA_CONTEXT
};
function closingCornerBrace(state, tokens) {
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: true
  });
  var tagName = state.contextParams[OPEN_TAG_END_CONTEXT].tagName;
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent + state.decisionBuffer,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = contextsMap[tagName] || contextsMap["default"];
  state.caretPosition++;
  state.contextParams[OPEN_TAG_END_CONTEXT] = undefined;
}
function parseSyntax(chars, state, tokens) {
  if (chars === '>') {
    return closingCornerBrace(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,78 @@
"use strict";
// Tokenizer context: an open tag's name ("<div"). Emits an
// open-tag-start token (script/style get dedicated types) when the name
// is terminated by ">", "/" or whitespace.
var _require = require('../helpers'),
  parseOpenTagName = _require.parseOpenTagName,
  isWhitespace = _require.isWhitespace,
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_OPEN_TAG_START = _require2.TOKEN_OPEN_TAG_START,
  TOKEN_OPEN_TAG_START_SCRIPT = _require2.TOKEN_OPEN_TAG_START_SCRIPT,
  TOKEN_OPEN_TAG_START_STYLE = _require2.TOKEN_OPEN_TAG_START_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
  OPEN_TAG_END_CONTEXT = _require3.OPEN_TAG_END_CONTEXT,
  ATTRIBUTES_CONTEXT = _require3.ATTRIBUTES_CONTEXT;
// Token type emitted per parsed tag name.
var tokensMap = {
  'script': TOKEN_OPEN_TAG_START_SCRIPT,
  'style': TOKEN_OPEN_TAG_START_STYLE,
  'default': TOKEN_OPEN_TAG_START
};
// ">" or "/": no attributes; emit the start token and move to the
// open-tag-end context. The caret is not advanced, so the terminator is
// re-parsed there.
function tagEnd(state, tokens) {
  var tagName = parseOpenTagName(state.accumulatedContent);
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.decisionBuffer = '';
  state.accumulatedContent = '';
  state.currentContext = OPEN_TAG_END_CONTEXT;
  state.contextParams[OPEN_TAG_END_CONTEXT] = {
    tagName: tagName
  };
}
// Whitespace: the tag has attributes; emit the start token and move to
// the attributes context.
function whitespace(state, tokens) {
  var tagName = parseOpenTagName(state.accumulatedContent);
  var range = calculateTokenCharactersRange(state, {
    keepBuffer: false
  });
  tokens.push({
    type: tokensMap[tagName] || tokensMap["default"],
    content: state.accumulatedContent,
    startPosition: range.startPosition,
    endPosition: range.endPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = ATTRIBUTES_CONTEXT;
  state.contextParams[ATTRIBUTES_CONTEXT] = {
    tagName: tagName
  };
  state.caretPosition++;
}
function parseSyntax(chars, state, tokens) {
  if (chars === '>' || chars === '/') {
    return tagEnd(state, tokens);
  }
  if (isWhitespace(chars)) {
    return whitespace(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
// Tokenizer context: raw content of a <script> element. Everything up
// to the "</script>" closing tag becomes one TOKEN_SCRIPT_TAG_CONTENT
// token, followed by a TOKEN_CLOSE_TAG_SCRIPT token.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_SCRIPT_TAG_CONTENT = _require2.TOKEN_SCRIPT_TAG_CONTENT,
  TOKEN_CLOSE_TAG_SCRIPT = _require2.TOKEN_CLOSE_TAG_SCRIPT;
var _require3 = require('../constants/tokenizer-contexts'),
  DATA_CONTEXT = _require3.DATA_CONTEXT;
function closingScriptTag(state, tokens) {
  // Only emit a content token when the script actually has content.
  if (state.accumulatedContent !== '') {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_SCRIPT_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
  tokens.push({
    type: TOKEN_CLOSE_TAG_SCRIPT,
    content: state.decisionBuffer,
    // The buffer holds the whole "</script ...>" sequence; derive its
    // start position from the buffer length.
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}
// A buffered "</" followed by anything but ">" may still grow into the
// closing tag; keep buffering until it either matches or is ruled out.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
var CLOSING_SCRIPT_TAG_PATTERN = /<\/script\s*>/i;
function parseSyntax(chars, state, tokens) {
  if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
    state.caretPosition++;
    return;
  }
  if (CLOSING_SCRIPT_TAG_PATTERN.test(chars)) {
    return closingScriptTag(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,58 @@
"use strict";
// Tokenizer context: raw content of a <style> element. Everything up
// to the "</style>" closing tag becomes one TOKEN_STYLE_TAG_CONTENT
// token, followed by a TOKEN_CLOSE_TAG_STYLE token.
var _require = require('../helpers'),
  calculateTokenCharactersRange = _require.calculateTokenCharactersRange;
var _require2 = require('../constants/token-types'),
  TOKEN_STYLE_TAG_CONTENT = _require2.TOKEN_STYLE_TAG_CONTENT,
  TOKEN_CLOSE_TAG_STYLE = _require2.TOKEN_CLOSE_TAG_STYLE;
var _require3 = require('../constants/tokenizer-contexts'),
  DATA_CONTEXT = _require3.DATA_CONTEXT;
function closingStyleTag(state, tokens) {
  // Only emit a content token when the style element has content.
  if (state.accumulatedContent !== '') {
    var range = calculateTokenCharactersRange(state, {
      keepBuffer: false
    });
    tokens.push({
      type: TOKEN_STYLE_TAG_CONTENT,
      content: state.accumulatedContent,
      startPosition: range.startPosition,
      endPosition: range.endPosition
    });
  }
  tokens.push({
    type: TOKEN_CLOSE_TAG_STYLE,
    content: state.decisionBuffer,
    // The buffer holds the whole "</style ...>" sequence; derive its
    // start position from the buffer length.
    startPosition: state.caretPosition - (state.decisionBuffer.length - 1),
    endPosition: state.caretPosition
  });
  state.accumulatedContent = '';
  state.decisionBuffer = '';
  state.currentContext = DATA_CONTEXT;
  state.caretPosition++;
}
// A buffered "</" followed by anything but ">" may still grow into the
// closing tag; keep buffering until it either matches or is ruled out.
var INCOMPLETE_CLOSING_TAG_PATTERN = /<\/[^>]+$/;
var CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;
function parseSyntax(chars, state, tokens) {
  if (chars === '<' || chars === '</' || INCOMPLETE_CLOSING_TAG_PATTERN.test(chars)) {
    state.caretPosition++;
    return;
  }
  if (CLOSING_STYLE_TAG_PATTERN.test(chars)) {
    return closingStyleTag(state, tokens);
  }
  state.accumulatedContent += state.decisionBuffer;
  state.decisionBuffer = '';
  state.caretPosition++;
}
module.exports = {
  parseSyntax: parseSyntax
};

View File

@@ -0,0 +1,65 @@
"use strict";
// Tree-constructor handler: the value part of a tag attribute. Attaches
// the value token (and its optional quote-wrapper tokens) to the last
// attribute of the current node; any token that can only follow a
// finished value pops back to the parent context.
var _require = require('../constants/token-types'),
  TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
  TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
  TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
  TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
  TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
  TOKEN_ATTRIBUTE_VALUE = _require.TOKEN_ATTRIBUTE_VALUE,
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_START = _require.TOKEN_ATTRIBUTE_VALUE_WRAPPER_START,
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_END = _require.TOKEN_ATTRIBUTE_VALUE_WRAPPER_END;
// The attribute being filled is always the most recently pushed one.
function getLastAttribute(state) {
  var attributes = state.currentNode.content.attributes;
  return attributes[attributes.length - 1];
}
// Value finished: return to the parent context without consuming the token.
function handleValueEnd(state) {
  state.currentContext = state.currentContext.parentRef;
  return state;
}
function handleAttributeValue(state, token) {
  var attribute = getLastAttribute(state);
  attribute.value = token;
  state.caretPosition++;
  return state;
}
function handleAttributeValueWrapperStart(state, token) {
  var attribute = getLastAttribute(state);
  attribute.startWrapper = token;
  state.caretPosition++;
  return state;
}
function handleAttributeValueWrapperEnd(state, token) {
  var attribute = getLastAttribute(state);
  attribute.endWrapper = token;
  state.caretPosition++;
  return state;
}
module.exports = function attributeValue(token, state) {
  var VALUE_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE, TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
  if (VALUE_END_TOKENS.indexOf(token.type) !== -1) {
    return handleValueEnd(state);
  }
  if (token.type === TOKEN_ATTRIBUTE_VALUE) {
    return handleAttributeValue(state, token);
  }
  if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_START) {
    return handleAttributeValueWrapperStart(state, token);
  }
  if (token.type === TOKEN_ATTRIBUTE_VALUE_WRAPPER_END) {
    return handleAttributeValueWrapperEnd(state, token);
  }
  state.caretPosition++;
  return state;
};

View File

@@ -0,0 +1,69 @@
"use strict";
// Tree-constructor handler: a single tag attribute. Fills in the
// attribute's key, then descends into the attribute-value context on
// "="; tokens that cannot belong to this attribute pop back to the
// parent context.
var _require = require('../constants/token-types'),
  TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
  TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
  TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
  TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
  TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT;
var _require2 = require('../constants/tree-constructor-contexts'),
  ATTRIBUTE_VALUE_CONTEXT = _require2.ATTRIBUTE_VALUE_CONTEXT;
// The attribute being filled is always the most recently pushed one.
function getLastAttribute(state) {
  var attributes = state.currentNode.content.attributes;
  return attributes[attributes.length - 1];
}
// Tag is closing: return to the parent context without consuming the token.
function handleOpenTagEnd(state) {
  state.currentContext = state.currentContext.parentRef;
  return state;
}
function handleAttributeKey(state, token) {
  var attribute = getLastAttribute(state);
  // A key when this attribute already has a key or value means a new
  // attribute has started: let the parent context re-process the token.
  if (attribute.key !== undefined || attribute.value !== undefined) {
    state.currentContext = state.currentContext.parentRef;
    return state;
  }
  attribute.key = token;
  state.caretPosition++;
  return state;
}
function handleAttributeAssignment(state) {
  var attribute = getLastAttribute(state);
  // "=" after a completed value belongs to the next attribute.
  if (attribute.value !== undefined) {
    state.currentContext = state.currentContext.parentRef;
    return state;
  }
  state.currentContext = {
    parentRef: state.currentContext,
    type: ATTRIBUTE_VALUE_CONTEXT
  };
  state.caretPosition++;
  return state;
}
module.exports = function attribute(token, state) {
  var OPEN_TAG_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE];
  if (OPEN_TAG_END_TOKENS.indexOf(token.type) !== -1) {
    return handleOpenTagEnd(state);
  }
  if (token.type === TOKEN_ATTRIBUTE_KEY) {
    return handleAttributeKey(state, token);
  }
  if (token.type === TOKEN_ATTRIBUTE_ASSIGNMENT) {
    return handleAttributeAssignment(state);
  }
  state.caretPosition++;
  return state;
};

View File

@@ -0,0 +1,47 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE;
var _require2 = require('../constants/tree-constructor-contexts'),
ATTRIBUTE_CONTEXT = _require2.ATTRIBUTE_CONTEXT;
function handlerAttributeStart(state) {
if (state.currentNode.content.attributes === undefined) {
state.currentNode.content.attributes = [];
} // new empty attribute
state.currentNode.content.attributes.push({});
state.currentContext = {
parentRef: state.currentContext,
type: ATTRIBUTE_CONTEXT
};
return state;
}
function handleOpenTagEnd(state) {
state.currentContext = state.currentContext.parentRef;
return state;
}
module.exports = function attributes(token, state) {
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handlerAttributeStart(state);
}
var ATTRIBUTES_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE];
if (ATTRIBUTES_END_TOKENS.indexOf(token.type) !== -1) {
return handleOpenTagEnd(state);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,43 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_COMMENT_START = _require.TOKEN_COMMENT_START,
TOKEN_COMMENT_END = _require.TOKEN_COMMENT_END,
TOKEN_COMMENT_CONTENT = _require.TOKEN_COMMENT_CONTENT;
function handleCommentStart(state, token) {
state.currentNode.content.start = token;
state.caretPosition++;
return state;
}
function handleCommentContent(state, token) {
state.currentNode.content.value = token;
state.caretPosition++;
return state;
}
function handleCommentEnd(state, token) {
state.currentNode.content.end = token;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
module.exports = function comment(token, state) {
if (token.type === TOKEN_COMMENT_START) {
return handleCommentStart(state, token);
}
if (token.type === TOKEN_COMMENT_CONTENT) {
return handleCommentContent(state, token);
}
if (token.type === TOKEN_COMMENT_END) {
return handleCommentEnd(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,72 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require.TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE = _require.TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END;
function getLastAttribute(state) {
var attributes = state.currentNode.content.attributes;
return attributes[attributes.length - 1];
}
function handleDoctypeEnd(state) {
state.currentContext = state.currentContext.parentRef;
return state;
}
function handleAttributeValue(state, token) {
var attribute = getLastAttribute(state);
if (attribute.value !== undefined) {
state.currentContext = state.currentContext.parentRef;
return state;
}
attribute.value = token;
state.caretPosition++;
return state;
}
function handleAttributeWrapperStart(state, token) {
var attribute = getLastAttribute(state);
if (attribute.start !== undefined || attribute.value !== undefined) {
state.currentContext = state.currentContext.parentRef;
return state;
}
attribute.startWrapper = token;
state.caretPosition++;
return state;
}
function handleAttributeWrapperEnd(state, token) {
var attribute = getLastAttribute(state);
attribute.endWrapper = token;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
module.exports = function doctypeAttribute(token, state) {
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token);
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START) {
return handleAttributeWrapperStart(state, token);
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END) {
return handleAttributeWrapperEnd(state, token);
}
if (token.type === TOKEN_DOCTYPE_ATTRIBUTE) {
return handleAttributeValue(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,43 @@
"use strict";
var _require = require('../constants/tree-constructor-contexts'),
DOCTYPE_ATTRIBUTE_CONTEXT = _require.DOCTYPE_ATTRIBUTE_CONTEXT;
var _require2 = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require2.TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE = _require2.TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require2.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
function handleDoctypeEnd(state) {
state.currentContext = state.currentContext.parentRef;
return state;
}
function handleAttribute(state) {
if (state.currentNode.content.attributes === undefined) {
state.currentNode.content.attributes = [];
} // new empty attribute
state.currentNode.content.attributes.push({});
state.currentContext = {
type: DOCTYPE_ATTRIBUTE_CONTEXT,
parentRef: state.currentContext
};
return state;
}
module.exports = function doctypeAttributes(token, state) {
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token);
}
var ATTRIBUTE_START_TOKENS = [TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START, TOKEN_DOCTYPE_ATTRIBUTE];
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttribute(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,51 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_DOCTYPE_END = _require.TOKEN_DOCTYPE_END,
TOKEN_DOCTYPE_ATTRIBUTE = _require.TOKEN_DOCTYPE_ATTRIBUTE,
TOKEN_DOCTYPE_START = _require.TOKEN_DOCTYPE_START,
TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START = _require.TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START;
var _require2 = require('../constants/tree-constructor-contexts'),
DOCTYPE_ATTRIBUTES_CONTEXT = _require2.DOCTYPE_ATTRIBUTES_CONTEXT;
function handleDoctypeStart(state, token) {
state.currentNode.content.start = token;
state.caretPosition++;
return state;
}
function handleDoctypeEnd(state, token) {
state.currentNode.content.end = token;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
function handleDoctypeAttributes(state) {
state.currentContext = {
parentRef: state.currentContext,
type: DOCTYPE_ATTRIBUTES_CONTEXT
};
return state;
}
module.exports = function doctype(token, state) {
if (token.type === TOKEN_DOCTYPE_START) {
return handleDoctypeStart(state, token);
}
if (token.type === TOKEN_DOCTYPE_END) {
return handleDoctypeEnd(state, token);
}
var ATTRIBUTES_START_TOKENS = [TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START, TOKEN_DOCTYPE_ATTRIBUTE];
if (ATTRIBUTES_START_TOKENS.indexOf(token.type) !== -1) {
return handleDoctypeAttributes(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,73 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_OPEN_TAG_START_SCRIPT = _require.TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT,
TOKEN_CLOSE_TAG_SCRIPT = _require.TOKEN_CLOSE_TAG_SCRIPT,
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_SCRIPT_TAG_CONTENT = _require.TOKEN_SCRIPT_TAG_CONTENT;
var _require2 = require('../constants/tree-constructor-contexts'),
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT;
function handleOpenTagStartScript(state, token) {
state.currentNode.content.openStart = token;
state.caretPosition++;
return state;
}
function handleAttributeStartScript(state) {
state.currentContext = {
parentRef: state.currentContext,
type: ATTRIBUTES_CONTEXT
};
return state;
}
function handleOpenTagEndScript(state, token) {
state.currentNode.content.openEnd = token;
state.caretPosition++;
return state;
}
function handleScriptContent(state, token) {
state.currentNode.content.value = token;
state.caretPosition++;
return state;
}
function handleCloseTagScript(state, token) {
state.currentNode.content.close = token;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
module.exports = function scriptTag(token, state) {
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
return handleOpenTagStartScript(state, token);
}
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStartScript(state);
}
if (token.type === TOKEN_OPEN_TAG_END_SCRIPT) {
return handleOpenTagEndScript(state, token);
}
if (token.type === TOKEN_SCRIPT_TAG_CONTENT) {
return handleScriptContent(state, token);
}
if (token.type === TOKEN_CLOSE_TAG_SCRIPT) {
return handleCloseTagScript(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,73 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_OPEN_TAG_START_STYLE = _require.TOKEN_OPEN_TAG_START_STYLE,
TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE,
TOKEN_CLOSE_TAG_STYLE = _require.TOKEN_CLOSE_TAG_STYLE,
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT,
TOKEN_STYLE_TAG_CONTENT = _require.TOKEN_STYLE_TAG_CONTENT;
var _require2 = require('../constants/tree-constructor-contexts'),
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT;
function handleOpenTagStartStyle(state, token) {
state.currentNode.content.openStart = token;
state.caretPosition++;
return state;
}
function handleAttributeStartStyle(state) {
state.currentContext = {
parentRef: state.currentContext,
type: ATTRIBUTES_CONTEXT
};
return state;
}
function handleOpenTagEndStyle(state, token) {
state.currentNode.content.openEnd = token;
state.caretPosition++;
return state;
}
function handleStyleContent(state, token) {
state.currentNode.content.value = token;
state.caretPosition++;
return state;
}
function handleCloseTagStyle(state, token) {
state.currentNode.content.close = token;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
module.exports = function styleTag(token, state) {
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
return handleOpenTagStartStyle(state, token);
}
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStartStyle(state);
}
if (token.type === TOKEN_OPEN_TAG_END_STYLE) {
return handleOpenTagEndStyle(state, token);
}
if (token.type === TOKEN_STYLE_TAG_CONTENT) {
return handleStyleContent(state, token);
}
if (token.type === TOKEN_CLOSE_TAG_STYLE) {
return handleCloseTagStyle(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,184 @@
"use strict";
var parseCloseTagName = require('../helpers').parseCloseTagName;
var _require = require('../constants/token-types'),
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START,
TOKEN_CLOSE_TAG = _require.TOKEN_CLOSE_TAG,
TOKEN_COMMENT_START = _require.TOKEN_COMMENT_START,
TOKEN_DOCTYPE_START = _require.TOKEN_DOCTYPE_START,
TOKEN_TEXT = _require.TOKEN_TEXT,
TOKEN_OPEN_TAG_START_SCRIPT = _require.TOKEN_OPEN_TAG_START_SCRIPT,
TOKEN_OPEN_TAG_START_STYLE = _require.TOKEN_OPEN_TAG_START_STYLE;
var _require2 = require('../constants/tree-constructor-contexts'),
TAG_CONTEXT = _require2.TAG_CONTEXT,
COMMENT_CONTEXT = _require2.COMMENT_CONTEXT,
DOCTYPE_CONTEXT = _require2.DOCTYPE_CONTEXT,
SCRIPT_TAG_CONTEXT = _require2.SCRIPT_TAG_CONTEXT,
STYLE_TAG_CONTEXT = _require2.STYLE_TAG_CONTEXT;
var _require3 = require('../constants/ast-nodes'),
NODE_TAG = _require3.NODE_TAG,
NODE_TEXT = _require3.NODE_TEXT,
NODE_DOCTYPE = _require3.NODE_DOCTYPE,
NODE_COMMENT = _require3.NODE_COMMENT,
NODE_SCRIPT = _require3.NODE_SCRIPT,
NODE_STYLE = _require3.NODE_STYLE;
function handleOpenTagStart(state) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var tagNode = {
nodeType: NODE_TAG,
parentRef: state.currentNode,
content: {}
};
state.currentNode.content.children.push(tagNode);
state.currentNode = tagNode;
state.currentContext = {
parentRef: state.currentContext,
type: TAG_CONTEXT
};
return state;
}
function handleCloseTag(state, token) {
var closeTagName = parseCloseTagName(token.content);
if (closeTagName !== state.currentNode.content.name) {
state.caretPosition++;
return state;
}
state.currentContext = state.currentContext.parentRef;
return state;
}
function handleCommentStart(state) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var commentNode = {
nodeType: NODE_COMMENT,
parentRef: state.currentNode,
content: {}
};
state.currentNode.content.children.push(commentNode);
state.currentNode = commentNode;
state.currentContext = {
parentRef: state.currentContext,
type: COMMENT_CONTEXT
};
return state;
}
function handleDoctypeStart(state) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var doctypeNode = {
nodeType: NODE_DOCTYPE,
parentRef: state.currentNode,
content: {}
};
state.currentNode.content.children.push(doctypeNode);
state.currentNode = doctypeNode;
state.currentContext = {
parentRef: state.currentContext,
type: DOCTYPE_CONTEXT
};
return state;
}
function handleText(state, token) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var textNode = {
nodeType: NODE_TEXT,
parentRef: state.currentNode,
content: {
value: token
}
};
state.currentNode.content.children.push(textNode);
state.caretPosition++;
return state;
}
function handleOpenTagStartScript(state) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var scriptNode = {
nodeType: NODE_SCRIPT,
parentRef: state.currentNode,
content: {}
};
state.currentNode.content.children.push(scriptNode);
state.currentNode = scriptNode;
state.currentContext = {
type: SCRIPT_TAG_CONTEXT,
parentRef: state.currentContext
};
return state;
}
function handleOpenTagStartStyle(state) {
if (state.currentNode.content.children === undefined) {
state.currentNode.content.children = [];
}
var styleNode = {
nodeType: NODE_STYLE,
parentRef: state.currentNode,
content: {}
};
state.currentNode.content.children.push(styleNode);
state.currentNode = styleNode;
state.currentContext = {
type: STYLE_TAG_CONTEXT,
parentRef: state.currentContext
};
return state;
}
module.exports = function tagContent(token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
return handleOpenTagStart(state, token);
}
if (token.type === TOKEN_TEXT) {
return handleText(state, token);
}
if (token.type === TOKEN_CLOSE_TAG) {
return handleCloseTag(state, token);
}
if (token.type === TOKEN_COMMENT_START) {
return handleCommentStart(state, token);
}
if (token.type === TOKEN_DOCTYPE_START) {
return handleDoctypeStart(state, token);
}
if (token.type === TOKEN_OPEN_TAG_START_SCRIPT) {
return handleOpenTagStartScript(state, token);
}
if (token.type === TOKEN_OPEN_TAG_START_STYLE) {
return handleOpenTagStartStyle(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,27 @@
"use strict";
/**
* Parser for 'tag-name' context.
* Parses tag name from 'open-tag-start' (<div)
* token and save the tag name as self content.
* Ignores tokens others than 'open-tag-start'.
*/
var parseOpenTagName = require('../helpers').parseOpenTagName;
var _require = require('../constants/token-types'),
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START;
function handleTagOpenStart(state, token) {
state.currentNode.content.name = parseOpenTagName(token.content);
state.currentContext = state.currentContext.parentRef;
return state;
}
module.exports = function tagName(token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
handleTagOpenStart(state, token);
}
state.caretPosition++;
return state;
};

View File

@@ -0,0 +1,83 @@
"use strict";
var _require = require('../constants/token-types'),
TOKEN_OPEN_TAG_START = _require.TOKEN_OPEN_TAG_START,
TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END,
TOKEN_CLOSE_TAG = _require.TOKEN_CLOSE_TAG,
TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY,
TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT;
var _require2 = require('../constants/tree-constructor-contexts'),
TAG_NAME_CONTEXT = _require2.TAG_NAME_CONTEXT,
ATTRIBUTES_CONTEXT = _require2.ATTRIBUTES_CONTEXT,
TAG_CONTENT_CONTEXT = _require2.TAG_CONTENT_CONTEXT;
function handleOpenTagStart(state, token) {
state.currentNode.content.openStart = token;
state.currentContext = {
parentRef: state.currentContext,
type: TAG_NAME_CONTEXT
};
return state;
}
function handleAttributeStart(state) {
state.currentContext = {
parentRef: state.currentContext,
type: ATTRIBUTES_CONTEXT
};
return state;
}
function handleOpenTagEnd(state, token) {
var SELF_CLOSING_TAGS = ['area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'meta', 'param', 'source', 'track', 'wbr'];
var tagName = state.currentNode.content.name;
state.currentNode.content.openEnd = token;
if (SELF_CLOSING_TAGS.indexOf(tagName) !== -1) {
state.currentNode.content.selfClosing = true;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
state.currentNode.content.selfClosing = false;
state.currentContext = {
parentRef: state.currentContext,
type: TAG_CONTENT_CONTEXT
};
state.caretPosition++;
return state;
}
function handleCloseTag(state, token) {
state.currentNode.content.close = token;
state.currentNode = state.currentNode.parentRef;
state.currentContext = state.currentContext.parentRef;
state.caretPosition++;
return state;
}
module.exports = function tag(token, state) {
if (token.type === TOKEN_OPEN_TAG_START) {
return handleOpenTagStart(state, token);
}
var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT];
if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) {
return handleAttributeStart(state);
}
if (token.type === TOKEN_OPEN_TAG_END) {
return handleOpenTagEnd(state, token);
}
if (token.type === TOKEN_CLOSE_TAG) {
return handleCloseTag(state, token);
}
state.caretPosition++;
return state;
};

9
app/node_modules/hyntax/lib/constants/ast-nodes.js generated vendored Normal file
View File

@@ -0,0 +1,9 @@
// AST node-type identifiers. Each node produced by the tree
// constructor carries one of these strings in its `nodeType` field.
module.exports = {
  NODE_DOCUMENT: 'document', // root of the whole parsed tree
  NODE_TAG: 'tag',
  NODE_TEXT: 'text',
  NODE_DOCTYPE: 'doctype',
  NODE_COMMENT: 'comment',
  NODE_SCRIPT: 'script', // <script> element with raw content
  NODE_STYLE: 'style' // <style> element with raw content
}

35
app/node_modules/hyntax/lib/constants/token-types.js generated vendored Normal file
View File

@@ -0,0 +1,35 @@
// Token-type identifiers produced by the tokenizer and consumed by
// the tree-constructor context handlers. Each value is the string
// stored in a token's `type` field.
module.exports = {
  TOKEN_TEXT: 'token:text',
  // Generic open tag and its attributes.
  TOKEN_OPEN_TAG_START: 'token:open-tag-start',
  TOKEN_ATTRIBUTE_KEY: 'token:attribute-key',
  TOKEN_ATTRIBUTE_ASSIGNMENT: 'token:attribute-assignment',
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_START: 'token:attribute-value-wrapper-start',
  TOKEN_ATTRIBUTE_VALUE: 'token:attribute-value',
  TOKEN_ATTRIBUTE_VALUE_WRAPPER_END: 'token:attribute-value-wrapper-end',
  TOKEN_OPEN_TAG_END: 'token:open-tag-end',
  TOKEN_CLOSE_TAG: 'token:close-tag',
  // <script> elements are tokenized separately (raw content).
  TOKEN_OPEN_TAG_START_SCRIPT: 'token:open-tag-start-script',
  TOKEN_SCRIPT_TAG_CONTENT: 'token:script-tag-content',
  TOKEN_OPEN_TAG_END_SCRIPT: 'token:open-tag-end-script',
  TOKEN_CLOSE_TAG_SCRIPT: 'token:close-tag-script',
  // <style> elements are tokenized separately (raw content).
  TOKEN_OPEN_TAG_START_STYLE: 'token:open-tag-start-style',
  TOKEN_STYLE_TAG_CONTENT: 'token:style-tag-content',
  TOKEN_OPEN_TAG_END_STYLE: 'token:open-tag-end-style',
  TOKEN_CLOSE_TAG_STYLE: 'token:close-tag-style',
  // Doctype declaration and its (possibly quoted) attributes.
  TOKEN_DOCTYPE_START: 'token:doctype-start',
  TOKEN_DOCTYPE_END: 'token:doctype-end',
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_START: 'token:doctype-attribute-wrapper-start',
  TOKEN_DOCTYPE_ATTRIBUTE: 'token:doctype-attribute',
  TOKEN_DOCTYPE_ATTRIBUTE_WRAPPER_END: 'token:doctype-attribute-wrapper-end',
  // HTML comments.
  TOKEN_COMMENT_START: 'token:comment-start',
  TOKEN_COMMENT_CONTENT: 'token:comment-content',
  TOKEN_COMMENT_END: 'token:comment-end'
}

Some files were not shown because too many files have changed in this diff Show More