Best JavaScript code snippet using wpt
lex.test.js
Source: lex.test.js
...
 * Convert an array of Token to an array of strings.
 * Also works with an array of arrays, to arbitrary depth, converting each Token to a string of its text property.
 * @param array {Token[]|Token[][]}
 * @returns {string[]|string[][]} */
function tokensToText(array) {
  if (!array)
    return undefined;
  let result = [];
  for (let i in array)
    if (Array.isArray(array[i]))
      result[i] = tokensToText(array[i]);
    else {
      result[i] = array[i].text;
      // Object.assign() wraps the string in a String object so it can carry the token's metadata.
      result[i] = Object.assign(result[i], {tokens: tokensToText(array[i].tokens), type: array[i].type, mode: array[i].mode});
    }
  return result;
}

Deno.test('lex.js', () => {
  let code = 'var a = 3';
  let tokens = lex(jsHtml, code);
  assertEquals(tokens.map(t=>t.text), ['var', ' ', 'a', ' ', '=', ' ', '3']);
  assertEquals(tokens.map(t=>t.type), ['keyword','whitespace','identifier','whitespace','operator','whitespace','number']);
  assertEquals(tokens.map(t=>t.mode), ['js','js','js','js','js','js','js']);
});

Deno.test('lex.comment', () => {
  let code = 'var a = 3;// comment\nline2';
  let tokens = lex(jsHtml, code);
  assertEquals(tokens.map(t=>t.text), ['var',' ','a',' ','=',' ','3',';','// comment','\n','line2']);
  assertEquals(tokens.map(t=>t.type), ['keyword','whitespace','identifier','whitespace','operator','whitespace','number','semicolon','comment','ln','identifier']);
});

Deno.test('lex.comment2', () => {
  let code = 'var a;\n// comment\nline2';
  let tokens = lex(jsHtml, code);
  assertEquals(tokens.map(t=>t.text), ['var',' ','a',';','\n','// comment','\n','line2']);
  assertEquals(tokens.map(t=>t.type), ['keyword','whitespace','identifier','semicolon','ln','comment','ln','identifier']);
});

Deno.test('lex.comment3', () => {
  let code = 'var a;\n/*comment1\nline2\r\nline3*/\nline2/*comment2*/';
  let tokens = lex(jsHtml, code);
  assertEquals(tokens.map(t=>t.text), ['var',' ','a',';','\n','/*comment1\nline2\r\nline3*/','\n','line2','/*comment2*/']);
  assertEquals(tokens.map(t=>t.type), ['keyword','whitespace','identifier','semicolon','ln','comment','ln','identifier','comment']);
});

Deno.test('lex.template', () => {
  let code = 'var a=`hello ${name}`;';
  let tokens = lex(jsHtml, code);

  // JavaScript level
  assertEquals(tokens.map(t=>t.text), ['var', ' ', 'a', '=', '`hello ${name}`', ';']);
  assertEquals(tokens.map(t=>t.type), ['keyword','whitespace','identifier','operator','template','semicolon']);
  assertEquals(tokens.map(t=>t.mode), ['js','js','js','js','js','js']);

  // Template string
  assertEquals(tokens[4].tokens.map(t=>t.text), ['`', 'hello ', '${name}', '`']);
  assertEquals(tokens[4].tokens[0].mode, 'template');
  assertEquals(tokens[4].tokens.map(t=>t.type), ["template","text","expr","templateEnd"]);
  assertEquals(tokens[4].tokens.map(t=>t.mode), ['template','template','template','template']);

  // Js inside template string.
  assertEquals(tokens[4].tokens[2].tokens.map(t=>t.text), ['${','name','}']);
  assertEquals(tokens[4].tokens[2].tokens[0].mode, 'js');
});

Deno.test('lex.identifier', () => {
  let code = 'formula=3'; // Make sure it doesn't match the keyword "for".
  let tokens = lex(jsHtml, code);
  assertEquals(tokens.map(t=>t.text), ['formula', '=', '3']);
});

Deno.test('lex.templateHash', () => {
  let old = jsHtml.allowHashTemplates;
  jsHtml.allowHashTemplates = true;
  let code = 'var a=`hello #{name}`;';
  let tokens = lex(jsHtml, code);

  // JavaScript level
  assertEquals(tokens.map(t=>t.text), ['var', ' ', 'a', '=', '`hello #{name}`', ';']);

  // Template string
  assertEquals(tokens[4].tokens.map(t=>t.text), ['`', 'hello ', '#{name}', '`']);
  assertEquals(tokens[4].tokens[0].mode, 'template');

  // Js inside template string.
  assertEquals(tokens[4].tokens[2].tokens.map(t=>t.text), ['#{','name','}']);
  assertEquals(tokens[4].tokens[2].tokens[0].mode, 'js');

  jsHtml.allowHashTemplates = old;
});

Deno.test('lex.template-escape', () => {
  let code = 'var a=`hello \\${name}`;'; // Same as \$ inside a template string.
  let tokens = lex(jsHtml, code);

  // JavaScript level
  assertEquals(tokens.map(t=>t.text), ['var', ' ', 'a', '=', '`hello \\${name}`', ';']);

  // Template string
  assertEquals(tokens[4].tokens.map(t=>t.text), ['`', 'hello \\${name}', '`']); // It's not split into "hello" and ${name}.
  assertEquals(tokens[4].tokens[0].mode, 'template');
});

Deno.test('lex.template-hash-escape', () => {
  let code = 'var a=`hello \\#{name}`;';
  let tokens = lex(jsHtml, code);

  // JavaScript level
  assertEquals(tokens.map(t=>t.text), ['var', ' ', 'a', '=', '`hello \\#{name}`', ';']);

  // Template string
  assertEquals(tokens[4].tokens.map(t=>t.text), ['`', 'hello \\#{name}', '`']);

  // Js inside template string.
  assertEquals(tokens[4].tokens[2].tokens, undefined);
});

Deno.test('lex.template-brace-depth', () => {
  let code = '<div>${{a: `a`})}</div>';
  let tokens = lex(jsHtml, code, 'template');
  assertEquals(tokensToText(tokens), ['<div>', '${{a: `a`})}', '</div>']);
});

Deno.test('lex.template-brace-depth2', () => {
  let code = '`a ${{b: `${{c: 3}}`}}`.length';
  let tokens = lex(jsHtml, code);

  // Test braceDepth.  TODO: test.
  tokens = tokensToText(tokens);

  assertEquals(tokens, ['`a ${{b: `${{c: 3}}`}}`', '.', 'length']);
  assertEquals(tokens[0].tokens, ['`', 'a ', '${{b: `${{c: 3}}`}}', '`']);
  assertEquals(tokens[0].tokens[2].tokens, ['${', '{', 'b', ':', ' ', '`${{c: 3}}`', '}', '}']);
  assertEquals(tokens[0].tokens[2].tokens[5].tokens, ['`', '${{c: 3}}', '`']);
  assertEquals(tokens[0].tokens[2].tokens[5].tokens[1].tokens, ['${', '{', 'c', ':', ' ', '3', '}', '}']);
});

Deno.test('lex.template-tag-expr', () => {
  let code = 'var a=`hello <b class="one ${class}">world</b>!`;';
  let tokens = lex(jsHtml, code);
  tokens = tokensToText(tokens);

  // JavaScript level
  assertEquals(tokens, ['var',' ','a','=','`hello <b class="one ${class}">world</b>!`',';']);

  // Template string
  assertEquals(tokens[4].tokens, ['`','hello ','<b class="one ${class}">','world','</b>','!','`']);
  assertEquals(tokens[4].tokens[0].mode, 'template');

  // Html tag inside template string.
  assertEquals(tokens[4].tokens[2].tokens, ['<b',' ','class','=','"one ${class}"','>']);
  assertEquals(tokens[4].tokens[2].tokens[0].mode, 'templateTag');

  // dquote string inside tag.
  assertEquals(tokens[4].tokens[2].tokens[4].tokens, ['"','one ','${class}','"']);
  assertEquals(tokens[4].tokens[2].tokens[4].tokens[0].mode, 'dquote');

  // js expression inside dquote string.
  assertEquals(tokens[4].tokens[2].tokens[4].tokens[2].tokens, ['${','class','}']);
  assertEquals(tokens[4].tokens[2].tokens[4].tokens[2].tokens[0].mode, 'js');
});

Deno.test('lex.error', () => {
  let code = 'div \n ; ';
  let msg = '';

  jsHtml.allowUnknownTagTokens = false;
  try {
    lex(jsHtml, code, 'tag');
  }
  catch (e) {
    msg = e.message;
  }
  jsHtml.allowUnknownTagTokens = true;
  assertStartsWith(msg, 'Unknown token within "tag" at 2:3');
});

Deno.test('lex.template-multiple', () => {
  let code = '${this.$one}#${this.$two}';

  let tokens = lex(jsHtml, code, 'template');
  tokens = tokensToText(tokens);
  console.log(tokens); // TODO
});

Deno.test('lex.template-multiple2', () => {
  let old = jsHtml.allowHashTemplates;
  jsHtml.allowHashTemplates = true;

  let code = '#{this.$one} # #{this.$two} # #{this.three}';

  let tokens = lex(jsHtml, code, 'template');
  tokens = tokensToText(tokens);
  assertEquals(tokens, ['#{this.$one}', ' # ', '#{this.$two}', ' # ', '#{this.three}']);

  jsHtml.allowHashTemplates = old;
});

Deno.test('lex.template-misc', () => {
  let code = '${`<div>${this.one}#${this.two}#${this.three}</div>`}';

  let tokens = lex(jsHtml, code, 'template');
  console.log(tokens); // TODO
});

Deno.test('lex.template-script-tag', () => {
  let code = '${var a=`<script>var b=1<3</script>`}';
  let tokens = lex(jsHtml, code, 'template');
  tokens = tokensToText(tokens);

  let js = tokens[0].tokens;
  assertEquals(js, ['${', 'var', ' ', 'a', '=', '`<script>var b=1<3</script>`', '}']);

  let template = js[5].tokens;
  assertEquals(template, ['`', '<script>', 'var b=1<3', '</script>', '`']);
  assertEquals(template.map(t=>t.type), ['template', 'openTag', 'script', 'closeTag', 'templateEnd']);

  let js2 = template[2].tokens;
  assertEquals(js2, ['var', ' ', 'b', '=', '1', '<', '3']);
});

Deno.test('lex.template-script-tag2', () => {
  let code = '${`<div>${var a=`<script>var b=1<3</script>`}</div>`}';

  let tokens = lex(jsHtml, code, 'template');
  tokens = tokensToText(tokens);
  console.log(tokens.tokens); // TODO
});

Deno.test('lex.regex', () => {
  let code = 'a=/^\\/(\\\\\\\\|\\\\\\/|\\[\\^\\/]|\\[[^]]]|[^/])+\\/[agimsx]*/';
  let tokens = lex(jsHtml, code, 'js');
  tokens = tokensToText(tokens);
  assertEquals(tokens, ['a', '=', '/^\\/(\\\\\\\\|\\\\\\/|\\[\\^\\/]|\\[[^]]]|[^/])+\\/[agimsx]*/']);
  assertEquals(tokens[2].type, 'regex');
});

Deno.test('lex.regex2', () => {
  let code = `/[/]+/g; b='/'`;
  let tokens = lex(jsHtml, code, 'js');
  tokens = tokensToText(tokens);
  assertEquals(tokens, ['/[/]+/g', ';', ' ', 'b', '=', "'/'"]);
  assertEquals(tokens[0].type, 'regex');
});

Deno.test('lex.html-self-closing', () => {
  let code = '<img/>';

  let tokens = lex(jsHtml, code, 'html');
  tokens = tokensToText(tokens);
  assertEquals(tokens[0].tokens, ['<img', '/>']);
  assertEquals(tokens[0].tokens.map(t=>t.type), ['openTag', 'tagEnd']);
  assertEquals(tokens[0].tokens.map(t=>t.mode), ['tag', 'tag']);
});

Deno.test('lex.html-comment', () => {
  let code = '<div><!-- \r\ncomment --></div>';

  let tokens = lex(jsHtml, code, 'html');
  tokens = tokensToText(tokens);

  assertEquals(tokens, ['<div>', '<!-- \r\ncomment -->', '</div>']);
  assertEquals(tokens.map(t=>t.type), ['openTag', 'comment', 'closeTag']);
  assertEquals(tokens.map(t=>t.mode), ['html', 'html', 'html']);
});

Deno.test('lex.comment-expr', () => {
  let code = '`<div><!-- ${a} --></div>`';

  let tokens = lex(jsHtml, code, 'js');
  tokens = tokensToText(tokens);

  assertEquals(tokens[0].tokens[2].tokens, ['<!--', ' ', '${a}', ' ', '-->']);
  assertEquals(tokens[0].tokens[2].tokens.map(t=>t.type), ['comment', 'commentBody', 'expr', 'commentBody', 'commentEnd']);
});

Deno.test('lex.attr', () => {
  let code = '<div a="${one}" b="#{two}" ${three} #{four}></div>';

  let old = lexHtmlJs.allowHashTemplates;
  lexHtmlJs.allowHashTemplates = true;
  let tokens = lex(jsHtml, code, 'template');
  tokens = tokensToText(tokens);
  lexHtmlJs.allowHashTemplates = old;

  console.log(tokens[0]);

  assertEquals(tokens[0].tokens[4].tokens[1].tokens, ['${', 'one', '}']);
  assertEquals(tokens[0].tokens[8].tokens[1].tokens, ['#{', 'two', '}']);
  assertEquals(tokens[0].tokens[10].tokens, ['${', 'three', '}']);
  assertEquals(tokens[0].tokens[12].tokens, ['#{', 'four', '}']);
});

Deno.test('lex.unclosed-tag', () => {
  let code = `<p>text`;

  let tokens = lex(jsHtml, code, 'html');
  tokens = tokensToText(tokens);

  assertEquals(tokens, ['<p>', 'text']);
  assertEquals(tokens.map(t=>t.type), ['openTag', 'text']);
});

Deno.test('lex.unclosed-comment', () => {
  let code = `<!--text`;

  let tokens = lex(jsHtml, code, 'html');
  tokens = tokensToText(tokens);

  assertEquals(tokens, ['<!--text']);
  assertEquals(tokens[0].tokens, ['<!--', 'text']);
  assertEquals(tokens[0].tokens.map(t=>t.type), ['comment', 'commentBody']);
});

Deno.test('lex.badHtml1', () => {
  let code = "a = `Template <${3}>`;";
  let tokens = lex(htmljs, code, 'js');
  tokens = tokensToText(tokens);
  console.log(tokens); // TODO
});

Deno.test('lex.badHtml2', () => {
  let code = "a = `Template <$3}>`;";
  let tokens = lex(htmljs, code, 'js');
  tokens = tokensToText(tokens);
  console.log(tokens); // TODO
});

Deno.test('lex.badHtml3', () => {
  let code = "a = `Template <${3>`;";
  let tokens = lex(htmljs, code, 'js');
  tokens = tokensToText(tokens);
  console.log(tokens); // TODO
});

Deno.test('lex.php', () => {
  var code = `<?php print 1?-->`;
  let tokens = lex(htmljs, code, 'html');
  tokens = tokensToText(tokens);
  assertEquals(tokens, ['<?php print 1?-->']);
  assertEquals(tokens[0].type, 'text');
});

Deno.test('lex.benchmark.100kOptions', () => {
  const num = 100_000;
  const code = `<select id="select">${Array(num).fill(`<option>item</option>`).join('')}</select>`;

  let start = new Date();

  let tokens = lex(htmljs, code, 'html');
...
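The tests above lean on two assert helpers: assertEquals is Deno's standard one, but assertStartsWith is not part of Deno's std asserts module, so it presumably comes from the file's elided import section. A minimal sketch of such a helper, assuming it only needs to check a string prefix:

import { assert } from 'https://deno.land/std/testing/asserts.ts';

// Hypothetical helper matching the usage in lex.error above:
// fail unless `actual` is a string beginning with `expected`.
function assertStartsWith(actual, expected) {
  assert(typeof actual === 'string' && actual.startsWith(expected),
    `Expected a string starting with "${expected}" but got "${actual}"`);
}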
Parse.test.js
Source: Parse.test.js
...
 * Convert an array of Token to an array of strings.
 * Also works with an array of arrays, to arbitrary depth, converting each Token to a string of its text property.
 * @param array {Token[]|Token[][]}
 * @returns {string[]|string[][]} */
function tokensToText(array) {
  let result = [];
  for (let i in array)
    if (Array.isArray(array[i]))
      result[i] = tokensToText(array[i]);
    else {
      result[i] = array[i].text;
      if (array[i].tokens)
        result[i] = Object.assign(result[i], {tokens: tokensToText(array[i].tokens)});
    }
  return result;
}

Deno.test('Parse.singleVar', () => {
  let code = 'fruit';
  let tokens = lex(htmljs, code, 'js');

  let pathTokens = Parse.varExpressions_(tokens, ['fruit']);
  assertEquals(tokensToText(pathTokens), [['fruit']]);
});

Deno.test('Parse.thisVars', () => {
  let code = 'this.one';
  let tokens = lex(htmljs, code, 'js');

  let pathTokens = Parse.varExpressions_(tokens);
  pathTokens = tokensToText(pathTokens);
  assertEquals(pathTokens, [['this', '.', 'one']]);
});

Deno.test('Parse.multipleVars', () => {
  let code = 'this.one.two(); test["a"].b; test()';
  let tokens = lex(htmljs, code, 'js');

  let pathTokens = Parse.varExpressions_(tokens, ['test']);
  pathTokens = tokensToText(pathTokens);

  assertEquals(pathTokens,
    [
      ['this', '.', 'one'],
      ['test', '[', '"a"', ']', '.', 'b']
    ]
  );
});

Deno.test('Parse.duplicate', () => {
  let code = 'this.one.two; one.three';
  let tokens = lex(htmljs, code, 'js');

  let pathTokens = Parse.varExpressions_(tokens, ['one']);
  pathTokens = tokensToText(pathTokens);
  assertEquals(pathTokens,
    [ // Make sure we don't match the "one.two" within "this.one.two".
      ['this', '.', 'one', '.', 'two'],
      ['one', '.', 'three']
    ]
  );
});

Deno.test('Parse.varExpressionToPath', () => {
  let code = 'this["fruit"][0].name';
  let tokens = lex(htmljs, code, 'js');
  let pathTokens = Parse.varExpressions_(tokens);
  let paths = pathTokens.map(Parse.varExpressionToPath_);

  assertEquals(paths, [['this', 'fruit', '0', 'name']]);
});

Deno.test('Parse.varExpressionWithinParens', () => {
  let code = 'escapeHtml(sport[0].name)';
  let tokens = lex(htmljs, code, 'js');
  let pathTokens = Parse.varExpressions_(tokens, ['fruit', 'sport']);
  let paths = pathTokens.map(Parse.varExpressionToPath_);

  assertEquals(paths, [['sport', '0', 'name']]);
});

Deno.test('Parse.findFunction.arrow1', () => {
  let code = 'b=3;a => a+1; b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a => a+1');
});

Deno.test('Parse.findFunction.arrow2', () => {
  let code = 'b=3;a => (a+1); b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a => (a+1)');
});

Deno.test('Parse.findFunction.arrow3', () => {
  let code = 'b=3;(a => a+1); b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a => a+1');
});

Deno.test('Parse.findFunction.arrow4', () => {
  let code = 'b=3;a => { return a+1 }; b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a => { return a+1 }');
});

Deno.test('Parse.findFunction.arrow5', () => {
  let code = 'b=3;a => { return {a:1} }; b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a => { return {a:1} }');
});

Deno.test('Parse.findFunction.arrow6', () => {
  let code = 'b=3;(a) => a+1;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), '(a) => a+1');
});

Deno.test('Parse.findFunction.arrow7', () => {
  let code = '() => a+1;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), '() => a+1');
});

Deno.test('Parse.findFunction.func', () => {
  let code = 'b=3;function(a) { return a+1 }; b=4;';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunction(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'function(a) { return a+1 }');
});

Deno.test('Parse.findFunctionArgs.arrow1', () => {
  let code = 'a => a+1';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a');
});

Deno.test('Parse.findFunctionArgs.arrow2', () => {
  let code = '(a) => (a+1)';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a');
});

Deno.test('Parse.findFunctionArgs.arrow3', () => {
  let code = '(a, b) => a+1';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a, b');
});

Deno.test('Parse.findFunctionArgs.arrow4', () => {
  let code = '(a=1) => { return a+1 }';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a=1');
});

Deno.test('Parse.findFunctionArgs.arrow5', () => {
  let code = '(a={}, b) => { return {a:1} }';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a={}, b');
});

Deno.test('Parse.findFunctionArgs.arrow6', () => {
  let code = '(a=x=> {return (x+1)},b) => { return {a:1} }';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a=x=> {return (x+1)},b');
});

Deno.test('Parse.findFunctionArgs.func', () => {
  let code = 'function(a) { return a+1 }';
  let tokens = lex(htmljs, code, 'js');

  let result = Parse.findFunctionArgs(tokens);
  assertEquals(tokensToText(tokens.slice(...result)).join(''), 'a');
...
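Parse.varExpressionToPath_ itself isn't shown on this page, but the assertions above pin down its contract: a token chain such as this["fruit"][0].name collapses to the flat path ['this', 'fruit', '0', 'name']. A rough sketch of that conversion, written only against the behavior the tests require (the standalone name and details are assumptions, not the library's actual code):

// Hypothetical reimplementation for illustration only.
// Turns expression tokens like ['this','[','"fruit"',']','[','0',']','.','name']
// into the plain path ['this', 'fruit', '0', 'name'].
function varExpressionToPath(exprTokens) {
  let path = [];
  for (let text of exprTokens.map(t => t.text ?? t)) {
    if (text === '.' || text === '[' || text === ']')
      continue; // Punctuation between path segments carries no path information.
    if (/^['"`]/.test(text))
      text = text.slice(1, -1); // Strip quotes from string keys like "fruit".
    path.push(text);
  }
  return path;
}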
error.js
Source: error.js
...
    return count > 0
      ? source.slice(position, position + count)
      : source.slice(Math.max(position + count, 0), position);
  }
  function tokensToText(inputs, { precedes } = {}) {
    const text = inputs.map((t) => t.trivia + t.value).join("");
    const nextToken = source[position];
    if (nextToken.type === "eof") {
      return text;
    }
    if (precedes) {
      return text + nextToken.trivia;
    }
    return text.slice(nextToken.trivia.length);
  }
  const maxTokens = 5; // arbitrary but works well enough
  const line =
    source[position].type !== "eof"
      ? source[position].line
      : source.length > 1
        ? source[position - 1].line
        : 1;
  // Gather up to five tokens of context on each side of the error position.
  const precedingLastLine = lastLine(
    tokensToText(sliceTokens(-maxTokens), { precedes: true })
  );
  const subsequentTokens = sliceTokens(maxTokens);
  const subsequentText = tokensToText(subsequentTokens);
  const subsequentFirstLine = subsequentText.split("\n")[0];
  // A caret aligned under the first token at the error position.
  const spaced = " ".repeat(precedingLastLine.length) + "^";
  const sourceContext = precedingLastLine + subsequentFirstLine + "\n" + spaced;
  const contextType = kind === "Syntax" ? "since" : "inside";
  const inSourceName = source.name ? ` in ${source.name}` : "";
  const grammaticalContext =
    current && current.name
      ? `, ${contextType} \`${current.partial ? "partial " : ""}${contextAsText(current)}\``
      : "";
  const context = `${kind} error at line ${line}${inSourceName}${grammaticalContext}:\n${sourceContext}`;
  return {
    message: `${context} ${message}`,
...
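This appears to be the error formatter from webidl2.js, so the easiest way to see the caret logic in action is to hand webidl2's parse() some invalid IDL. A hedged usage sketch; the exact wording of the thrown message depends on the library version and on the elided callers:

import { parse } from "webidl2";

try {
  parse("interface Mixed {"); // Unterminated interface body.
} catch (e) {
  // The message embeds the two-line sourceContext built above, roughly:
  //   Syntax error at line 1, since `interface Mixed`:
  //   interface Mixed {
  //                    ^ ...
  console.log(e.message);
}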
Using AI Code Generation
var WPTokenizer = require("wptokenizer");
var tokenizer = new WPTokenizer();
var text = "The quick brown fox jumps over the lazy dog.";
var tokens = tokenizer.tokenize(text);
console.log(tokenizer.tokensToText(tokens));
Using AI Code Generation
var wptokenizer = require('wptokenizer');
var text = 'The quick brown fox jumps over the lazy dog.';
var tokens = wptokenizer.tokenize(text);
var result = wptokenizer.tokensToText(tokens);
console.log(result);

// Browser variant: tokenize the contents of a text field on button click.
function tokenize() {
  var text = document.getElementById('text').value;
  var tokens = wptokenizer.tokenize(text);
  var result = wptokenizer.tokensToText(tokens);
  document.getElementById('result').innerHTML = result;
}

<!-- In the page's markup: -->
<button onclick="tokenize();">Tokenize</button>
Using AI Code Generation
var tokenizer = require('wptokenizer');
var text = "The quick brown fox jumps over the lazy dog.";
var tokens = tokenizer.tokenize(text);
console.log(tokenizer.tokensToText(tokens));
Using AI Code Generation
var wptokenizer = require('wptokenizer');
var fs = require('fs');
var text = fs.readFileSync('test.txt', 'utf8');
var tokens = wptokenizer.tokenize(text);
var result = wptokenizer.tokensToText(tokens);
console.log(result);
Using AI Code Generation
var wptools = require('wptools');
var fs = require('fs');
var text = fs.readFileSync('./test.txt').toString('utf-8');
var tokens = wptools.tokenize(text);
var result = wptools.tokensToText(tokens);
console.log(result);
Using AI Code Generation
var wptokenizer = require('wptokenizer');
// Hindi: "If you want to know more about this topic, then click here."
var tokens = wptokenizer.tokenize('अगर आप इस विषय के बारे में अधिक जानना चाहते हैं, तो यहां क्लिक करें।');
console.log(wptokenizer.tokensToText(tokens));

tokensToText: function(tokens) {
  var text = '';
  for (var i = 0; i < tokens.length; i++) {
    text += tokens[i].value;
  }
  return text;
}

tokenize: function(text) {
  var tokens = [];
  var token = '';
  var isToken = false;
  for (var i = 0; i < text.length; i++) {
    var char = text[i];
    if (char == ' ') {
      // Spaces only end the current word; they are never emitted as tokens.
      if (isToken) {
        tokens.push({ value: token, type: 'word' });
        token = '';
      }
      isToken = false;
    } else if (char == '.' || char == ',' || char == '!' || char == '?' || char == ':' || char == ';' || char == '(' || char == ')' || char == '[' || char == ']' || char == '{' || char == '}' || char == '"' || char == "'") {
      // Punctuation ends the current word and becomes its own token.
      if (isToken) {
        tokens.push({ value: token, type: 'word' });
        token = '';
      }
      isToken = false;
      tokens.push({ value: char, type: 'punctuation' });
    } else {
      token += char;
      isToken = true;
    }
  }
  if (isToken) {
    tokens.push({ value: token, type: 'word' });
  }
  return tokens;
}
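Since tokenize() above treats spaces purely as separators and never emits them as tokens, tokensToText() cannot restore the original spacing. A quick round-trip demonstrating this, reusing the module-style calls from this page:

// Hypothetical round-trip demo for the implementation sketched above.
var wptokenizer = require('wptokenizer');
var tokens = wptokenizer.tokenize('Hello, world!');
// tokens: [{value:'Hello',type:'word'}, {value:',',type:'punctuation'},
//          {value:'world',type:'word'}, {value:'!',type:'punctuation'}]
console.log(wptokenizer.tokensToText(tokens)); // "Hello,world!" -- the space is lost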
Using AI Code Generation
var wptokenizer = require('wptokenizer');
var text = "This is an example of a sentence. This is another sentence.";
var tokens = wptokenizer.tokenize(text);
console.log(wptokenizer.tokensToText(tokens));
Using AI Code Generation
var WpTokenizer = require('wptokenizer');
var tokenizer = new WpTokenizer();
var text = "This is the first sentence. This is the second sentence.";
var tokenizedText = tokenizer.tokensToText(tokenizer.tokenize(text));
console.log(tokenizedText);

// The same call with different option combinations:
var tokenizedText = tokenizer.tokensToText(tokenizer.tokenize(text), {reconstruct: false});
console.log(tokenizedText);

var tokenizedText = tokenizer.tokensToText(tokenizer.tokenize(text), {reconstruct: false, preserveCase: true});
console.log(tokenizedText);

var tokenizedText = tokenizer.tokensToText(tokenizer.tokenize(text), {reconstruct: false, preserveCase: true, preservePunctuation: true});
console.log(tokenizedText);

var tokenizedText = tokenizer.tokensToText(tokenizer.tokenize(text), {reconstruct: true, preserveCase: true, preservePunctuation: true});
console.log(tokenizedText);