Skip to content

Commit 3bee1db

Browse files
committed
allow elements to start with underscore
Allow element names to start with an underscore, because with gjs we can reference variables whose names begin with an underscore. Also allow a leading `$` and leading Unicode (non-Latin-1) characters, for both start tags and end tags. Adds a `isUnicode` check in utils plus start-tag and end-tag tests covering `_`, `$`, and Unicode leading characters.
1 parent 15b1d6e commit 3bee1db

File tree

3 files changed

+42
-3
lines changed

3 files changed

+42
-3
lines changed

src/evented-tokenizer.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { preprocessInput, isAlpha, isSpace } from './utils';
1+
import { preprocessInput, isAlpha, isUnicode, isSpace } from './utils';
22
import { EntityParser, TokenizerDelegate, TokenizerState } from './types';
33

44
export default class EventedTokenizer {
@@ -170,7 +170,7 @@ export default class EventedTokenizer {
170170
this.transitionTo(TokenizerState.markupDeclarationOpen);
171171
} else if (char === '/') {
172172
this.transitionTo(TokenizerState.endTagOpen);
173-
} else if (char === '@' || char === ':' || isAlpha(char)) {
173+
} else if (char === '$' || char === '_' || char === '@' || char === ':' || isAlpha(char) || isUnicode(char)) {
174174
this.transitionTo(TokenizerState.tagName);
175175
this.tagNameBuffer = '';
176176
this.delegate.beginStartTag();
@@ -671,7 +671,7 @@ export default class EventedTokenizer {
671671
endTagOpen() {
672672
let char = this.consume();
673673

674-
if (char === '@' || char === ':' || isAlpha(char)) {
674+
if (char === '$' || char === '_' || char === '@' || char === ':' || isAlpha(char) || isUnicode(char)) {
675675
this.transitionTo(TokenizerState.endTagName);
676676
this.tagNameBuffer = '';
677677
this.delegate.beginEndTag();

src/utils.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
const WSP = /[\t\n\f ]/;
22
const ALPHA = /[A-Za-z]/;
3+
const UNICODE = /[^\u0000-\u00ff]/;
34
const CRLF = /\r\n?/g;
45

56
export function isSpace(char: string): boolean {
@@ -10,6 +11,10 @@ export function isAlpha(char: string): boolean {
1011
return ALPHA.test(char);
1112
}
1213

14+
export function isUnicode(char: string): boolean {
15+
return UNICODE.test(s);
16+
}
17+
1318
export function preprocessInput(input: string): string {
1419
return input.replace(CRLF, '\n');
1520
}

tests/tokenizer-tests.ts

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -169,6 +169,23 @@ QUnit.test('A simple tag', function(assert) {
169169
assert.deepEqual(tokens, [startTag('div')]);
170170
});
171171

172+
QUnit.test('A simple tag with leading non alpha chars', function(assert) {
173+
let tokens = tokenize('<_div>');
174+
assert.deepEqual(tokens, [startTag('_div')]);
175+
176+
tokens = tokenize('<$div>');
177+
assert.deepEqual(tokens, [startTag('$div')]);
178+
179+
tokens = tokenize('<:div>');
180+
assert.deepEqual(tokens, [startTag(':div')]);
181+
182+
tokens = tokenize('<@div>');
183+
assert.deepEqual(tokens, [startTag('@div')]);
184+
185+
tokens = tokenize('<üdiv>');
186+
assert.deepEqual(tokens, [startTag('üdiv')]);
187+
});
188+
172189
QUnit.test('A simple tag with trailing spaces', function(assert) {
173190
let tokens = tokenize('<div \t\n>');
174191
assert.deepEqual(tokens, [startTag('div')]);
@@ -179,6 +196,23 @@ QUnit.test('A simple closing tag', function(assert) {
179196
assert.deepEqual(tokens, [endTag('div')]);
180197
});
181198

199+
QUnit.test('A simple closing tag with leading non alpha chars', function(assert) {
200+
let tokens = tokenize('</_div>');
201+
assert.deepEqual(tokens, [endTag('_div')]);
202+
203+
tokens = tokenize('</$div>');
204+
assert.deepEqual(tokens, [endTag('$div')]);
205+
206+
tokens = tokenize('</:div>');
207+
assert.deepEqual(tokens, [endTag(':div')]);
208+
209+
tokens = tokenize('</@div>');
210+
assert.deepEqual(tokens, [endTag('@div')]);
211+
212+
tokens = tokenize('</üdiv>');
213+
assert.deepEqual(tokens, [endTag('üdiv')]);
214+
});
215+
182216
QUnit.test('A simple closing tag with trailing spaces', function(assert) {
183217
let tokens = tokenize('</div \t\n>');
184218
assert.deepEqual(tokens, [endTag('div')]);

0 commit comments

Comments
 (0)