@@ -243,6 +243,19 @@ export async function doParse(iterator: string, options: ParserOptions = {}): Pr
 
         if (item.hint != null && BadTokensTypes.includes(item.hint)) {
 
+            const node: Token = getTokenType(item.token, item.hint);
+
+            errors.push({
+                action: 'drop',
+                message: 'Bad token',
+                syntax: null,
+                node,
+                location: {
+                    src,
+                    sta: item.sta,
+                    end: item.end
+                }
+            });
             // bad token
             continue;
         }
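Every hunk in this commit enriches the entries pushed onto `errors`. Pieced together from the fields used across the diff, the record shape looks roughly like the sketch below. This is a hypothetical reconstruction, not the library's declared type: the real `Token` and `Location` definitions and the error interface may differ, and the position shapes (`sta`, `end`) are not shown in the diff, so they are left as `unknown` here.

```ts
// Hypothetical reconstruction of the error record used in this diff.
// Field names (action, message, syntax, node, location, rawTokens) come
// from the hunks themselves; everything else is an assumption.
interface ErrorLocation {
    src: string;   // source identifier, as passed via `src` above
    sta: unknown;  // start position; exact shape not shown in this diff
    end: unknown;  // end position; exact shape not shown in this diff
}

interface ParserErrorSketch {
    action: 'drop';           // the only action appearing in this diff
    message: string;
    syntax?: string | null;
    node?: unknown;           // offending token or AST node
    rawTokens?: unknown[];    // raw tokens of the dropped construct
    location?: ErrorLocation;
}
```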
@@ -310,7 +323,12 @@ export async function doParse(iterator: string, options: ParserOptions = {}): Pr
             errors.push({
                 action: 'drop',
                 message: 'invalid block',
-                rawTokens: tokens.slice()
+                rawTokens: tokens.slice(),
+                location: {
+                    src,
+                    sta: tokens[0].sta,
+                    end: tokens[tokens.length - 1].end
+                }
             });
         }
     }
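Note that `tokens[0].sta` and `tokens[tokens.length - 1].end` assume a non-empty token slice. A small guard helper in the same spirit might look like this (hypothetical, not part of the patch):

```ts
// Hypothetical helper: derive a location spanning a token slice.
// Assumes each token carries `sta` and `end` position fields, as the
// hunk above does; returns null for an empty slice instead of throwing.
function spanOf<T extends { sta: unknown; end: unknown }>(
    src: string,
    tokens: T[]
): { src: string; sta: unknown; end: unknown } | null {

    if (tokens.length == 0) {
        return null;
    }

    return {src, sta: tokens[0].sta, end: tokens[tokens.length - 1].end};
}
```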
@@ -563,6 +581,7 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
             errors.push({
                 action: 'drop',
                 message: `CDOCOMM not allowed here ${JSON.stringify(tokens[i], null, 1)}`,
+                node: tokens[i],
                 location
             });
             continue;
@@ -607,7 +626,7 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
 
     if (tokens[0]?.typ == EnumToken.AtRuleTokenType) {
 
-        const atRule: AtRuleToken = <AtRuleToken>tokens.shift();
+        const atRule: AtRuleToken = tokens.shift() as AtRuleToken;
         const location: Location = <Location>map.get(atRule);
 
         // @ts-ignore
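The only change in this hunk is the cast syntax: `<AtRuleToken>tokens.shift()` becomes `tokens.shift() as AtRuleToken`. Both spellings are equivalent type assertions in TypeScript, but the `as` form is the one that also works in `.tsx` files, where angle-bracket casts clash with JSX:

```ts
// Both assertions have identical static meaning and no runtime effect;
// `as` is simply the JSX-safe spelling.
const a = <string>JSON.parse('"x"');    // angle-bracket form, rejected in .tsx files
const b = JSON.parse('"x"') as string;  // `as` form, valid everywhere
```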
@@ -637,7 +656,8 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
             // @ts-ignore
             ['charset', 'layer', 'import'].includes((<AstInvalidAtRule>context.chi[i]).nam as string))) {
 
-            errors.push({action: 'drop', message: 'invalid @import', location});
+            // @ts-ignore
+            errors.push({action: 'drop', message: 'invalid @import', location, rawTokens: [atRule, ...tokens]});
             return null;
         }
     }
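Carrying `rawTokens` in the error record lets a consumer show exactly what was dropped; the `@ts-ignore` added above it suggests the field isn't on the declared error type yet. Since the later hunks in this diff build messages by reducing over `renderToken`, a consumer could plausibly re-render the dropped construct the same way (a sketch in the same module context; `Token` and `renderToken` are assumed to be the imports this file already uses):

```ts
// Sketch: turn an error's rawTokens back into source text, mirroring
// the reduce used in the message-building hunks further down.
function renderDropped(rawTokens: Token[]): string {

    return rawTokens.reduce(
        (acc: string, curr: Token) => acc + renderToken(curr, {minify: false}),
        ''
    );
}
```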
@@ -742,7 +762,7 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
                 action: 'drop',
                 message: '@charset must have only one space',
                 // @ts-ignore
-                location
+                location, rawTokens: [atRule, ...tokens]
             });
 
             return null;
@@ -841,6 +861,7 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
             errors.push({
                 action: 'drop',
                 message: valid.error + ' - "' + tokens.reduce((acc, curr) => acc + renderToken(curr, {minify: false}), '') + '"',
+                node,
                 // @ts-ignore
                 location: {src, ...(map.get(valid.node) ?? location)}
             });
@@ -980,6 +1001,7 @@ function parseNode(results: TokenizeResult[], context: AstRuleList | AstInvalidR
             errors.push({
                 action: 'drop',
                 message: valid.error + ' - "' + tokens.reduce((acc, curr) => acc + renderToken(curr, {minify: false}), '') + '"',
+                node,
                 // @ts-ignore
                 location
             });
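With `node` and `location` attached, a caller can report precise source positions for dropped constructs. A minimal consumer sketch, assuming the `doParse` entry point shown in the hunk headers resolves to a result carrying the `errors` array populated above (the result shape is an assumption, not shown in this diff):

```ts
// Hypothetical consumer: log every dropped construct with its source span.
// `result.errors` is assumed to be the array populated by the pushes above;
// top-level await requires an ES module context.
const result = await doParse(css);

for (const err of result.errors ?? []) {

    if (err.action == 'drop') {
        console.warn(`${err.message} at ${JSON.stringify(err.location ?? null)}`);
    }
}
```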
@@ -1922,7 +1944,8 @@ export function parseTokens(tokens: Token[], options: ParseTokenOptions = {}): T
             if ((slice.charAt(0) != '-' || (slice.charAt(0) == '-' && isIdentStart(slice.charCodeAt(1)))) && isIdent(slice)) {
                 Object.assign(val, {typ: EnumToken.IdenTokenType, val: slice});
             }
-        } else if (val.typ == EnumToken.LiteralTokenType && (val as LiteralToken).val == '|') {
+        } else
+        if (val.typ == EnumToken.LiteralTokenType && (val as LiteralToken).val == '|') {
 
             let upper: number = m;
             let lower: number = m;