@@ -498,12 +498,23 @@ impl<'a> Parser<'a> {
             match self.peek_token().token {
                 Token::EOF => break,
 
+                // end of statement
+                Token::Word(word) => {
+                    if expecting_statement_delimiter && word.keyword == Keyword::END {
+                        break;
+                    }
+
+                    if expecting_statement_delimiter && word.keyword == Keyword::GO {
+                        expecting_statement_delimiter = false;
+                    }
+                },
+
                 // don't expect a semicolon statement delimiter after a newline when not otherwise required
                 Token::Whitespace(Whitespace::Newline) => {
                     if !self.options.require_semicolon_stmt_delimiter {
                         expecting_statement_delimiter = false;
                     }
-                }
+                },
                 _ => {}
             }
 
@@ -512,8 +523,9 @@ impl<'a> Parser<'a> {
             }
 
             let statement = self.parse_statement()?;
+            // Treat batch delimiter as an end of statement, so no additional statement delimiter expected here
+            expecting_statement_delimiter = !matches!(statement, Statement::Go(_)) && self.options.require_semicolon_stmt_delimiter;
             stmts.push(statement);
-            expecting_statement_delimiter = self.options.require_semicolon_stmt_delimiter;
         }
         Ok(stmts)
     }
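The two hunks above change how `parse_statements` treats a batch delimiter: a `GO` word seen where a statement delimiter was expected clears that expectation, and a parsed `Statement::Go` never demands a trailing semicolon of its own. The sketch below shows the intended end-to-end effect; `Parser::parse_sql` and `MsSqlDialect` are existing sqlparser entry points, `Statement::Go` is the variant added by this change, and the exact output shape is an assumption since the keyword and dialect wiring is not visible in these hunks.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // A script with two batches separated by a GO on its own line.
    let sql = "SELECT 1\nGO\nSELECT 2";

    // With the changes above, the batch separator is expected to surface
    // as its own Statement::Go entry between the two queries rather than
    // tripping the "expected end of statement" check.
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql)?;
    for statement in &statements {
        println!("{statement:?}");
    }
    Ok(())
}
```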
@@ -653,6 +665,10 @@ impl<'a> Parser<'a> {
                 Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                 Keyword::PRINT => self.parse_print(),
                 Keyword::RETURN => self.parse_return(),
+                Keyword::GO => {
+                    self.prev_token();
+                    self.parse_go()
+                }
                 _ => self.expected("an SQL statement", next_token),
             },
             Token::LParen => {
@@ -4039,6 +4055,17 @@ impl<'a> Parser<'a> {
         })
     }
 
+    /// Return nth previous token, possibly whitespace
+    /// (or [`Token::EOF`] when before the beginning of the stream).
+    pub(crate) fn peek_prev_nth_token_no_skip_ref(&self, n: usize) -> &TokenWithSpan {
+        // 0 = next token, -1 = current token, -2 = previous token
+        let peek_index = self.index.saturating_sub(1).saturating_sub(n);
+        if peek_index == 0 {
+            return &EOF_TOKEN;
+        }
+        self.tokens.get(peek_index).unwrap_or(&EOF_TOKEN)
+    }
+
     /// Return true if the next tokens exactly `expected`
     ///
     /// Does not advance the current token.
@@ -4155,6 +4182,29 @@ impl<'a> Parser<'a> {
         )
     }
 
+    /// Look backwards in the token stream and expect that there were only whitespace tokens until the previous newline or the beginning of the string
+    pub(crate) fn prev_only_whitespace_until_newline(&mut self) -> bool {
+        let mut look_back_count = 1;
+        loop {
+            let prev_token = self.peek_prev_nth_token_no_skip_ref(look_back_count);
+            match prev_token.token {
+                Token::EOF => break true,
+                Token::Whitespace(ref w) => match w {
+                    Whitespace::Newline => break true,
+                    // special consideration required for single line comments since that string includes the newline
+                    Whitespace::SingleLineComment { comment, prefix: _ } => {
+                        if comment.ends_with('\n') {
+                            break true;
+                        }
+                        look_back_count += 1;
+                    }
+                    _ => look_back_count += 1,
+                },
+                _ => break false,
+            };
+        }
+    }
+
     /// If the current token is the `expected` keyword, consume it and returns
     /// true. Otherwise, no tokens are consumed and returns false.
     #[must_use]
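The `SingleLineComment` arm exists because the tokenizer keeps the trailing newline inside the comment text, so the look-back would otherwise never see a `Whitespace::Newline` token after a trailing `--` comment and would reject a `GO` that legitimately starts its own line. A hedged sketch of that case, reusing the same assumed entry points as the earlier example:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // The `-- end of batch` comment token carries the newline itself, so the
    // look-back above should still treat GO as beginning its own line.
    let sql = "SELECT 1 -- end of batch\nGO\nSELECT 2";
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql)?;
    for statement in &statements {
        println!("{statement:?}");
    }
    Ok(())
}
```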
@@ -16440,6 +16490,71 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Parse [Statement::Go]
+    fn parse_go(&mut self) -> Result<Statement, ParserError> {
+        self.expect_keyword_is(Keyword::GO)?;
+
+        // disambiguate between GO as batch delimiter & GO as identifier (etc)
+        // compare:
+        // ```sql
+        // select 1 go
+        // ```
+        // vs
+        // ```sql
+        // select 1
+        // go
+        // ```
+        if !self.prev_only_whitespace_until_newline() {
+            parser_err!(
+                "GO may only be preceded by whitespace on a line",
+                self.peek_token().span.start
+            )?;
+        }
+
+        let count = loop {
+            // using this peek function because we want to halt this statement parsing upon newline
+            let next_token = self.peek_token_no_skip();
+            match next_token.token {
+                Token::EOF => break None::<u64>,
+                Token::Whitespace(ref w) => match w {
+                    Whitespace::Newline => break None,
+                    _ => _ = self.next_token_no_skip(),
+                },
+                Token::Number(s, _) => {
+                    let value = Some(Self::parse::<u64>(s, next_token.span.start)?);
+                    self.advance_token();
+                    break value;
+                }
+                _ => self.expected("literal int or newline", next_token)?,
+            };
+        };
+
+        loop {
+            let next_token = self.peek_token_no_skip();
+            match next_token.token {
+                Token::EOF => break,
+                Token::Whitespace(ref w) => match w {
+                    Whitespace::Newline => break,
+                    Whitespace::SingleLineComment { comment, prefix: _ } => {
+                        if comment.ends_with('\n') {
+                            break;
+                        }
+                        _ = self.next_token_no_skip();
+                    }
+                    _ => _ = self.next_token_no_skip(),
+                },
+                _ => {
+                    parser_err!(
+                        "GO must be followed by a newline or EOF",
+                        self.peek_token().span.start
+                    )?;
+                }
+            };
+        }
+
+        Ok(Statement::Go(GoStatement { count }))
+    }
+
     /// Consume the parser and return its underlying token buffer
     pub fn into_tokens(self) -> Vec<TokenWithSpan> {
         self.tokens
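`parse_go` also accepts T-SQL's optional repeat count: an integer literal on the same line as `GO` is stored in `GoStatement::count`, and anything other than whitespace, a trailing single-line comment, a newline, or EOF after it is rejected. A hedged usage sketch under the same assumptions as the earlier examples:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // A bare GO should come back as GoStatement { count: None }.
    let bare = Parser::parse_sql(&MsSqlDialect {}, "SELECT 1\nGO")?;
    println!("{bare:?}");

    // GO followed by an integer on the same line should come back as
    // GoStatement { count: Some(50) }; any other token after GO before the
    // newline is expected to produce a parse error.
    let counted = Parser::parse_sql(&MsSqlDialect {}, "SELECT 1\nGO 50")?;
    println!("{counted:?}");

    Ok(())
}
```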
@@ -16681,6 +16796,31 @@ mod tests {
         })
     }
 
+    #[test]
+    fn test_peek_prev_nth_token_no_skip_ref() {
+        all_dialects().run_parser_method(
+            "SELECT 1;\n-- a comment\nRAISERROR('test', 16, 0);",
+            |parser| {
+                parser.index = 1;
+                assert_eq!(parser.peek_prev_nth_token_no_skip_ref(0), &Token::EOF);
+                assert_eq!(parser.index, 1);
+                parser.index = 7;
+                assert_eq!(
+                    parser.token_at(parser.index - 1).token,
+                    Token::Word(Word {
+                        value: "RAISERROR".to_string(),
+                        quote_style: None,
+                        keyword: Keyword::RAISERROR,
+                    })
+                );
+                assert_eq!(
+                    parser.peek_prev_nth_token_no_skip_ref(2),
+                    &Token::Whitespace(Whitespace::Newline)
+                );
+            },
+        );
+    }
+
     #[cfg(test)]
     mod test_parse_data_type {
         use crate::ast::{