diff --git a/src/formatter.rs b/src/formatter.rs
index 5bf5541..ebdb429 100644
--- a/src/formatter.rs
+++ b/src/formatter.rs
@@ -101,6 +101,10 @@ pub(crate) fn format(
                 formatter.format_newline_reserved_word(token, &mut formatted_query);
                 formatter.previous_reserved_word = Some(token);
             }
+            TokenKind::Join => {
+                formatter.format_newline_reserved_word(token, &mut formatted_query);
+                formatter.previous_reserved_word = Some(token);
+            }
             TokenKind::Reserved => {
                 formatter.format_with_spaces(token, &mut formatted_query);
                 formatter.previous_reserved_word = Some(token);
@@ -210,8 +214,9 @@ impl<'a> Formatter<'a> {
         self.add_new_line(query);
         self.indentation.increase_top_level(span_len);
         query.push_str(&self.equalize_whitespace(&self.format_reserved_word(token.value)));
-        if !(!["select", "from"].contains(&token.value.to_lowercase().as_str())
-            && self.options.inline_first_top_level)
+        let new_line = ["select", "from"].contains(&token.value.to_lowercase().as_str())
+            || !self.options.inline_first_top_level;
+        if new_line
             && self
                 .options
                 .max_inline_top_level
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 286c0f0..1069777 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -69,6 +69,7 @@ pub(crate) enum TokenKind {
     Number,
     Placeholder,
     Word,
+    Join,
 }
 
 #[derive(Debug, Clone)]
@@ -379,6 +380,7 @@ fn get_reserved_word_token<'a>(
     alt((
         get_top_level_reserved_token(last_reserved_top_level_token),
         get_newline_reserved_token(last_reserved_token),
+        get_join_token(),
         get_top_level_reserved_token_no_indent,
         get_plain_reserved_token,
     ))
@@ -497,16 +499,11 @@ fn get_top_level_reserved_token<'a>(
     }
 }
 
-fn get_newline_reserved_token<'a>(
-    last_reserved_token: Option<Token<'a>>,
-) -> impl Parser<&'a str, Token<'a>, ContextError> {
+fn get_join_token<'a>() -> impl Parser<&'a str, Token<'a>, ContextError> {
     move |input: &mut &'a str| {
         let uc_input: String = get_uc_words(input, 3);
         let mut uc_input = uc_input.as_str();
 
-        // We have to break up the alternatives into multiple subsets
-        // to avoid exceeding the alt() 21 element limit.
-
         // Standard SQL joins
         let standard_joins = alt((
             terminated("JOIN", end_of_word),
@@ -544,6 +541,37 @@
             terminated("GLOBAL FULL JOIN", end_of_word),
         ));
 
+        // Combine all parsers
+        let result: PResult<&str> =
+            alt((standard_joins, specific_joins, special_joins)).parse_next(&mut uc_input);
+
+        if let Ok(token) = result {
+            let final_word = token.split(' ').last().unwrap();
+            let input_end_pos =
+                input.to_ascii_uppercase().find(final_word).unwrap() + final_word.len();
+            let token = input.next_slice(input_end_pos);
+            let kind = TokenKind::Join;
+            Ok(Token {
+                kind,
+                value: token,
+                key: None,
+            })
+        } else {
+            Err(ErrMode::from_error_kind(input, ErrorKind::Alt))
+        }
+    }
+}
+
+fn get_newline_reserved_token<'a>(
+    last_reserved_token: Option<Token<'a>>,
+) -> impl Parser<&'a str, Token<'a>, ContextError> {
+    move |input: &mut &'a str| {
+        let uc_input: String = get_uc_words(input, 3);
+        let mut uc_input = uc_input.as_str();
+
+        // We have to break up the alternatives into multiple subsets
+        // to avoid exceeding the alt() 21 element limit.
+
         // Legacy and logical operators
         let operators = alt((
             terminated("CROSS APPLY", end_of_word),
@@ -565,14 +593,7 @@
         ));
 
         // Combine all parsers
-        let result: PResult<&str> = alt((
-            standard_joins,
-            specific_joins,
-            special_joins,
-            operators,
-            alter_table_actions,
-        ))
-        .parse_next(&mut uc_input);
+        let result: PResult<&str> = alt((operators, alter_table_actions)).parse_next(&mut uc_input);
 
         if let Ok(token) = result {
             let final_word = token.split(' ').last().unwrap();