A tool to compile SQL to Elasticsearch queries

parser.rs 3.4KB

use std::{iter::Peekable, slice::Iter};

use crate::error::KappeError;
use crate::token::{Token, TokenType};

/// A parsed `SELECT ... FROM ...` statement.
#[derive(Debug, PartialEq)]
pub struct Select {
    pub fields: Vec<Field>,
    pub source: Identifier,
}

#[derive(Debug, PartialEq)]
pub struct Identifier {
    pub name: String,
}

/// A projected field: either a named column or `*`.
#[derive(Debug, PartialEq)]
pub enum Field {
    Named(String),
    Star,
}

impl Identifier {
    pub fn new(name: &str) -> Self {
        Self {
            name: name.to_string(),
        }
    }
}
/// Parses a token stream produced by the lexer into a `Select` AST.
pub fn parse(tokens: Vec<Token>) -> Result<Select, KappeError> {
    parse_select_expression(&mut tokens.iter().peekable())
}

fn parse_select_expression(tokens: &mut Peekable<Iter<Token>>) -> Result<Select, KappeError> {
    eat(tokens, TokenType::Select)?;
    let fields = parse_fields(tokens)?;
    eat(tokens, TokenType::From)?;
    let source = eat(tokens, TokenType::Identfiier).map(|t| Identifier::new(&t.value))?;
    Ok(Select { fields, source })
}

/// Parses either `*` or a comma-separated list of field names.
fn parse_fields(tokens: &mut Peekable<Iter<Token>>) -> Result<Vec<Field>, KappeError> {
    if current_token_is(tokens, TokenType::Star) {
        eat(tokens, TokenType::Star).map(|_| vec![Field::Star])
    } else {
        let mut fields: Vec<Field> = vec![];
        loop {
            let field = eat(tokens, TokenType::Identfiier)?;
            fields.push(Field::Named(field.value.clone()));
            if current_token_is(tokens, TokenType::Comma) {
                eat(tokens, TokenType::Comma)?;
            } else {
                break;
            }
        }
        Ok(fields)
    }
}
/// Returns true if the next token matches `token_type`, without consuming it.
fn current_token_is(tokens: &mut Peekable<Iter<Token>>, token_type: TokenType) -> bool {
    match tokens.peek() {
        Some(t) => t.token_type == token_type,
        None => false,
    }
}

/// Consumes the next token, returning it if it matches `token_type`
/// and an error otherwise.
fn eat<'a>(
    tokens: &'a mut Peekable<Iter<Token>>,
    token_type: TokenType,
) -> Result<&'a Token, KappeError> {
    match tokens.next() {
        Some(token) => {
            if token.token_type == token_type {
                Ok(token)
            } else {
                Err(KappeError::new(&format!(
                    "Expected {} but got {}",
                    token_type, token.token_type
                )))
            }
        }
        None => Err(KappeError::new("Unexpected end of tokens")),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::lexer::scan;

    fn _parse(input: &str) -> Result<Select, KappeError> {
        let tokens = scan(input).unwrap();
        parse(tokens)
    }

    #[test]
    fn it_parses_a_simple_select() {
        assert_eq!(
            _parse("SELECT * FROM index").unwrap(),
            Select {
                fields: vec![Field::Star],
                source: Identifier::new("index")
            },
        )
    }

    #[test]
    fn it_parses_a_select_with_field() {
        assert_eq!(
            _parse("SELECT field FROM index").unwrap(),
            Select {
                fields: vec![Field::Named("field".to_string())],
                source: Identifier::new("index")
            },
        )
    }

    #[test]
    fn it_parses_a_select_with_multiple_fields() {
        assert_eq!(
            _parse("SELECT field_one, field_two FROM index").unwrap(),
            Select {
                fields: vec![
                    Field::Named("field_one".to_string()),
                    Field::Named("field_two".to_string())
                ],
                source: Identifier::new("index")
            },
        )
    }
}
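
The compiler stage that turns this AST into an actual Elasticsearch request is not shown on this page. As a rough, hypothetical sketch of that direction (the `compile` function, the `crate::parser` module path, and the use of `serde_json` are assumptions for illustration, not part of the file above), the `Select` value could be lowered into a search body roughly like this:

// Hypothetical sketch only: not part of parser.rs. Assumes the parser
// lives at crate::parser and that serde_json is a dependency.
use serde_json::{json, Value};

use crate::parser::{Field, Select};

pub fn compile(select: &Select) -> Value {
    // `SELECT *` keeps the whole `_source`; named fields become a
    // `_source` include list. The parser above has no WHERE clause,
    // so the query part is always match_all in this sketch.
    let source = match select.fields.as_slice() {
        [Field::Star] => json!(true),
        fields => {
            let names: Vec<&str> = fields
                .iter()
                .filter_map(|f| match f {
                    Field::Named(name) => Some(name.as_str()),
                    Field::Star => None,
                })
                .collect();
            json!(names)
        }
    };
    // `select.source.name` (the index) is not part of the body; it
    // would go into the request path, e.g. `POST /{index}/_search`.
    json!({
        "_source": source,
        "query": { "match_all": {} }
    })
}

Under those assumptions, compiling the parse of "SELECT field FROM index" would produce the body {"_source": ["field"], "query": {"match_all": {}}}, to be sent against the index named "index".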