
Implement command searching

Mark Rousskov, 6 years ago
parent
commit a565db5889
3 changed files with 59 additions and 2 deletions
  1. parser/src/command.rs (+49, -0)
  2. parser/src/lib.rs (+1, -1)
  3. parser/src/token.rs (+9, -1)

+ 49 - 0
parser/src/command.rs

@@ -0,0 +1,49 @@
+use crate::token::{Error, Token, Tokenizer};
+
+/// Returns the start of the invocation, or None if at the end of the stream
+pub fn eat_until_invocation<'a>(
+    tok: &mut Tokenizer<'a>,
+    bot: &str,
+) -> Result<Option<usize>, Error<'a>> {
+    let command = format!("@{}", bot);
+    while let Some(token) = tok.peek_token()? {
+        match token {
+            Token::Word(word) if word == command => {
+                // eat invocation of bot
+                let pos = tok.position();
+                tok.next_token().unwrap();
+                return Ok(Some(pos));
+            }
+            // unwrap is safe because we've successfully peeked above
+            _ => {
+                tok.next_token().unwrap();
+            }
+        }
+    }
+    Ok(None)
+}
+
+#[test]
+fn cs_1() {
+    let input = "testing @bot command";
+    let mut toks = Tokenizer::new(input);
+    assert_eq!(toks.peek_token().unwrap(), Some(Token::Word("testing")));
+    assert_eq!(eat_until_invocation(&mut toks, "bot").unwrap(), Some(7));
+    assert_eq!(toks.peek_token().unwrap(), Some(Token::Word("command")));
+}
+
+#[test]
+fn cs_2() {
+    let input = "@bot command";
+    let mut toks = Tokenizer::new(input);
+    assert_eq!(toks.peek_token().unwrap(), Some(Token::Word("@bot")));
+    assert_eq!(eat_until_invocation(&mut toks, "bot").unwrap(), Some(0));
+    assert_eq!(toks.peek_token().unwrap(), Some(Token::Word("command")));
+}
+
+#[test]
+fn cs_3() {
+    let input = "no command";
+    let mut toks = Tokenizer::new(input);
+    assert_eq!(eat_until_invocation(&mut toks, "bot").unwrap(), None);
+}
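
For context, here is a minimal usage sketch of the new helper. It is not part of this commit; the crate name `parser` and the `find_invocation` wrapper are assumptions based on the paths and tests above.

use parser::command::eat_until_invocation;
use parser::token::Tokenizer;

// Hypothetical caller: scan a comment body for an "@bot" invocation and
// return the tokenizer position reported at the invocation, if any.
fn find_invocation(body: &str) -> Option<usize> {
    let mut tok = Tokenizer::new(body);
    // eat_until_invocation consumes tokens up to and including "@bot",
    // leaving the tokenizer positioned at the word that follows it.
    match eat_until_invocation(&mut tok, "bot") {
        Ok(Some(pos)) => Some(pos), // invocation found; `pos` is the value of position()
        Ok(None) => None,           // reached the end of the stream without an invocation
        Err(_) => None,             // tokenizer error while scanning
    }
}

The non-destructive lookahead this relies on comes from the token.rs changes below: peek_token clones the tokenizer and advances the clone, which is why the Clone derive is added.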

+ 1 - 1
parser/src/lib.rs

@@ -1,3 +1,3 @@
-//pub mod label;
 pub mod code_block;
+pub mod command;
 pub mod token;

+ 9 - 1
parser/src/token.rs

@@ -14,7 +14,7 @@ pub enum Token<'a> {
     Word(&'a str),
 }
 
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 pub struct Tokenizer<'a> {
     input: &'a str,
     chars: Peekable<CharIndices<'a>>,
@@ -155,6 +155,14 @@ impl<'a> Tokenizer<'a> {
         Ok(Some(Token::Quote(body)))
     }
 
+    pub fn position(&mut self) -> usize {
+        self.cur_pos()
+    }
+
+    pub fn peek_token(&mut self) -> Result<Option<Token<'a>>, Error<'a>> {
+        self.clone().next_token()
+    }
+
     pub fn next_token(&mut self) -> Result<Option<Token<'a>>, Error<'a>> {
         self.consume_whitespace();
         if self.at_end() {