Browse Source

Merge pull request #759 from Sondeyy/395-add-semicolon-as-optional-statement-separator

Mads M. Jensen 2 months ago
parent
commit
afb4674377

+ 5 - 0
book/src/example-numbat_syntax.md

@@ -6,6 +6,11 @@
 # This is a line comment. It can span over
 # multiple lines
 
+# Statements can be separated by newlines or semicolons
+1
+2
+1;2
+
 # 1. Imports
 
 use prelude        # This is not necessary. The 'prelude'

+ 5 - 0
examples/numbat_syntax.nbt

@@ -1,6 +1,11 @@
 # This is a line comment. It can span over
 # multiple lines
 
+# Statements can be separated by newlines or semicolons
+1
+2
+1;2
+
 # 1. Imports
 
 use prelude        # This is not necessary. The 'prelude'

+ 13 - 3
numbat/src/parser.rs

@@ -2,6 +2,7 @@
 //!
 //! Grammar:
 //! ```txt
+//! prog            ::=   statement ((";" | "\n"+) statement)*
 //! statement       ::=   variable_decl | struct_decl | function_decl | dimension_decl | unit_decl | module_import | procedure_call | expression
 //!
 //! variable_decl   ::=   "let" identifier ( ":" type_annotation ) ? "=" expression
@@ -315,6 +316,9 @@ impl<'a> Parser<'a> {
                     // Skip over empty lines
                     self.skip_empty_lines(tokens);
                 }
+                TokenKind::Semicolon => {
+                    self.advance(tokens);
+                }
                 TokenKind::Eof => {
                     break;
                 }
@@ -359,7 +363,10 @@ impl<'a> Parser<'a> {
     /// Must be called after encountering an error.
     fn recover_from_error(&mut self, tokens: &[Token]) {
        // Skip all the tokens until we encounter a newline, a semicolon, or EoF.
-        while !matches!(self.peek(tokens).kind, TokenKind::Newline | TokenKind::Eof) {
+        while !matches!(
+            self.peek(tokens).kind,
+            TokenKind::Newline | TokenKind::Semicolon | TokenKind::Eof
+        ) {
             self.advance(tokens)
         }
     }
@@ -1951,7 +1958,7 @@ impl<'a> Parser<'a> {
     fn look_ahead_beyond_linebreak(&self, tokens: &[Token], token_kind: TokenKind) -> bool {
         let mut i = self.current;
         while i < tokens.len() {
-            if tokens[i].kind != TokenKind::Newline {
+            if !matches!(tokens[i].kind, TokenKind::Newline | TokenKind::Semicolon) {
                 return tokens[i].kind == token_kind;
             }
             i += 1;
@@ -2004,7 +2011,10 @@ impl<'a> Parser<'a> {
     }
 
     pub fn is_end_of_statement(&self, tokens: &[Token]) -> bool {
-        self.peek(tokens).kind == TokenKind::Newline || self.is_at_end(tokens)
+        matches!(
+            self.peek(tokens).kind,
+            TokenKind::Newline | TokenKind::Semicolon | TokenKind::Eof
+        )
     }
 
     pub fn is_at_end(&self, tokens: &[Token]) -> bool {

+ 12 - 0
numbat/src/tokenizer.rs

@@ -159,6 +159,7 @@ pub enum TokenKind {
 
     // Other
     Newline,
+    Semicolon,
     Eof,
 }
 
@@ -639,6 +640,7 @@ impl Tokenizer {
                 return Ok(None);
             }
             '\n' => TokenKind::Newline,
+            ';' => TokenKind::Semicolon,
             '&' if self.match_char(input, '&') => TokenKind::LogicalAnd,
             '|' if self.match_char(input, '|') => TokenKind::LogicalOr,
             '|' if self.match_char(input, '>') => TokenKind::PostfixApply,
@@ -983,6 +985,16 @@ fn test_tokenize_basic() {
         ]
     );
 
+    assert_eq!(
+        tokenize_reduced("1;42").unwrap(),
+        [
+            ("1", Number, ByteIndex(0)),
+            (";", Semicolon, ByteIndex(1)),
+            ("42", Number, ByteIndex(2)),
+            ("", Eof, ByteIndex(4))
+        ]
+    );
+
     assert_eq!(
         tokenize_reduced("…").unwrap(),
         [

+ 5 - 0
numbat/tests/interpreter.rs

@@ -500,6 +500,11 @@ fn test_misc_examples() {
     expect_output("3m/4m", "0.75");
     expect_output("4/2*2", "4");
     expect_output("1/2 Hz -> s", "0.5 s");
+
+    expect_output("let b = \";\"; b", "\";\"");
+    expect_output("let b = \";\"\nb", "\";\"");
+    expect_output("let a = 3m; 5; a", "3 m");
+    expect_output("let a = 3m\n5\na", "3 m");
 }
 
 #[test]