@@ -1198,6 +1198,34 @@ fn test_tokenize_string() {
         ]
     );

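+    // Interpolation fields may themselves contain string literals: the pieces
+    // around them still tokenize as Start/Middle/End at the expected byte offsets.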
+    assert_eq!(
+        tokenize_reduced("\"foo = {\"foo\"}, and bar = {\"bar\"}\"").unwrap(),
+        [
+            ("\"foo = {", StringInterpolationStart, ByteIndex(0)),
+            ("\"foo\"", StringFixed, ByteIndex(8)),
+            ("}, and bar = {", StringInterpolationMiddle, ByteIndex(13)),
+            ("\"bar\"", StringFixed, ByteIndex(27)),
+            ("}\"", StringInterpolationEnd, ByteIndex(32)),
+            ("", Eof, ByteIndex(34))
+        ]
+    );
+
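+    // Nested case: the inner string opens its own interpolation, so two
+    // StringInterpolationStart tokens are closed by two StringInterpolationEnd tokens.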
+    assert_eq!(
+        tokenize_reduced("\"foo = {\"foo, and bar = {\"bar\"}\"}\"").unwrap(),
+        [
+            ("\"foo = {", StringInterpolationStart, ByteIndex(0)),
+            ("\"foo, and bar = {", StringInterpolationStart, ByteIndex(8)),
+            ("\"bar\"", StringFixed, ByteIndex(25)),
+            ("}\"", StringInterpolationEnd, ByteIndex(30)),
+            ("}\"", StringInterpolationEnd, ByteIndex(32)),
+            ("", Eof, ByteIndex(34))
+        ]
+    );
+
     assert_eq!(
         tokenize("\"foo", 0).unwrap_err().kind,
         TokenizerErrorKind::UnterminatedString
@@ -1206,10 +1234,29 @@ fn test_tokenize_string() {
         tokenize("\"foo = {foo\"", 0).unwrap_err().kind,
         TokenizerErrorKind::UnterminatedStringInterpolation
     );
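+    // The nested interpolation is closed, but the outer one never is.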
+    assert_eq!(
+        tokenize("\"foobar = {\"foo{\"bar\"}\"\"", 0).unwrap_err().kind,
+        TokenizerErrorKind::UnterminatedStringInterpolation
+    );
     assert_eq!(
         tokenize("\"foo = {foo}.", 0).unwrap_err().kind,
         TokenizerErrorKind::UnterminatedString
     );
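+    // Unterminated outer strings are still detected when they contain nested
+    // string literals and interpolations.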
+    assert_eq!(
+        tokenize("\"foo = {\"foo\"}.", 0).unwrap_err().kind,
+        TokenizerErrorKind::UnterminatedString
+    );
+    assert_eq!(
+        tokenize("\"foo = {\"foo}.\"", 0).unwrap_err().kind,
+        TokenizerErrorKind::UnterminatedString
+    );
+    assert_eq!(
+        tokenize("\"foobar = {\"foo{\"bar}\"}.\"", 0).unwrap_err().kind,
+        TokenizerErrorKind::UnterminatedString
+    );

     insta::assert_snapshot!(
         tokenize_reduced_pretty(r#""start \"inner\" end""#).unwrap(),