Updated the Zig tokenizer to handle `.***` (pointer dereference `.*` immediately followed by `**`) and added tests

This commit is contained in:
Travis 2020-10-28 21:45:58 -05:00
parent 3c7a49c494
commit 960b5b518f
2 changed files with 49 additions and 3 deletions

View File

@ -403,6 +403,7 @@ pub const Tokenizer = struct {
angle_bracket_angle_bracket_right,
period,
period_2,
period_asterisk,
saw_at_sign,
};
@ -979,9 +980,7 @@ pub const Tokenizer = struct {
state = .period_2;
},
'*' => {
result.id = .PeriodAsterisk;
self.index += 1;
break;
state = .period_asterisk;
},
else => {
result.id = .Period;
@ -1001,6 +1000,17 @@ pub const Tokenizer = struct {
},
},
.period_asterisk => switch (c) {
'*' => {
result.id = .Invalid;
break;
},
else => {
result.id = .PeriodAsterisk;
break;
}
},
.slash => switch (c) {
'/' => {
state = .line_comment_start;
@ -1376,6 +1386,9 @@ pub const Tokenizer = struct {
.period_2 => {
result.id = .Ellipsis2;
},
.period_asterisk => {
result.id = .PeriodAsterisk;
},
.pipe => {
result.id = .Pipe;
},
@ -1762,6 +1775,31 @@ test "correctly parse pointer assignment" {
});
}
// Regression test for issue #6823: `.*` (pointer dereference) immediately
// followed by `*` is ambiguous with the `**` array-multiplication operator,
// so the tokenizer emits an Invalid token unless the `.*` is separated from
// the following `*` by whitespace or a closing paren.
test "correctly parse pointer dereference followed by asterisk" {
// With a space between `.*` and `**`: dereference, then repeat operator.
testTokenize("\"b\".* ** 10", &[_]Token.Id{
.StringLiteral,
.PeriodAsterisk,
.AsteriskAsterisk,
.IntegerLiteral,
});
// Parenthesized dereference: the `)` ends the `.*` token, so a directly
// following `**` is unambiguous and tokenizes normally.
testTokenize("(\"b\".*)** 10", &[_]Token.Id{
.LParen,
.StringLiteral,
.PeriodAsterisk,
.RParen,
.AsteriskAsterisk,
.IntegerLiteral,
});
// No separator: `.***` is rejected — the `.*` becomes an Invalid token,
// and the remaining `**` still tokenizes as AsteriskAsterisk.
testTokenize("\"b\".*** 10", &[_]Token.Id{
.StringLiteral,
.Invalid,
.AsteriskAsterisk,
.IntegerLiteral,
});
}
test "tokenizer - range literals" {
testTokenize("0...9", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
testTokenize("'0'...'9'", &[_]Token.Id{ .CharLiteral, .Ellipsis3, .CharLiteral });

View File

@ -8195,4 +8195,12 @@ pub fn addCases(cases: *tests.CompileErrorContext) void {
, &[_][]const u8{
"tmp.zig:4:9: error: expected type '*c_void', found '?*c_void'",
});
cases.add("Issue #6823: don't allow .* to be followed by **",
\\fn foo() void {
\\ var sequence = "repeat".*** 10;
\\}
, &[_][]const u8{
"tmp.zig:2:30: error: `.*` can't be followed by `*`. Are you missing a space?",
});
}