d8bc935d4ea227629f604d63c309c6a13b9691e0 — Noah Graff a month ago 3ebef22
Revert "renamed peekWord to readWord"

This reverts commit 9359ff1c35b94bfe0f7645428f31dc0c9eea7969.

My mistake, it does peek at the word without consuming it.
1 file changed, 23 insertions(+), 23 deletions(-)

M src/tokenizer.zig
M src/tokenizer.zig => src/tokenizer.zig +23 -23
@@ -99,7 +99,7 @@ pub const Tokenizer = struct {
 
     /// get the text contents of the next word, if the next symbol is a token,
     /// and it's a valid word.
-    pub fn readWord(tokenizer: *Tokenizer) ?[]const u8 {
+    pub fn peekWord(tokenizer: *Tokenizer) ?[]const u8 {
         if (!tokenizer.isNextSymbolId(.Token)) return null;
 
         // a symbol of type 'Token' should always have at least one


@@ -140,7 +140,7 @@ pub const Tokenizer = struct {
             if (next_char != token_text[0]) return null;
             _ = tokenizer.text.readChar();
         } else {
-            const next_word = tokenizer.readWord() orelse return null;
+            const next_word = tokenizer.peekWord() orelse return null;
             if (!std.mem.eql(u8, next_word, token_text)) {
                 return null;
             }


@@ 233,7 233,7 @@ test "Tokenizer.nextSymbolId" {
     t.expectEqual(Symbol.Id.EndOfFile, tokenizer.nextSymbolId());
 }
 
-test "Tokenizer.readWord" {
+test "Tokenizer.peekWord" {
     const t = std.testing;
 
     var text = try TextBuffer.init(std.heap.direct_allocator, "Some tokens");


@@ 241,44 241,44 @@ test "Tokenizer.readWord" {
 
     var tokenizer = Tokenizer.init(&text);
     defer tokenizer.deinit();
-    t.expectEqualSlices(u8, "Some", tokenizer.readWord().?);
-    t.expectEqualSlices(u8, "Some", tokenizer.readWord().?);
-    t.expectEqualSlices(u8, "Some", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "Some", tokenizer.peekWord().?);
+    t.expectEqualSlices(u8, "Some", tokenizer.peekWord().?);
+    t.expectEqualSlices(u8, "Some", tokenizer.peekWord().?);
     _ = text.read(4);
-    t.expectEqualSlices(u8, "tokens", tokenizer.readWord().?);
-    t.expectEqualSlices(u8, "tokens", tokenizer.readWord().?);
-    t.expectEqualSlices(u8, "tokens", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "tokens", tokenizer.peekWord().?);
+    t.expectEqualSlices(u8, "tokens", tokenizer.peekWord().?);
+    t.expectEqualSlices(u8, "tokens", tokenizer.peekWord().?);
     _ = text.read(6);
-    t.expect(null == tokenizer.readWord());
-    t.expect(null == tokenizer.readWord());
-    t.expect(null == tokenizer.readWord());
+    t.expect(null == tokenizer.peekWord());
+    t.expect(null == tokenizer.peekWord());
+    t.expect(null == tokenizer.peekWord());
     try text.append("more||tokens&&with<operators");
-    t.expectEqualSlices(u8, "more", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "more", tokenizer.peekWord().?);
     _ = text.read(4);
-    t.expect(null == tokenizer.readWord());
+    t.expect(null == tokenizer.peekWord());
     _ = text.read(2);
-    t.expectEqualSlices(u8, "tokens", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "tokens", tokenizer.peekWord().?);
     _ = text.read(6);
-    t.expect(null == tokenizer.readWord());
+    t.expect(null == tokenizer.peekWord());
     _ = text.read(2);
-    t.expectEqualSlices(u8, "with", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "with", tokenizer.peekWord().?);
     _ = text.read(5);
-    t.expectEqualSlices(u8, "operators", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "operators", tokenizer.peekWord().?);
     _ = text.read(9);
-    t.expect(null == tokenizer.readWord());
+    t.expect(null == tokenizer.peekWord());
     try text.append(
         \\Tokens
         \\Split
         \\On
         \\Lines
     );
-    t.expectEqualSlices(u8, "Tokens", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "Tokens", tokenizer.peekWord().?);
     _ = text.read(7);
-    t.expectEqualSlices(u8, "Split", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "Split", tokenizer.peekWord().?);
     _ = text.read(6);
-    t.expectEqualSlices(u8, "On", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "On", tokenizer.peekWord().?);
     _ = text.read(3);
-    t.expectEqualSlices(u8, "Lines", tokenizer.readWord().?);
+    t.expectEqualSlices(u8, "Lines", tokenizer.peekWord().?);
     _ = text.read(6);
 }
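
For context, here is a minimal sketch of the peek semantics this revert restores, reusing the TextBuffer/Tokenizer setup from the test in the diff above. It assumes both types are in scope, as they are inside src/tokenizer.zig's own tests, and uses the same std.heap.direct_allocator shown there; treat it as illustrative rather than as an addition to the repository.

    const std = @import("std");

    test "peekWord leaves the buffer untouched" {
        const t = std.testing;

        // Same setup as the existing test above; TextBuffer and
        // Tokenizer are assumed to be in scope here.
        var text = try TextBuffer.init(std.heap.direct_allocator, "Some tokens");
        var tokenizer = Tokenizer.init(&text);
        defer tokenizer.deinit();

        // Peeking twice yields the same word: nothing is consumed,
        // which is why the readWord name was misleading.
        t.expectEqualSlices(u8, "Some", tokenizer.peekWord().?);
        t.expectEqualSlices(u8, "Some", tokenizer.peekWord().?);

        // The caller advances the underlying TextBuffer explicitly,
        // exactly as the existing test does, before the next peek.
        _ = text.read(4);
        t.expectEqualSlices(u8, "tokens", tokenizer.peekWord().?);
    }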