ziglings/patches/patches/103_tokenization.patch

12 lines
393 B
Diff
Raw Normal View History

--- exercises/103_tokenization.zig 2023-10-05 21:57:23.245974688 +0200
+++ answers/103_tokenization.zig 2023-10-05 22:06:08.319119156 +0200
@@ -136,7 +136,7 @@
;
// now the tokenizer, but what do we need here?
- var it = std.mem.tokenizeAny(u8, poem, ???);
+ var it = std.mem.tokenizeAny(u8, poem, " ,;!\n");
// print all words and count them
var cnt: usize = 0;