//! Token iteration over a buffer split by scalar, any-of, or sequence delimiters.
/// Returns an iterator type that yields the non-empty tokens of a buffer,
/// treating runs of delimiters (scalar, any-of-set, or exact sequence,
/// selected at comptime by `delimiter_type`) as separators.
pub fn TokenIterator(comptime T: type, comptime delimiter_type: DelimiterType) type {
    return struct {
        buffer: []const T,
        delimiter: switch (delimiter_type) {
            .sequence, .any => []const T,
            .scalar => T,
        },
        index: usize,

        const Self = @This();

        /// Returns a slice of the current token, or null if tokenization is
        /// complete, and advances to the next token.
        pub fn next(self: *Self) ?[]const T {
            const token = self.peek() orelse return null;
            self.index += token.len;
            return token;
        }

        /// Returns a slice of the current token, or null if tokenization is
        /// complete. Does not advance to the next token.
        pub fn peek(self: *Self) ?[]const T {
            // Skip any leading delimiters, recording the new position.
            self.index = self.skipDelimiters(self.index);
            const token_start = self.index;
            if (token_start == self.buffer.len) return null;

            // Scan forward until the next delimiter (or the end of the buffer).
            var token_end = token_start;
            while (token_end < self.buffer.len) : (token_end += 1) {
                if (self.isDelimiter(token_end)) break;
            }
            return self.buffer[token_start..token_end];
        }

        /// Returns a slice of the remaining bytes. Does not affect iterator state.
        pub fn rest(self: Self) []const T {
            // Same leading-delimiter skip as `peek`, but on a copy of the index.
            return self.buffer[self.skipDelimiters(self.index)..];
        }

        /// Resets the iterator to the initial token.
        pub fn reset(self: *Self) void {
            self.index = 0;
        }

        /// Returns the first position at or after `from` that does not begin a
        /// delimiter. Shared skip logic for `peek` and `rest`.
        fn skipDelimiters(self: Self, from: usize) usize {
            var pos = from;
            while (pos < self.buffer.len and self.isDelimiter(pos)) {
                pos += switch (delimiter_type) {
                    // A sequence match consumes the whole delimiter at once.
                    .sequence => self.delimiter.len,
                    .any, .scalar => 1,
                };
            }
            return pos;
        }

        /// Reports whether a delimiter begins at `index` in the buffer.
        fn isDelimiter(self: Self, index: usize) bool {
            return switch (delimiter_type) {
                .sequence => startsWith(T, self.buffer[index..], self.delimiter),
                .any => for (self.delimiter) |delimiter_item| {
                    if (self.buffer[index] == delimiter_item) break true;
                } else false,
                .scalar => self.buffer[index] == self.delimiter,
            };
        }
    };
}