Function tokenizeScalar
Returns an iterator that iterates over the slices of buffer that are separated by delimiter.
The iterator never returns an empty slice, so leading, trailing, and consecutive delimiters are skipped.
tokenizeScalar(u8, " abc def ghi ", ' ') will return the slices
"abc", "def", "ghi", then null, in that order.
If buffer is empty, the iterator will return null.
If delimiter does not appear in buffer,
the iterator will return buffer, then null, in that order.
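This empty-slice skipping is the key difference from splitScalar, which does return the empty slices between adjacent delimiters. A minimal standalone sketch of the contrast (assuming std.mem and std.testing as shown):

const std = @import("std");

test "tokenizeScalar skips empty slices; splitScalar keeps them" {
    // tokenizeScalar treats a run of delimiters as a single separator.
    var tok = std.mem.tokenizeScalar(u8, "a,,b", ',');
    try std.testing.expectEqualStrings("a", tok.next().?);
    try std.testing.expectEqualStrings("b", tok.next().?);
    try std.testing.expect(tok.next() == null);

    // splitScalar returns the empty slice between the two commas.
    var sp = std.mem.splitScalar(u8, "a,,b", ',');
    try std.testing.expectEqualStrings("a", sp.next().?);
    try std.testing.expectEqualStrings("", sp.next().?);
    try std.testing.expectEqualStrings("b", sp.next().?);
    try std.testing.expect(sp.next() == null);
}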
See also: tokenizeAny, tokenizeSequence, splitScalar, splitAny, splitSequence,
splitBackwardsScalar, splitBackwardsAny, and splitBackwardsSequence.
Prototype
pub fn tokenizeScalar(comptime T: type, buffer: []const T, delimiter: T) TokenIterator(T, .scalar)
Parameters
T: type
buffer: []const T
delimiter: T
Example
test tokenizeScalar {
    // Leading, trailing, and repeated delimiters produce no empty tokens.
    var it = tokenizeScalar(u8, " abc def ghi ", ' ');
    try testing.expect(eql(u8, it.next().?, "abc"));
    try testing.expect(eql(u8, it.peek().?, "def"));
    try testing.expect(eql(u8, it.next().?, "def"));
    try testing.expect(eql(u8, it.next().?, "ghi"));
    try testing.expect(it.next() == null);

    // it.index records how far into the buffer the iterator has advanced.
    it = tokenizeScalar(u8, "..\\bob", '\\');
    try testing.expect(eql(u8, it.next().?, ".."));
    try testing.expect(eql(u8, "..", "..\\bob"[0..it.index]));
    try testing.expect(eql(u8, it.next().?, "bob"));
    try testing.expect(it.next() == null);

    it = tokenizeScalar(u8, "//a/b", '/');
    try testing.expect(eql(u8, it.next().?, "a"));
    try testing.expect(eql(u8, it.next().?, "b"));
    try testing.expect(eql(u8, "//a/b", "//a/b"[0..it.index]));
    try testing.expect(it.next() == null);

    // A buffer containing only delimiters yields nothing.
    it = tokenizeScalar(u8, "|", '|');
    try testing.expect(it.next() == null);
    try testing.expect(it.peek() == null);

    // An empty buffer yields nothing.
    it = tokenizeScalar(u8, "", '|');
    try testing.expect(it.next() == null);
    try testing.expect(it.peek() == null);

    // A buffer without the delimiter yields the whole buffer once.
    it = tokenizeScalar(u8, "hello", ' ');
    try testing.expect(eql(u8, it.next().?, "hello"));
    try testing.expect(it.next() == null);

    // T can be any element type, e.g. UTF-16 code units.
    var it16 = tokenizeScalar(
        u16,
        std.unicode.utf8ToUtf16LeStringLiteral("hello"),
        ' ',
    );
    try testing.expect(eql(u16, it16.next().?, std.unicode.utf8ToUtf16LeStringLiteral("hello")));
    try testing.expect(it16.next() == null);
}
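Besides next and peek, the returned TokenIterator also provides rest and reset; a minimal sketch, assuming the current TokenIterator API in std.mem:

const std = @import("std");

test "rest and reset" {
    var it = std.mem.tokenizeScalar(u8, "GET /index.html HTTP/1.1", ' ');
    try std.testing.expectEqualStrings("GET", it.next().?);
    // rest() returns everything from the start of the next token onward,
    // without advancing the iterator.
    try std.testing.expectEqualStrings("/index.html HTTP/1.1", it.rest());
    try std.testing.expectEqualStrings("/index.html", it.next().?);
    // reset() rewinds the iterator to the beginning of buffer.
    it.reset();
    try std.testing.expectEqualStrings("GET", it.next().?);
}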
Source
pub fn tokenizeScalar(comptime T: type, buffer: []const T, delimiter: T) TokenIterator(T, .scalar) {
    return .{
        .index = 0,
        .buffer = buffer,
        .delimiter = delimiter,
    };
}
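Because tokenizeScalar is generic over the element type T, it works on any slice, not just bytes. A small sketch with a made-up u32 buffer delimited by zeros (the data is purely illustrative):

const std = @import("std");

test "tokenizing a non-byte slice" {
    // Runs of the delimiter value 0 separate groups of readings.
    const readings = [_]u32{ 7, 9, 0, 0, 4, 0 };
    var it = std.mem.tokenizeScalar(u32, &readings, 0);
    try std.testing.expectEqualSlices(u32, &.{ 7, 9 }, it.next().?);
    try std.testing.expectEqualSlices(u32, &.{4}, it.next().?);
    try std.testing.expect(it.next() == null);
}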