enum EmitCodepointLiterals
Determines when to emit Unicode code point literals as opposed to integer literals.
Fields
never — Never emit Unicode code point literals.
printable_ascii — Emit Unicode code point literals for any u8 in the printable ASCII range.
always — Emit Unicode code point literals for any unsigned integer with 21 bits or fewer whose value is a valid non-surrogate code point.
Source
/// Determines when to emit Unicode code point literals as opposed to integer literals.
pub const EmitCodepointLiterals = enum {
    /// Never emit Unicode code point literals.
    never,
    /// Emit Unicode code point literals for any `u8` in the printable ASCII range.
    printable_ascii,
    /// Emit Unicode code point literals for any unsigned integer with 21 bits or fewer
    /// whose value is a valid non-surrogate code point.
    always,

    /// If the value should be emitted as a Unicode codepoint, return it as a `u21`;
    /// otherwise return `null`. `val` must be an integer (runtime or comptime);
    /// any other type is a compile error.
    fn emitAsCodepoint(self: @This(), val: anytype) ?u21 {
        // Rule out incompatible integer types: a signed type or one wider than
        // 21 bits can never be emitted as a code point literal, so bail early.
        switch (@typeInfo(@TypeOf(val))) {
            .int => |int_info| if (int_info.signedness == .signed or int_info.bits > 21) {
                return null;
            },
            .comptime_int => {},
            // Non-integer arguments are a programmer error, caught at comptime.
            else => comptime unreachable,
        }
        // Return null if the value shouldn't be printed as a Unicode codepoint, or the value casted
        // to a u21 if it should.
        switch (self) {
            .always => {
                // Range-checked cast; null on overflow rather than UB.
                const c = std.math.cast(u21, val) orelse return null;
                // Rejects surrogates (0xD800-0xDFFF) and values above 0x10FFFF.
                if (!std.unicode.utf8ValidCodepoint(c)) return null;
                return c;
            },
            .printable_ascii => {
                const c = std.math.cast(u8, val) orelse return null;
                if (!std.ascii.isPrint(c)) return null;
                return c;
            },
            .never => return null,
        }
    }
};