Function innerParse

Alias for std.json.static.innerParse

This is an internal function called recursively during the implementation of parseFromTokenSourceLeaky and similar. It is exposed primarily to enable custom jsonParse() methods to call back into the parseFrom* system; for example, if you're implementing a custom container of type T, you can call innerParse(T, ...) for each of the container's items. Note that null fields are not allowed on the options when calling this function. (The options you get in your jsonParse method have no null fields.)
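
Example

The following is a minimal sketch, not part of std.json, of a custom container whose jsonParse() method calls back into innerParse() for each item, as described above. The names MyList, Item, and items are assumptions for illustration, and the sketch assumes the managed std.ArrayList API that the Source below also uses.

const std = @import("std");

/// Hypothetical generic container: parses a JSON array by delegating each
/// element to innerParse().
pub fn MyList(comptime Item: type) type {
    return struct {
        items: []Item,

        const Self = @This();

        pub fn jsonParse(
            allocator: std.mem.Allocator,
            source: anytype,
            options: std.json.ParseOptions,
        ) std.json.ParseError(@TypeOf(source.*))!Self {
            // The options a jsonParse() method receives already have no null
            // fields, so they can be forwarded to innerParse() as-is.
            if (.array_begin != try source.next()) return error.UnexpectedToken;

            var list = std.ArrayList(Item).init(allocator);
            while (true) {
                switch (try source.peekNextTokenType()) {
                    .array_end => {
                        _ = try source.next();
                        break;
                    },
                    else => {},
                }
                // Call back into the parseFrom* system for each element.
                try list.append(try std.json.innerParse(Item, allocator, source, options));
            }
            return .{ .items = try list.toOwnedSlice() };
        }
    };
}

A value such as MyList(u32) parsed via parseFromSlice or parseFromTokenSourceLeaky would then be routed through this jsonParse() method instead of the default struct handling.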

Prototype

pub fn innerParse(
    comptime T: type,
    allocator: Allocator,
    source: anytype,
    options: ParseOptions,
) ParseError(@TypeOf(source.*))!T

Parameters

T: type
allocator: Allocator
options: ParseOptions

Source

pub fn innerParse(
    comptime T: type,
    allocator: Allocator,
    source: anytype,
    options: ParseOptions,
) ParseError(@TypeOf(source.*))!T {
    switch (@typeInfo(T)) {
        .bool => {
            return switch (try source.next()) {
                .true => true,
                .false => false,
                else => error.UnexpectedToken,
            };
        },
        .float, .comptime_float => {
            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return try std.fmt.parseFloat(T, slice);
        },
        .int, .comptime_int => {
            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return sliceToInt(T, slice);
        },
        .optional => |optionalInfo| {
            switch (try source.peekNextTokenType()) {
                .null => {
                    _ = try source.next();
                    return null;
                },
                else => {
                    return try innerParse(optionalInfo.child, allocator, source, options);
                },
            }
        },
        .@"enum" => {
            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            const token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            defer freeAllocated(allocator, token);
            const slice = switch (token) {
                inline .number, .allocated_number, .string, .allocated_string => |slice| slice,
                else => return error.UnexpectedToken,
            };
            return sliceToEnum(T, slice);
        },
        .@"union" => |unionInfo| {
            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            if (unionInfo.tag_type == null) @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");

            if (.object_begin != try source.next()) return error.UnexpectedToken;

            var result: ?T = null;
            var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
            const field_name = switch (name_token.?) {
                inline .string, .allocated_string => |slice| slice,
                else => {
                    return error.UnexpectedToken;
                },
            };

            inline for (unionInfo.fields) |u_field| {
                if (std.mem.eql(u8, u_field.name, field_name)) {
                    // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
                    // (Recursing into innerParse() might trigger more allocations.)
                    freeAllocated(allocator, name_token.?);
                    name_token = null;
                    if (u_field.type == void) {
                        // void isn't really a json type, but we can support void payload union tags with {} as a value.
                        if (.object_begin != try source.next()) return error.UnexpectedToken;
                        if (.object_end != try source.next()) return error.UnexpectedToken;
                        result = @unionInit(T, u_field.name, {});
                    } else {
                        // Recurse.
                        result = @unionInit(T, u_field.name, try innerParse(u_field.type, allocator, source, options));
                    }
                    break;
                }
            } else {
                // Didn't match anything.
                return error.UnknownField;
            }

            if (.object_end != try source.next()) return error.UnexpectedToken;

            return result.?;
        },

        .@"struct" => |structInfo| {
            if (structInfo.is_tuple) {
                if (.array_begin != try source.next()) return error.UnexpectedToken;

                var r: T = undefined;
                inline for (0..structInfo.fields.len) |i| {
                    r[i] = try innerParse(structInfo.fields[i].type, allocator, source, options);
                }

                if (.array_end != try source.next()) return error.UnexpectedToken;

                return r;
            }

            if (std.meta.hasFn(T, "jsonParse")) {
                return T.jsonParse(allocator, source, options);
            }

            if (.object_begin != try source.next()) return error.UnexpectedToken;

            var r: T = undefined;
            var fields_seen = [_]bool{false} ** structInfo.fields.len;

            while (true) {
                var name_token: ?Token = try source.nextAllocMax(allocator, .alloc_if_needed, options.max_value_len.?);
                const field_name = switch (name_token.?) {
                    inline .string, .allocated_string => |slice| slice,
                    .object_end => {
                        // No more fields.
                        break;
                    },
                    else => {
                        return error.UnexpectedToken;
                    },
                };

                inline for (structInfo.fields, 0..) |field, i| {
                    if (field.is_comptime) @compileError("comptime fields are not supported: " ++ @typeName(T) ++ "." ++ field.name);
                    if (std.mem.eql(u8, field.name, field_name)) {
                        // Free the name token now in case we're using an allocator that optimizes freeing the last allocated object.
                        // (Recursing into innerParse() might trigger more allocations.)
                        freeAllocated(allocator, name_token.?);
                        name_token = null;
                        if (fields_seen[i]) {
                            switch (options.duplicate_field_behavior) {
                                .use_first => {
                                    // Parse and ignore the redundant value.
                                    // We don't want to skip the value, because we want type checking.
                                    _ = try innerParse(field.type, allocator, source, options);
                                    break;
                                },
                                .@"error" => return error.DuplicateField,
                                .use_last => {},
                            }
                        }
                        @field(r, field.name) = try innerParse(field.type, allocator, source, options);
                        fields_seen[i] = true;
                        break;
                    }
                } else {
                    // Didn't match anything.
                    freeAllocated(allocator, name_token.?);
                    if (options.ignore_unknown_fields) {
                        try source.skipValue();
                    } else {
                        return error.UnknownField;
                    }
                }
            }
            try fillDefaultStructValues(T, &r, &fields_seen);
            return r;
        },

        .array => |arrayInfo| {
            switch (try source.peekNextTokenType()) {
                .array_begin => {
                    // Typical array.
                    return internalParseArray(T, arrayInfo.child, arrayInfo.len, allocator, source, options);
                },
                .string => {
                    if (arrayInfo.child != u8) return error.UnexpectedToken;

                    // Fixed-length string.
                    var r: T = undefined;
                    var i: usize = 0;
                    while (true) {
                        switch (try source.next()) {
                            .string => |slice| {
                                if (i + slice.len != r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..slice.len], slice);
                                break;
                            },
                            .partial_string => |slice| {
                                if (i + slice.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..slice.len], slice);
                                i += slice.len;
                            },
                            .partial_string_escaped_1 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_2 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_3 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            .partial_string_escaped_4 => |arr| {
                                if (i + arr.len > r.len) return error.LengthMismatch;
                                @memcpy(r[i..][0..arr.len], arr[0..]);
                                i += arr.len;
                            },
                            else => unreachable,
                        }
                    }

                    return r;
                },
                else => return error.UnexpectedToken,
            }
        },

        .vector => |vecInfo| {
            switch (try source.peekNextTokenType()) {
                .array_begin => {
                    return internalParseArray(T, vecInfo.child, vecInfo.len, allocator, source, options);
                },
                else => return error.UnexpectedToken,
            }
        },

        .pointer => |ptrInfo| {
            switch (ptrInfo.size) {
                .one => {
                    const r: *ptrInfo.child = try allocator.create(ptrInfo.child);
                    r.* = try innerParse(ptrInfo.child, allocator, source, options);
                    return r;
                },
                .slice => {
                    switch (try source.peekNextTokenType()) {
                        .array_begin => {
                            _ = try source.next();

                            // Typical array.
                            var arraylist = ArrayList(ptrInfo.child).init(allocator);
                            while (true) {
                                switch (try source.peekNextTokenType()) {
                                    .array_end => {
                                        _ = try source.next();
                                        break;
                                    },
                                    else => {},
                                }

                                try arraylist.ensureUnusedCapacity(1);
                                arraylist.appendAssumeCapacity(try innerParse(ptrInfo.child, allocator, source, options));
                            }

                            if (ptrInfo.sentinel()) |s| {
                                return try arraylist.toOwnedSliceSentinel(s);
                            }

                            return try arraylist.toOwnedSlice();
                        },
                        .string => {
                            if (ptrInfo.child != u8) return error.UnexpectedToken;

                            // Dynamic length string.
                            if (ptrInfo.sentinel()) |s| {
                                // Use our own array list so we can append the sentinel.
                                var value_list = ArrayList(u8).init(allocator);
                                _ = try source.allocNextIntoArrayList(&value_list, .alloc_always);
                                return try value_list.toOwnedSliceSentinel(s);
                            }
                            if (ptrInfo.is_const) {
                                switch (try source.nextAllocMax(allocator, options.allocate.?, options.max_value_len.?)) {
                                    inline .string, .allocated_string => |slice| return slice,
                                    else => unreachable,
                                }
                            } else {
                                // Have to allocate to get a mutable copy.
                                switch (try source.nextAllocMax(allocator, .alloc_always, options.max_value_len.?)) {
                                    .allocated_string => |slice| return slice,
                                    else => unreachable,
                                }
                            }
                        },
                        else => return error.UnexpectedToken,
                    }
                },
                else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
            }
        },
        else => @compileError("Unable to parse into type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}