Function decodeBlockReader [src]

Decode a single block from source into dest. Literal and sequence data from the block is copied into literals_buffer and sequence_buffer, which must be large enough; otherwise error.LiteralsBufferTooSmall or error.SequenceBufferTooSmall is returned (the maximum block size is an upper bound for the size of both buffers). See decodeBlock and decodeBlockRingBuffer for functions that can decode a block without these extra copies. error.EndOfStream is returned if source does not contain enough bytes.

Prototype

pub fn decodeBlockReader(
    dest: *RingBuffer,
    source: anytype,
    block_header: frame.Zstandard.Block.Header,
    decode_state: *DecodeState,
    block_size_max: usize,
    literals_buffer: []u8,
    sequence_buffer: []u8,
) !void

Parameters

dest: *RingBuffer
source: anytype
block_header: frame.Zstandard.Block.Header
decode_state: *DecodeState
block_size_max: usize
literals_buffer: []u8
sequence_buffer: []u8
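
Example

A minimal usage sketch, not taken from the standard library. It assumes std.compress.zstandard.decompress is the public path of this module, and that the caller already holds the RingBuffer, DecodeState, block header, and frame window size set up the same way the frame-level decoder would. Because the Zstandard format caps every block at min(window_size, 128 KiB), sizing both scratch buffers to that bound is enough to avoid error.LiteralsBufferTooSmall and error.SequenceBufferTooSmall.

const std = @import("std");

// Assumed module path; adjust to wherever decodeBlockReader lives in your tree.
const decompress = std.compress.zstandard.decompress;

// `dest`, `block_header`, and `decode_state` are anytype here only to avoid
// hard-coding type paths in this sketch; in real code they are the
// *RingBuffer, frame.Zstandard.Block.Header, and *DecodeState values the
// surrounding frame decoder already holds.
fn decodeOneBlock(
    allocator: std.mem.Allocator,
    dest: anytype,
    source: anytype,
    block_header: anytype,
    decode_state: anytype,
    window_size: usize,
) !void {
    // Upper bound from the Zstandard spec: no block exceeds
    // min(window_size, 128 KiB), so both scratch buffers are large enough.
    const block_size_max: usize = @min(window_size, 1 << 17);

    const literals_buffer = try allocator.alloc(u8, block_size_max);
    defer allocator.free(literals_buffer);
    const sequence_buffer = try allocator.alloc(u8, block_size_max);
    defer allocator.free(sequence_buffer);

    try decompress.decodeBlockReader(
        dest,
        source,
        block_header,
        decode_state,
        block_size_max,
        literals_buffer,
        sequence_buffer,
    );
}

A frame decoder would call this once per block, reusing the same two scratch buffers for every block in the frame.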

Source

pub fn decodeBlockReader(
    dest: *RingBuffer,
    source: anytype,
    block_header: frame.Zstandard.Block.Header,
    decode_state: *DecodeState,
    block_size_max: usize,
    literals_buffer: []u8,
    sequence_buffer: []u8,
) !void {
    const block_size = block_header.block_size;
    var block_reader_limited = std.io.limitedReader(source, block_size);
    const block_reader = block_reader_limited.reader();
    if (block_size_max < block_size) return error.BlockSizeOverMaximum;
    switch (block_header.block_type) {
        .raw => {
            if (block_size == 0) return;
            const slice = dest.sliceAt(dest.write_index, block_size);
            try source.readNoEof(slice.first);
            try source.readNoEof(slice.second);
            dest.write_index = dest.mask2(dest.write_index + block_size);
            decode_state.written_count += block_size;
        },
        .rle => {
            const byte = try source.readByte();
            for (0..block_size) |_| {
                dest.writeAssumeCapacity(byte);
            }
            decode_state.written_count += block_size;
        },
        .compressed => {
            const literals = try decodeLiteralsSection(block_reader, literals_buffer);
            const sequences_header = try decodeSequencesHeader(block_reader);

            try decode_state.prepare(block_reader, literals, sequences_header);

            var bytes_written: usize = 0;
            {
                const size = try block_reader.readAll(sequence_buffer);
                var bit_stream: readers.ReverseBitReader = undefined;
                try bit_stream.init(sequence_buffer[0..size]);

                if (sequences_header.sequence_count > 0) {
                    if (sequence_buffer.len < block_reader_limited.bytes_left)
                        return error.SequenceBufferTooSmall;

                    decode_state.readInitialFseState(&bit_stream) catch
                        return error.MalformedCompressedBlock;

                    var sequence_size_limit = block_size_max;
                    for (0..sequences_header.sequence_count) |i| {
                        const decompressed_size = decode_state.decodeSequenceRingBuffer(
                            dest,
                            &bit_stream,
                            sequence_size_limit,
                            i == sequences_header.sequence_count - 1,
                        ) catch return error.MalformedCompressedBlock;
                        sequence_size_limit -= decompressed_size;
                        bytes_written += decompressed_size;
                    }
                }

                if (!bit_stream.isEmpty()) {
                    return error.MalformedCompressedBlock;
                }
            }

            if (decode_state.literal_written_count < literals.header.regenerated_size) {
                const len = literals.header.regenerated_size - decode_state.literal_written_count;
                decode_state.decodeLiteralsRingBuffer(dest, len) catch
                    return error.MalformedCompressedBlock;
                bytes_written += len;
            }

            switch (decode_state.literal_header.block_type) {
                .treeless, .compressed => {
                    if (!decode_state.isLiteralStreamEmpty()) return error.MalformedCompressedBlock;
                },
                .raw, .rle => {},
            }

            if (bytes_written > block_size_max) return error.BlockSizeOverMaximum;
            if (block_reader_limited.bytes_left != 0) return error.MalformedCompressedBlock;
            decode_state.literal_written_count = 0;
        },
        .reserved => return error.ReservedBlock,
    }
}