
next

Parameters

self: *Iterator
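
`next` returns the next central directory entry, or `null` once every record has been read. A minimal driver loop, sketched under the assumption that `iter` is an already-initialized `std.zip.Iterator` over a seekable reader of the archive (the initialization API differs between Zig releases):

// Sketch only: `iter` is assumed to be an initialized std.zip.Iterator;
// how it is constructed varies between Zig releases.
while (try iter.next()) |entry| {
    // Entry fields come from the central directory record, possibly
    // widened by a zip64 extra field.
    std.debug.print("entry at offset {d}: {d} -> {d} bytes\n", .{
        entry.file_offset, entry.compressed_size, entry.uncompressed_size,
    });
}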


Functions

extract
Extract the zipped files to the given `dest` directory.
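
For illustration, a sketch of typical usage; the exact parameter types of `extract` depend on the Zig release (older versions take a seekable stream, newer ones a file reader), so the second argument below is an assumption to adjust for your standard library:

// Sketch only: extracts example.zip into ./unpacked, assuming the
// pre-0.15 signature extract(dest_dir, seekable_stream, options).
const std = @import("std");

pub fn main() !void {
    var file = try std.fs.cwd().openFile("example.zip", .{});
    defer file.close();
    var dest = try std.fs.cwd().makeOpenPath("unpacked", .{});
    defer dest.close();
    try std.zip.extract(dest, file.seekableStream(), .{});
}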

Values

central_file_header_sig = [4]u8{ 'P', 'K', 1, 2 }
local_file_header_sig = [4]u8{ 'P', 'K', 3, 4 }
end_record_sig = [4]u8{ 'P', 'K', 5, 6 }
end_record64_sig = [4]u8{ 'P', 'K', 6, 6 }
end_locator64_sig = [4]u8{ 'P', 'K', 6, 7 }
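
Each constant is the four-byte `PK` marker that introduces one kind of record in a zip archive; readers compare freshly read bytes against these values, exactly as `next` does below for the central directory header. A small sketch, assuming the constant is public as the listing above suggests:

// Sketch: bytes read from an archive are recognized as a central directory
// file header by comparing them against the signature constant.
const std = @import("std");

test "central directory signature" {
    const bytes = [4]u8{ 'P', 'K', 1, 2 };
    try std.testing.expect(std.mem.eql(u8, &bytes, &std.zip.central_file_header_sig));
}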

Source

pub fn next(self: *Iterator) !?Entry {
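    // All central directory records have been consumed; check that exactly
    // cd_size bytes of the central directory were read before ending iteration.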
    if (self.cd_record_index == self.cd_record_count) {
        if (self.cd_record_offset != self.cd_size)
            return if (self.cd_size > self.cd_record_offset)
                error.ZipCdOversized
            else
                error.ZipCdUndersized;

        return null;
    }

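    // Seek to the next central directory file header, read its fixed-size
    // portion (little-endian), and validate the signature.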
    const header_zip_offset = self.cd_zip_offset + self.cd_record_offset;
    const input = self.input;
    try input.seekTo(header_zip_offset);
    const header = input.interface.takeStruct(CentralDirectoryFileHeader, .little) catch |err| switch (err) {
        error.ReadFailed => return input.err.?,
        error.EndOfStream => return error.EndOfStream,
    };
    if (!std.mem.eql(u8, &header.signature, &central_file_header_sig))
        return error.ZipBadCdOffset;

    self.cd_record_index += 1;
    self.cd_record_offset += @sizeOf(CentralDirectoryFileHeader) + header.filename_len + header.extra_len + header.comment_len;

    // Note: checking the version_needed_to_extract doesn't seem to be helpful, i.e. the zip file
    // at https://github.com/ninja-build/ninja/releases/download/v1.12.0/ninja-linux.zip
    // has an undocumented version 788 but extracts just fine.

    if (header.flags.encrypted)
        return error.ZipEncryptionUnsupported;
    // TODO: check/verify more flags
    if (header.disk_number != 0)
        return error.ZipMultiDiskUnsupported;

    var extents: FileExtents = .{
        .uncompressed_size = header.uncompressed_size,
        .compressed_size = header.compressed_size,
        .local_file_header_offset = header.local_file_header_offset,
    };

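    // The extra field is a sequence of (header_id, data_size, data) records.
    // A zip64 record here may supply 64-bit sizes/offset that override the
    // 32-bit fields already copied into `extents`.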
    if (header.extra_len > 0) {
        var extra_buf: [std.math.maxInt(u16)]u8 = undefined;
        const extra = extra_buf[0..header.extra_len];

        try input.seekTo(header_zip_offset + @sizeOf(CentralDirectoryFileHeader) + header.filename_len);
        input.interface.readSliceAll(extra) catch |err| switch (err) {
            error.ReadFailed => return input.err.?,
            error.EndOfStream => return error.EndOfStream,
        };

        var extra_offset: usize = 0;
        while (extra_offset + 4 <= extra.len) {
            const header_id = std.mem.readInt(u16, extra[extra_offset..][0..2], .little);
            const data_size = std.mem.readInt(u16, extra[extra_offset..][2..4], .little);
            const end = extra_offset + 4 + data_size;
            if (end > extra.len)
                return error.ZipBadExtraFieldSize;
            const data = extra[extra_offset + 4 .. end];
            switch (@as(ExtraHeader, @enumFromInt(header_id))) {
                .zip64_info => try readZip64FileExtents(CentralDirectoryFileHeader, header, &extents, data),
                else => {}, // ignore
            }
            extra_offset = end;
        }
    }

    return .{
        .version_needed_to_extract = header.version_needed_to_extract,
        .flags = header.flags,
        .compression_method = header.compression_method,
        .last_modification_time = header.last_modification_time,
        .last_modification_date = header.last_modification_date,
        .header_zip_offset = header_zip_offset,
        .crc32 = header.crc32,
        .filename_len = header.filename_len,
        .compressed_size = extents.compressed_size,
        .uncompressed_size = extents.uncompressed_size,
        .file_offset = extents.local_file_header_offset,
    };
}