
Hashed

Provides a Reader implementation by passing data from an underlying reader through Hasher.update.

The underlying reader is best left unbuffered.

This implementation makes suboptimal buffering decisions due to being generic. A better solution would involve creating a dedicated reader for each hash function, where the discard buffer can be tailored to the hash implementation details.
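
Below is a minimal usage sketch, not taken from the original page. It assumes the std.Io.Reader API of recent Zig development builds (Reader.fixed, discardRemaining, this generic living at std.Io.Reader.Hashed) together with std.crypto.hash.sha2.Sha256; the variable names and buffer size are illustrative.

const std = @import("std");
const Sha256 = std.crypto.hash.sha2.Sha256;

test "hash a stream while reading from it" {
    const input = "some bytes to hash";
    var in: std.Io.Reader = .fixed(input);

    // Wrap the (unbuffered) fixed reader; `buf` becomes the hashing
    // reader's own buffer.
    var buf: [64]u8 = undefined;
    var hashing = std.Io.Reader.Hashed(Sha256).init(&in, Sha256.init(.{}), &buf);

    // Pull all data through the hashing reader; here it is simply discarded.
    _ = try hashing.reader.discardRemaining();

    // The digest matches hashing the input in one shot.
    var expected: [Sha256.digest_length]u8 = undefined;
    Sha256.hash(input, &expected, .{});
    try std.testing.expectEqual(expected, hashing.hasher.finalResult());
}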

Fields
in: *Reader
hasher: Hasher
reader: Reader

Functions

init(in: *Reader, hasher: Hasher, buffer: []u8) @This()

Source

pub fn Hashed(comptime Hasher: type) type {
    return struct {
        in: *Reader,
        hasher: Hasher,
        reader: Reader,

        pub fn init(in: *Reader, hasher: Hasher, buffer: []u8) @This() {
            return .{
                .in = in,
                .hasher = hasher,
                .reader = .{
                    .vtable = &.{
                        .stream = @This().stream,
                        .readVec = @This().readVec,
                        .discard = @This().discard,
                    },
                    .buffer = buffer,
                    .end = 0,
                    .seek = 0,
                },
            };
        }

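        // Streams up to `limit` bytes from the underlying reader into `w`,
        // updating the hasher with exactly the bytes that were transferred.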
        fn stream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
            const data = limit.slice(try w.writableSliceGreedy(1));
            var vec: [1][]u8 = .{data};
            const n = try this.in.readVec(&vec);
            this.hasher.update(data[0..n]);
            w.advance(n);
            return n;
        }

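        // Vectored read: fills the caller's buffers (plus any spare capacity
        // in this reader's own buffer) from the underlying reader, hashing
        // every byte that arrives.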
        fn readVec(r: *Reader, data: [][]u8) Error!usize {
            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
            var vecs: [8][]u8 = undefined; // Arbitrarily chosen amount.
            const dest_n, const data_size = try r.writableVector(&vecs, data);
            const dest = vecs[0..dest_n];
            const n = try this.in.readVec(dest);
            var remaining: usize = n;
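            // Hash the filled prefix of each destination vector, in order.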
            for (dest) |slice| {
                if (remaining < slice.len) {
                    this.hasher.update(slice[0..remaining]);
                    remaining = 0;
                    break;
                } else {
                    remaining -= slice.len;
                    this.hasher.update(slice);
                }
            }
            assert(remaining == 0);
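            // Bytes beyond `data_size` landed in this reader's own buffer;
            // account for them, but report only what the caller received.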
            if (n > data_size) {
                r.end += n - data_size;
                return data_size;
            }
            return n;
        }

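        // Discarded bytes still have to pass through the hasher, so peek
        // them from the underlying reader before tossing them.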
        fn discard(r: *Reader, limit: Limit) Error!usize {
            const this: *@This() = @alignCast(@fieldParentPtr("reader", r));
            const peeked = limit.slice(try this.in.peekGreedy(1));
            this.hasher.update(peeked);
            this.in.toss(peeked.len);
            return peeked.len;
        }
    };
}
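
Note the interface pattern at work here: the returned struct embeds a Reader whose vtable entries point at the three type-erased implementations, and each implementation recovers its containing struct via @fieldParentPtr on the reader field. Callers therefore perform reads on &value.reader rather than on the Hashed(...) value itself, as in the usage sketch above.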