I have something very funny going on which really bowls me over:
The crux is that:
// Fetch glyph 'j'; `b.size` is a *pointer* to a Size (see getChar's return type).
const b = font.getChar('j');
// First print: values are still correct (datalen=462, height=33, width=14).
std.debug.print("datalen={}, height={}, width={}\n", .{b.data.len, b.size.height(), b.size.width()});
// Identical second print: height/width are now garbage while data.len is stable —
// classic symptom of b.size pointing at reused stack memory, not of mutation
// inside width()/height() (both take `self` by value and only subtract fields).
std.debug.print("datalen={}, height={}, width={}\n", .{b.data.len, b.size.height(), b.size.width()});
Prints:
datalen=462, height=33, width=14
datalen=462, height=-1, width=-17079488
The b.size (Size struct) does not mutate the data in width()/height() methods, yet, as you can see from the output, it goes horribly wrong somewhere.
/// Axis-aligned glyph bounding box plus an offset into the font's bitmap buffer.
/// Extents are derived, not stored: width = m_x2 - m_x, height = m_y2 - m_y.
///
/// CAUTION: every field defaults to `undefined` (0xAA in Debug). A `Size` built
/// with a partial literal and then read will produce garbage — always set all
/// four coordinates before calling any method.
const Size = struct {
    m_x: c_int = undefined,
    m_x2: c_int = undefined,
    m_y: c_int = undefined,
    m_y2: c_int = undefined,
    // Byte offset of this glyph's bitmap within the shared font buffer.
    m_offset: usize = undefined,

    /// Horizontal extent in pixels. Pure: reads fields only, mutates nothing.
    pub fn width(self: @This()) c_int {
        return self.m_x2 - self.m_x;
    }

    /// Vertical extent in pixels. Pure: reads fields only, mutates nothing.
    pub fn height(self: @This()) c_int {
        return self.m_y2 - self.m_y;
    }

    /// Pixel count of the glyph bitmap (width * height).
    /// NOTE: @intCast asserts the product is non-negative; if the coordinate
    /// fields hold garbage (e.g. read through a dangling *Size), this panics
    /// in safe builds rather than returning a bogus usize.
    pub fn getBufferSize(self: @This()) usize {
        return @intCast(self.width() * self.height());
    }
}; // <- fix: `const Size = struct { ... }` is a declaration and needs a ';'
The method getChar — not that I think it is too relevant — returns a struct containing the character's size and a slice of the underlying bitmap:
// NOTE(review): the return type carries `size: *Size` — a POINTER to a Size,
// not a Size by value. If that pointer refers to a stack-local or temporary
// created inside getChar (body not fully visible here), it dangles as soon as
// getChar returns. That would exactly explain the symptom above: the first
// print reads the not-yet-reclaimed frame correctly, the second reads stack
// memory clobbered by the first print call. Confirm what `size` points at;
// returning `size: Size` by value is the usual fix.
pub fn getChar(
    self: @This(),
    character: usize,
) struct {
    data: []u8,
    size : *Size,
} {
    // Precondition: character index must be within the font's glyph table.
    assert(character < NO_CHARS);
    // Precondition: the backing bitmap buffer must have been loaded.
    assert(self.m_buffer != null);
This is done using Zig 0.14.1. It is a single threaded simple/straight forward test.