
@lassade
Last active September 5, 2024 23:51
An allocator with a leak test that only reports unreachable memory as leaked. You don't have to bother freeing memory at the end of your program, and you can check for leaks many times during its execution.
// update 1 - fix sorting, skip std.mem.Allocator, added a leak counter
const std = @import("std");
const Allocator = std.mem.Allocator;
const StackTrace = std.builtin.StackTrace;
const log = std.log.scoped(.mem);
const tracy = @import("tracy");
// todo: support for double free
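// Wraps a child allocator and records every live allocation (pointer, length,
// and a short stack trace) so a reachability-based leak check can run at any
// point during execution.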
pub const DebugAllocator = struct {
    child: Allocator = undefined,
    hold: std.Thread.Mutex = .{},
    allocs: std.MultiArrayList(Alloc) = .{},

    const LocalST = struct {
        index: usize, // note: this is probably not needed
        instruction_addresses: [15]usize,
    };

    // note: do not change the layout of this struct
    const Alloc = struct {
        ptr: usize,
        len: usize,
        st: LocalST, // todo: use index to avoid moving memory later on when sorting
    };
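
    // Captures up to 15 return addresses into a fixed-size buffer so the
    // trace can be stored inline with each tracked allocation.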
    fn captureStackTrace(ret_addr: usize) LocalST {
        var local_st: LocalST = undefined;
        @memset(&local_st.instruction_addresses, 0);
        var st = std.builtin.StackTrace{
            .index = 0,
            .instruction_addresses = &local_st.instruction_addresses,
        };
        std.debug.captureStackTrace(ret_addr, &st);
        local_st.index = st.index;
        return local_st;
    }
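
    // Forwards the allocation to the child allocator and, on success, records
    // the pointer, length, and a stack trace of the call site.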
    fn alloc(self: *@This(), len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 {
        const zone = tracy.ZoneN(@src(), "alloc");
        defer zone.End();
        const ptr = self.child.vtable.alloc(self.child.ptr, len, ptr_align, ret_addr);
        if (ptr != null) {
            // tracy.Alloc(ptr, len);
            // track allocation
            self.hold.lock();
            const a = Alloc{ .ptr = @intFromPtr(ptr), .len = len, .st = captureStackTrace(ret_addr) };
            self.allocs.append(self.child, a) catch {}; // note: on failure the allocation isn't tracked
            self.hold.unlock();
        }
        return ptr;
    }
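
    // On a successful resize, updates the stored length and refreshes the
    // stack trace of the existing entry.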
    fn resize(self: *@This(), buf: []u8, buf_align: u8, new_len: usize, ret_addr: usize) bool {
        const zone = tracy.ZoneN(@src(), "resize");
        defer zone.End();
        const success = self.child.vtable.resize(self.child.ptr, buf, buf_align, new_len, ret_addr);
        if (success) {
            // tracy.AllocN(buf.ptr, new_len, "resize");
            self.hold.lock();
            const s = self.allocs.slice();
            const i = std.mem.indexOfScalar(usize, s.items(.ptr), @intFromPtr(buf.ptr)) orelse unreachable;
            s.items(.len).ptr[i] = new_len;
            s.items(.st).ptr[i] = captureStackTrace(ret_addr);
            self.hold.unlock();
        }
        return success;
    }
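
    // Frees through the child allocator and removes the matching entry from
    // the allocation table.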
    fn free(self: *@This(), buf: []u8, buf_align: u8, ret_addr: usize) void {
        const zone = tracy.ZoneN(@src(), "free");
        defer zone.End();
        self.child.vtable.free(self.child.ptr, buf, buf_align, ret_addr);
        // tracy.Free(buf.ptr);
        self.hold.lock();
        if (std.mem.indexOfScalar(usize, self.allocs.items(.ptr), @intFromPtr(buf.ptr))) |i| {
            self.allocs.swapRemove(i);
        }
        self.hold.unlock();
    }
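
    // The @ptrCast lets these methods take *DebugAllocator directly instead of
    // the *anyopaque parameter expected by Allocator.VTable.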
    const vtable = Allocator.VTable{
        .alloc = @ptrCast(&alloc),
        .resize = @ptrCast(&resize),
        .free = @ptrCast(&free),
    };

    pub fn allocator(self: *@This()) Allocator {
        return .{ .ptr = @ptrCast(self), .vtable = &vtable };
    }
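
    // Snapshot of the allocation table plus one bit per allocation. Every bit
    // starts set ("leaked"); track() clears the bits for memory it can reach,
    // so whatever is still set when summary() runs is reported as a leak.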
    const LeakCheck = struct {
        allocator: Allocator,
        allocs: std.MultiArrayList(Alloc) = .{},
        leaks: std.DynamicBitSetUnmanaged = .{},

        pub fn deinit(self: *@This()) void {
            self.allocs.deinit(self.allocator);
            self.leaks.deinit(self.allocator);
        }

        pub fn summary(self: *const @This()) void {
            var c: usize = 0;
            var it = self.leaks.iterator(.{});
            const s = self.allocs.slice();
            while (it.next()) |i| {
                const local_st = &s.items(.st)[i];
                const st = std.builtin.StackTrace{
                    .index = local_st.index,
                    .instruction_addresses = &local_st.instruction_addresses,
                };
                log.err("memory address 0x{x} leaked: {}", .{ s.items(.ptr)[i], st });
                c += 1;
            }
            log.err("{} of {} allocation(s) leaked", .{ c, self.allocs.len });
        }
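
        // Marks the allocation containing `ptr` as reachable. The snapshot is
        // sorted by address, so a binary search locates the candidate entry;
        // interior pointers fall back to a range check against the entry below.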
        fn check(self: *@This(), ptr: usize) void {
            const CmpCtx = struct {
                fn compare(a: usize, b: usize) std.math.Order {
                    return std.math.order(b, a);
                }
            };
            const ptrs = self.allocs.items(.ptr);
            const len = self.allocs.items(.len);
            var i = std.sort.lowerBound(usize, ptrs, ptr, CmpCtx.compare);
            if (i < ptrs.len) {
                if (ptrs[i] == ptr) {
                    self.leaks.unset(i);
                } else if (i > 0) {
                    i -= 1;
                    if (ptrs[i] < ptr and ptr < ptrs[i] + len[i]) {
                        self.leaks.unset(i);
                    }
                }
            }
        }
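
        // Recursively walks a value's fields and follows any pointers found.
        // Types can opt into custom traversal by defining a `trackMem` method;
        // std.mem.Allocator fields are skipped so allocator vtables are not walked.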
        pub fn trackChildren(self: *@This(), comptime T: type, data: *const T) void {
            if (T == Allocator) return;
            if (std.meta.hasMethod(T, "trackMem")) {
                data.trackMem(self);
                return;
            }
            switch (@typeInfo(T)) {
                .Optional => |o| {
                    if (data.*) |*value| self.trackChildren(o.child, value);
                },
                .Array => |a| {
                    for (data) |*value| self.trackChildren(a.child, value);
                },
                .Struct => |s| {
                    if (s.layout == .@"packed") return;
                    inline for (s.fields) |field| {
                        switch (@typeInfo(field.type)) {
                            .Pointer => |_| self.track(@field(data, field.name)),
                            else => self.trackChildren(field.type, &@field(data, field.name)),
                        }
                    }
                },
                .Union => |u| {
                    if (u.tag_type) |UnionTagType| {
                        inline for (u.fields) |field| {
                            if (data.* == @field(UnionTagType, field.name)) {
                                self.trackChildren(field.type, &@field(data, field.name));
                                break;
                            }
                        }
                    }
                },
                else => {},
            }
        }
        // todo: doesn't like self-referencing instances
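        // Entry point: marks a root pointer or slice as reachable, then
        // recurses into whatever it points at.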
        pub fn track(self: *@This(), data: anytype) void {
            switch (@typeInfo(@TypeOf(data))) {
                .Pointer => |p| {
                    // log.info("visit {s}", .{@typeName(@TypeOf(data))});
                    switch (p.size) {
                        .One, .Many, .C => {
                            self.check(@intFromPtr(data));
                            switch (@typeInfo(p.child)) {
                                .Struct => self.trackChildren(p.child, data),
                                else => {},
                            }
                        },
                        .Slice => {
                            self.check(@intFromPtr(data.ptr));
                            for (data) |*value| self.trackChildren(p.child, value);
                        },
                    }
                },
                else => {},
            }
        }
    };
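
    // Copies the current allocation table into a LeakCheck, sorts it by
    // address, and marks every entry as leaked. The caller then calls track()
    // on its root objects and summary() to report whatever remains.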
    pub fn detectLeaks(self: *@This()) !LeakCheck {
        var leak_check: LeakCheck = .{
            .allocator = self.allocator(),
            .allocs = .{},
            .leaks = .{},
        };
        var len = self.allocs.len + 32; // race condition, but it won't matter much
        try leak_check.leaks.resize(leak_check.allocator, len, true);
        try leak_check.allocs.ensureTotalCapacity(leak_check.allocator, len);
        self.hold.lock();
        len = @min(len, self.allocs.len); // avoid overflow and don't copy more than necessary
        leak_check.allocs.len = len;
        const src = self.allocs.slice();
        const dest = leak_check.allocs.slice();
        @memcpy(dest.items(.ptr), src.items(.ptr)[0..len]);
        @memcpy(dest.items(.len), src.items(.len)[0..len]);
        @memcpy(dest.items(.st), src.items(.st)[0..len]);
        self.hold.unlock();
        for (len..leak_check.leaks.bit_length) |i| leak_check.leaks.unset(i); // todo: this can be improved
        // sort
        const SortCtx = struct {
            slice: [*]usize,
            pub fn lessThan(ctx: @This(), a: usize, b: usize) bool {
                return ctx.slice[a] < ctx.slice[b];
            }
        };
        leak_check.allocs.sortUnstable(SortCtx{ .slice = leak_check.allocs.items(.ptr).ptr });
        // note: the leak check itself can leak, funny enough, but only if called multiple times
        leak_check.track(leak_check.allocs.bytes);
        leak_check.track(leak_check.leaks.masks);
        return leak_check;
    }
};
// pub fn main() !void {
//     const App = struct {
//         input: []u8,
//     };
//     var da = DebugAllocator{ .child = std.heap.page_allocator }; // child must be set to a real allocator
//     const app = try da.allocator().create(App);
//     app.input = try da.allocator().alloc(u8, 512);
//     app.input = try da.allocator().alloc(u8, 256); // leak!
//
//     var leak_check = try da.detectLeaks();
//     defer leak_check.deinit();
//     leak_check.track(app);
//     leak_check.summary();
// }