-
-
Save Elabajaba/322428c5d2b5305a44c4f393fe05c783 to your computer and use it in GitHub Desktop.
gpu-alloc dx12 issues
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Build a gpu-alloc allocator for the DX12 backend. Most of the Vulkan-centric
// `DeviceProperties` fields have no direct DX12 equivalent, so they are stubbed
// with permissive placeholder values until real limits are wired in.
let mem_allocator = {
    let config = gpu_alloc::Config::i_am_prototyping(); // TODO: switch to a tuned config before shipping
    let properties = gpu_alloc::DeviceProperties {
        // Afaik a max allocation count isn't really a thing on dx12 like it is
        // on Vulkan, so use the maximum.
        max_memory_allocation_count: u32::MAX,
        max_memory_allocation_size: u64::MAX, // TODO: query a real limit if one exists
        // I don't think non-coherent atoms are a thing on dx12? If they are,
        // I don't know what the equivalent is. 256 is a placeholder.
        non_coherent_atom_size: 256,
        // I don't know what to do for these. DX12 seems to generally just use
        // D3D12_HEAP_TYPE_DEFAULT, D3D12_HEAP_TYPE_UPLOAD, or
        // D3D12_HEAP_TYPE_READBACK, though D3D12_HEAP_TYPE_CUSTOM exists where you
        // can choose D3D12_MEMORY_POOL_(L0 or L1 for gpu or cpu memory)
        // and CPUPageProperty (NOT_AVAILABLE, WRITE_COMBINE, WRITE_BACK, or UNKNOWN).
        // https://www.asawicki.info/news_1755_untangling_direct3d_12_memory_heap_types_and_pools
        memory_types: todo!(),
        memory_heaps: todo!(),
        buffer_device_address: false, // TODO: no DX12 equivalent identified yet
    };
    gpu_alloc::GpuAllocator::new(config, properties)
};
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
unsafe fn create_buffer( | |
&self, | |
desc: &crate::BufferDescriptor, | |
) -> Result<super::Buffer, crate::DeviceError> { | |
let mut resource = native::Resource::null(); | |
let mut size = desc.size; | |
if desc.usage.contains(crate::BufferUses::UNIFORM) { | |
let align_mask = d3d12::D3D12_CONSTANT_BUFFER_DATA_PLACEMENT_ALIGNMENT as u64 - 1; | |
size = ((size - 1) | align_mask) + 1; | |
} | |
// DX12 stuff | |
let raw_desc = D3D12_RESOURCE_DESC { | |
Dimension: d3d12::D3D12_RESOURCE_DIMENSION_BUFFER, | |
Alignment: 0, // TODO | |
Width: size, | |
Height: 1, | |
DepthOrArraySize: 1, | |
MipLevels: 1, | |
Format: dxgiformat::DXGI_FORMAT_UNKNOWN, | |
SampleDesc: dxgitype::DXGI_SAMPLE_DESC { | |
Count: 1, | |
Quality: 0, | |
}, | |
Layout: d3d12::D3D12_TEXTURE_LAYOUT_ROW_MAJOR, | |
Flags: conv::map_buffer_usage_to_resource_flags(desc.usage), | |
}; | |
let heap = self.allocator.allocate_memory(...).unwrap(); | |
let hr = self.raw.CreatePlacedResource( | |
heap, | |
heap.offset(), // Need a way to get the offset in the heap from gpu-alloc somehow | |
&raw_desc, | |
d3d12::D3D12_RESOURCE_STATE_COMMON, | |
ptr::null(), | |
&d3d12::ID3D12Resource::uuidof(), | |
resource.mut_void(), | |
); | |
Ok(super::Buffer { | |
resource, | |
size, | |
heap, | |
}) | |
} | |
// mapping is all done at the wgpu level instead of the gpu-alloc level due to the Heap vs Resource conflict. | |
unsafe fn map_buffer( | |
&self, | |
buffer: &super::Buffer, | |
range: crate::MemoryRange, | |
) -> Result<crate::BufferMapping, crate::DeviceError> { | |
let mut ptr = ptr::null_mut(); | |
let subresource_id = 0; // Ideally we would suballocate at the resource level, but 0 is fine for now. | |
let hr = (*buffer.resource).Map(subresource_id, ptr::null(), &mut ptr); | |
hr.into_device_result("Map buffer")?; | |
Ok(crate::BufferMapping { | |
ptr: ptr::NonNull::new(ptr.offset(range.start as isize).cast::<u8>()).unwrap(), | |
is_coherent: true, | |
}) | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
impl gpu_alloc::MemoryDevice<Heap> for super::Device { | |
unsafe fn allocate_memory( | |
&self, | |
size: u64, | |
memory_type: u32, | |
flags: gpu_alloc::AllocationFlags, | |
) -> Result<Heap, gpu_alloc::OutOfMemory> { | |
let heap_type = match memory_type { | |
0 => HeapType::Default, | |
1 => HeapType::Upload, | |
2 => HeapType::Readback, | |
3 => HeapType::Custom, | |
_ => panic!("Invalid dx12 memory type"), | |
}; | |
// Placeholder values | |
let cpu_page_property = native::CpuPageProperty::Unknown; | |
let memory_pool_preference = native::MemoryPool::Unknown; | |
let creation_node_mask = 0; | |
let visible_node_mask = 0; | |
let heap_properties = native::HeapProperties::new( | |
heap_type, | |
cpu_page_property, | |
memory_pool_preference, | |
creation_node_mask, | |
visible_node_mask, | |
); | |
let mut info_flags = HeapFlags::NONE; | |
let (heap, hr) = self.raw.create_heap(size, heap_properties, 0, info_flags); | |
match hr { | |
winerror::S_OK => Ok(heap), | |
winerror::E_OUTOFMEMORY => Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory), | |
_ => panic!("Unexpected dx12 error: `{}`", hr), | |
} | |
} | |
unsafe fn deallocate_memory(&self, memory: Heap) { | |
memory.Release(); | |
} | |
unsafe fn map_memory( | |
&self, | |
memory: &mut Heap, | |
offset: u64, | |
size: u64, | |
) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> { | |
// You can't map a heap in dx12, you create a Resource with a heap and map the Resource. | |
unimplemented!() | |
} | |
unsafe fn unmap_memory(&self, memory: &mut Heap) { | |
// Same as map_memory | |
unimplemented!() | |
} | |
unsafe fn invalidate_memory_ranges( | |
&self, | |
_ranges: &[gpu_alloc::MappedMemoryRange<'_, Heap>], | |
) -> Result<(), gpu_alloc::OutOfMemory> { | |
// should never be called | |
unimplemented!() | |
} | |
unsafe fn flush_memory_ranges( | |
&self, | |
_ranges: &[gpu_alloc::MappedMemoryRange<'_, Heap>], | |
) -> Result<(), gpu_alloc::OutOfMemory> { | |
// should never be called | |
unimplemented!() | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment