#include <windows.h>
#include <malloc.h>

static HANDLE FindCrtHeap()
{
    // Pick a block that we know is in the CRT heap.
    //
    _HEAPINFO crtEntry = {};
    if (_HEAPOK != _heapwalk(&crtEntry))
        return NULL;

    // Find all heaps in the process. One of them is the CRT heap. If there
    // are more heaps than fit in the buffer, GetProcessHeaps stores no
    // handles at all, so bail out in that case.
    //
    const DWORD kMaxHeaps = 16;
    HANDLE heaps[kMaxHeaps] = {};
    const DWORD heapCount = GetProcessHeaps(kMaxHeaps, heaps);
    if (heapCount == 0 || heapCount > kMaxHeaps)
        return NULL;

    // Walk the heaps in reverse order because the CRT heap was likely
    // initialized recently.
    //
    for (int i = (int)heapCount - 1; i >= 0; --i)
    {
        // Not locking the heap because we know nothing about it, and locking
        // heaps that were created with HEAP_NO_SERIALIZE invokes undefined
        // behavior.
        //
        const HANDLE heap = heaps[i];

        // Oddly, '_heapwalk' always skips the first block, so call HeapWalk
        // twice to land on the same block it reported.
        //
        PROCESS_HEAP_ENTRY entry = {};
        if (HeapWalk(heap, &entry) && HeapWalk(heap, &entry))
        {
            if (crtEntry._pentry == entry.lpData)
            {
                // Found it!
                return heap;
            }
        }
    }

    return NULL;
}

static bool EnableHeapGenerateExceptions(HANDLE heap)
{
    // Undefined behavior time! Poke into the heap internals and turn on the
    // HEAP_GENERATE_EXCEPTIONS flag, which causes failed allocations to raise
    // a structured exception. Why are there two places that need the flag? I
    // don't know, but one of them actually causes the behavior I want and the
    // other makes WinDbg realize that the heap has the
    // HEAP_GENERATE_EXCEPTIONS flag, so I figure it's a good idea to set both
    // of them. (On the 64-bit _HEAP layout this was written against, these
    // offsets appear to correspond to the Flags and ForceFlags members.)
    //
    DWORD* const flagsA = (DWORD*)((char*)heap + 0x70);
    DWORD* const flagsB = (DWORD*)((char*)heap + 0x74);

    // Verify that they hold exactly the values I expect before touching
    // anything.
    //
    if (*flagsA == 0x1002 && *flagsB == 0x0000)
    {
        *flagsA |= HEAP_GENERATE_EXCEPTIONS;
        *flagsB |= HEAP_GENERATE_EXCEPTIONS;
        return true;
    }

    return false;
}

bool RaiseExceptionOnFailedMalloc()
{
    const HANDLE crtHeap = FindCrtHeap();
    return crtHeap && EnableHeapGenerateExceptions(crtHeap);
}
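// A minimal usage sketch, my addition rather than part of the listing above:
// once 'RaiseExceptionOnFailedMalloc' succeeds, a failed 'malloc' should
// raise a STATUS_NO_MEMORY structured exception instead of returning NULL,
// which SEH can observe. The request size below is an arbitrary huge value,
// assumed to sit below the CRT's own _HEAP_MAXREQ cap so that the request
// actually reaches HeapAlloc.
//
#include <stdio.h>
#include <stdlib.h>

int main()
{
    if (!RaiseExceptionOnFailedMalloc())
    {
        printf("Could not find the CRT heap, or its flags looked unfamiliar.\n");
        return 1;
    }

    __try
    {
        void* const block = malloc((size_t)-1 / 2);
        free(block);
        printf("The allocation unexpectedly succeeded.\n");
    }
    __except (GetExceptionCode() == STATUS_NO_MEMORY ? EXCEPTION_EXECUTE_HANDLER
                                                     : EXCEPTION_CONTINUE_SEARCH)
    {
        printf("Caught STATUS_NO_MEMORY from a failed malloc.\n");
    }

    return 0;
}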