Lines Matching refs:pChunk

44 #define ASSERT_OFFPREV(pHeap, pChunk)  \
45 do { Assert(MMHYPERCHUNK_GET_OFFPREV(pChunk) <= 0); \
46 Assert(MMHYPERCHUNK_GET_OFFPREV(pChunk) >= (intptr_t)(pHeap)->CTX_SUFF(pbHeap) - (intptr_t)(pChunk)); \
47 AssertMsg( MMHYPERCHUNK_GET_OFFPREV(pChunk) != 0 \
48 || (uint8_t *)(pChunk) == (pHeap)->CTX_SUFF(pbHeap), \
49 ("pChunk=%p pvHyperHeap=%p\n", (pChunk), (pHeap)->CTX_SUFF(pbHeap))); \
52 #define ASSERT_OFFNEXT(pHeap, pChunk) \
53 do { ASSERT_ALIGN((pChunk)->offNext); \
54 ASSERT_L((pChunk)->offNext, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
57 #define ASSERT_OFFHEAP(pHeap, pChunk) \
58 do { Assert((pChunk)->offHeap); \
59 AssertMsg((PMMHYPERHEAP)((pChunk)->offHeap + (uintptr_t)pChunk) == (pHeap), \
60 ("offHeap=%RX32 pChunk=%p pHeap=%p\n", (pChunk)->offHeap, (pChunk), (pHeap))); \
65 #define ASSERT_OFFSTAT(pHeap, pChunk) \
66 do { if (MMHYPERCHUNK_ISFREE(pChunk)) \
67 Assert(!(pChunk)->offStat); \
68 else if ((pChunk)->offStat) \
70 Assert((pChunk)->offStat); \
71 AssertMsg(!((pChunk)->offStat & (MMHYPER_HEAP_ALIGN_MIN - 1)), ("offStat=%RX32\n", (pChunk)->offStat)); \
72 uintptr_t uPtr = (uintptr_t)(pChunk)->offStat + (uintptr_t)pChunk; NOREF(uPtr); \
78 #define ASSERT_OFFSTAT(pHeap, pChunk) \
79 do { Assert(!(pChunk)->offStat); \
83 #define ASSERT_CHUNK(pHeap, pChunk) \
84 do { ASSERT_OFFNEXT(pHeap, pChunk); \
85 ASSERT_OFFPREV(pHeap, pChunk); \
86 ASSERT_OFFHEAP(pHeap, pChunk); \
87 ASSERT_OFFSTAT(pHeap, pChunk); \
89 #define ASSERT_CHUNK_USED(pHeap, pChunk) \
90 do { ASSERT_OFFNEXT(pHeap, pChunk); \
91 ASSERT_OFFPREV(pHeap, pChunk); \
92 Assert(MMHYPERCHUNK_ISUSED(pChunk)); \
95 #define ASSERT_FREE_OFFPREV(pHeap, pChunk) \
96 do { ASSERT_ALIGN((pChunk)->offPrev); \
97 ASSERT_GE(((pChunk)->offPrev & (MMHYPER_HEAP_ALIGN_MIN - 1)), (intptr_t)(pHeap)->CTX_SUFF(pbHeap) - (intptr_t)(pChunk)); \
98 Assert((pChunk)->offPrev != MMHYPERCHUNK_GET_OFFPREV(&(pChunk)->core) || !(pChunk)->offPrev); \
99 AssertMsg( (pChunk)->offPrev \
100 || (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) == (pHeap)->offFreeHead, \
101 ("pChunk=%p offChunk=%#x offFreeHead=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap),\
105 #define ASSERT_FREE_OFFNEXT(pHeap, pChunk) \
106 do { ASSERT_ALIGN((pChunk)->offNext); \
107 ASSERT_L((pChunk)->offNext, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
108 Assert((pChunk)->offNext != (pChunk)->core.offNext || !(pChunk)->offNext); \
109 AssertMsg( (pChunk)->offNext \
110 || (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) == (pHeap)->offFreeTail, \
111 ("pChunk=%p offChunk=%#x offFreeTail=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap), \
115 #define ASSERT_FREE_CB(pHeap, pChunk) \
116 do { ASSERT_ALIGN((pChunk)->cb); \
117 Assert((pChunk)->cb > 0); \
118 if ((pChunk)->core.offNext) \
119 AssertMsg((pChunk)->cb == ((pChunk)->core.offNext - sizeof(MMHYPERCHUNK)), \
120 ("cb=%d offNext=%d\n", (pChunk)->cb, (pChunk)->core.offNext)); \
122 ASSERT_LE((pChunk)->cb, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
125 #define ASSERT_CHUNK_FREE(pHeap, pChunk) \
126 do { ASSERT_CHUNK(pHeap, &(pChunk)->core); \
127 Assert(MMHYPERCHUNK_ISFREE(pChunk)); \
128 ASSERT_FREE_OFFNEXT(pHeap, pChunk); \
129 ASSERT_FREE_OFFPREV(pHeap, pChunk); \
130 ASSERT_FREE_CB(pHeap, pChunk); \
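The assertion macros above all check the chunk headers' self-relative link fields: offNext, offPrev, offHeap and offStat are byte offsets measured from the chunk's own address, so offNext must stay below the distance to the end of the heap (line 54/107), offPrev must be zero or negative and must not reach below pbHeap (lines 45-46), and only the very first chunk may have offPrev == 0 (lines 47-48); offHeap and offStat similarly locate the owning heap and the statistics record. Below is a minimal, self-contained sketch of that encoding and the corresponding checks; the DEMOCHUNK structure, the 16-byte minimum alignment and the helper names are my own simplifications, not the real MMHYPERCHUNK layout.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for MMHYPERCHUNK: the link fields are byte offsets
       relative to the chunk's own address, which keeps the heap position
       independent.  offNext is 0 for the last chunk; offPrev is <= 0 (it
       points backwards) and 0 only for the first chunk in the heap. */
    typedef struct DEMOCHUNK
    {
        uint32_t offNext;   /* bytes forward to the next header, 0 = last   */
        int32_t  offPrev;   /* bytes (negative) back to the previous header */
    } DEMOCHUNK;

    #define DEMO_ALIGN_MIN 16

    /* Roughly the invariants ASSERT_OFFNEXT/ASSERT_OFFPREV spell out. */
    static void demoAssertChunk(uint8_t *pbHeap, size_t cbHeap, DEMOCHUNK *pChunk)
    {
        assert(!(pChunk->offNext & (DEMO_ALIGN_MIN - 1)));                        /* aligned           */
        assert(pChunk->offNext < (uintptr_t)pbHeap + cbHeap - (uintptr_t)pChunk); /* stays in the heap */
        assert(pChunk->offPrev <= 0);                                             /* points backwards  */
        assert(pChunk->offPrev >= (intptr_t)pbHeap - (intptr_t)pChunk);           /* not below pbHeap  */
        assert(pChunk->offPrev != 0 || (uint8_t *)pChunk == pbHeap);              /* only head is 0    */
    }

    int main(void)
    {
        static _Alignas(16) uint8_t s_abHeap[4096];

        /* Lay out two chunks and link them with self-relative offsets. */
        DEMOCHUNK *pFirst  = (DEMOCHUNK *)&s_abHeap[0];
        DEMOCHUNK *pSecond = (DEMOCHUNK *)&s_abHeap[64];
        pFirst->offNext  = 64;
        pFirst->offPrev  = 0;
        pSecond->offNext = 0;
        pSecond->offPrev = -64;

        demoAssertChunk(s_abHeap, sizeof(s_abHeap), pFirst);
        demoAssertChunk(s_abHeap, sizeof(s_abHeap), pSecond);

        /* Walking the list is just pointer arithmetic on the chunk's address. */
        DEMOCHUNK *pNext = (DEMOCHUNK *)((uintptr_t)pFirst + pFirst->offNext);
        printf("next of first chunk: %p, second chunk: %p\n", (void *)pNext, (void *)pSecond);
        return 0;
    }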
145 static int mmHyperFree(PMMHYPERHEAP pHeap, PMMHYPERCHUNK pChunk);
270 PMMHYPERCHUNK pChunk = mmHyperAllocChunk(pHeap, cbAligned, uAlignment);
271 if (pChunk)
274 const uint32_t cbChunk = pChunk->offNext
275 ? pChunk->offNext
276 : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
282 pChunk->offStat = (uintptr_t)pStat - (uintptr_t)pChunk;
284 pChunk->offStat = 0;
286 void *pv = pChunk + 1;
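Line 286 shows what the alloc path hands back to the caller: pChunk + 1, the first byte after the chunk header, so the header always sits immediately in front of the user block. That is also why line 799 below can recover the header from a user pointer with (PMMHYPERCHUNK)pv - 1. A minimal sketch of that round trip, using a stand-in header type of my own rather than the real MMHYPERCHUNK:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in for the chunk header that precedes every user block. */
    typedef struct DEMOHDR
    {
        uint32_t offNext;
        uint32_t offStat;
    } DEMOHDR;

    int main(void)
    {
        /* Alloc side: carve header + payload, hand out the byte after the header. */
        DEMOHDR *pChunk = calloc(1, sizeof(DEMOHDR) + 64);
        if (!pChunk)
            return 1;
        void *pv = pChunk + 1;              /* the user pointer, as on line 286 */

        /* Free side: step one header back to recover the chunk, the same
           arithmetic as the (PMMHYPERCHUNK)pv - 1 cast on line 799. */
        DEMOHDR *pHdr = (DEMOHDR *)pv - 1;
        printf("header=%p user=%p recovered=%p\n", (void *)pChunk, pv, (void *)pHdr);

        free(pChunk);
        return 0;
    }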
731 PMMHYPERCHUNK pChunk = mmHyperAllocChunk(pHeap, RT_ALIGN(sizeof(*pStat), MMHYPER_HEAP_ALIGN_MIN), MMHYPER_HEAP_ALIGN_MIN);
732 if (!pChunk)
734 pStat = (PMMHYPERSTAT)(pChunk + 1);
735 pChunk->offStat = (uintptr_t)pStat - (uintptr_t)pChunk;
799 PMMHYPERCHUNK pChunk = (PMMHYPERCHUNK)((PMMHYPERCHUNK)pv - 1);
801 AssertMsgReturn( (uintptr_t)pChunk + pChunk->offNext >= (uintptr_t)pChunk
802 || RT_ALIGN_32(pChunk->offNext, MMHYPER_HEAP_ALIGN_MIN) != pChunk->offNext,
803 ("%p: offNext=%#RX32\n", pv, pChunk->offNext),
806 AssertMsgReturn(MMHYPERCHUNK_ISUSED(pChunk),
810 int32_t offPrev = MMHYPERCHUNK_GET_OFFPREV(pChunk);
811 AssertMsgReturn( (uintptr_t)pChunk + offPrev <= (uintptr_t)pChunk
818 PMMHYPERSTAT pStat = (PMMHYPERSTAT)((uintptr_t)pChunk + pChunk->offStat);
820 && pChunk->offStat,
821 ("%p: offStat=%#RX32!\n", pv, pChunk->offStat),
824 AssertMsgReturn(!pChunk->offStat,
825 ("%p: offStat=%#RX32!\n", pv, pChunk->offStat),
830 PMMHYPERHEAP pHeap = (PMMHYPERHEAP)((uintptr_t)pChunk + pChunk->offHeap);
832 && pChunk->offHeap,
833 ("%p: pHeap=%#x offHeap=%RX32\n", pv, pHeap->u32Magic, pChunk->offHeap),
842 AssertMsgReturn((uintptr_t)pChunk + offPrev >= (uintptr_t)pHeap->CTX_SUFF(pbHeap),
846 AssertMsgReturn(pChunk->offNext < pHeap->cbHeap,
847 ("%p: offNext=%#RX32!\n", pv, pChunk->offNext),
861 const uint32_t cbChunk = pChunk->offNext
862 ? pChunk->offNext
863 : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
867 memset(pChunk + 1, MMHYPER_HEAP_FREE_POISON, cbChunk - sizeof(*pChunk));
900 pHeap->aDelayedFrees[pHeap->iDelayedFree].offChunk = (uintptr_t)pChunk - (uintptr_t)pHeap;
908 int rc = mmHyperFree(pHeap, pChunk);
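Before the real free on line 908, line 900 records the chunk's heap-relative offset in the small aDelayedFrees ring indexed by iDelayedFree, after the payload has been poison-filled on line 867; that suggests freed blocks are parked for a while before actually being handed back, which also helps catch double frees like the one asserted on line 999. The following is only a rough sketch of such a delayed-free ring under those assumptions; the names, the ring size and the release callback are mine and do not reflect the real MMHYPERHEAP fields' exact semantics.

    #include <stdint.h>
    #include <stdio.h>

    /* Rough sketch of a delayed-free ring: a freed chunk's heap-relative offset
       is parked in a small ring first, and only the entry it evicts is really
       released, so freshly freed (poisoned) memory stays untouched for a few
       more frees.  Names and the ring size are mine. */
    #define DEMO_FREE_DELAY 8

    typedef struct DEMODELAYEDFREES
    {
        uintptr_t aoffChunk[DEMO_FREE_DELAY];  /* parked offsets, 0 = empty slot */
        unsigned  iDelayedFree;                /* next slot to use (ring index)  */
    } DEMODELAYEDFREES;

    /* Stand-in for the real mmHyperFree(): here it only logs what it releases. */
    static void demoReallyFree(uintptr_t offChunk)
    {
        printf("really freeing chunk at heap offset %#lx\n", (unsigned long)offChunk);
    }

    /* Park offChunk in the ring and release whatever entry it evicts. */
    static void demoDelayedFree(DEMODELAYEDFREES *pRing, uintptr_t offChunk)
    {
        uintptr_t offEvicted = pRing->aoffChunk[pRing->iDelayedFree];
        pRing->aoffChunk[pRing->iDelayedFree] = offChunk;
        pRing->iDelayedFree = (pRing->iDelayedFree + 1) % DEMO_FREE_DELAY;
        if (offEvicted)
            demoReallyFree(offEvicted);
    }

    int main(void)
    {
        DEMODELAYEDFREES Ring = { { 0 }, 0 };

        /* The first DEMO_FREE_DELAY frees are only parked; later frees evict
           and release the oldest parked chunk. */
        for (uintptr_t off = 0x40; off <= 0x40 * 12; off += 0x40)
            demoDelayedFree(&Ring, off);
        return 0;
    }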
953 * @param pChunk The memory chunk to free.
955 static int mmHyperFree(PMMHYPERHEAP pHeap, PMMHYPERCHUNK pChunk)
957 Log3(("mmHyperFree: Enter pHeap=%p pChunk=%p\n", pHeap, pChunk));
958 PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)pChunk;
999 AssertMsgFailed(("Freed twice! pv=%p (pChunk=%p)\n", pChunk + 1, pChunk));
1002 pChunk->offStat = 0;