Lines Matching refs:buf

605 #define	UMERR_REDZONE	1	/* redzone violation (write past end of buf) */
629 uint64_t *buf = buf_arg;
631 while (buf < bufend)
632 *buf++ = pattern;
639 uint64_t *buf;
641 for (buf = buf_arg; buf < bufend; buf++)
642 if (*buf != pattern)
643 return (buf);
651 uint64_t *buf;
653 for (buf = buf_arg; buf < bufend; buf++) {
654 if (*buf != old) {
656 (char *)buf - (char *)buf_arg);
657 return (buf);
659 *buf = new;
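
The three pattern helpers these matches come from (copy_pattern at 629-632,
verify_pattern at 639-643, verify_and_copy_pattern at 651-659) can be
reconstructed almost verbatim from the fragments. A self-contained sketch:
the declarations not visible above are filled in from the loop bodies, and
size is assumed to be a multiple of 8.

#include <stddef.h>
#include <stdint.h>

/* fill [buf_arg, buf_arg + size) with a 64-bit pattern */
static void
copy_pattern(uint64_t pattern, void *buf_arg, size_t size)
{
	uint64_t *bufend = (uint64_t *)((char *)buf_arg + size);
	uint64_t *buf = buf_arg;

	while (buf < bufend)
		*buf++ = pattern;
}

/* return the first word that no longer matches, or NULL if intact */
static void *
verify_pattern(uint64_t pattern, void *buf_arg, size_t size)
{
	uint64_t *bufend = (uint64_t *)((char *)buf_arg + size);
	uint64_t *buf;

	for (buf = buf_arg; buf < bufend; buf++)
		if (*buf != pattern)
			return (buf);
	return (NULL);
}

/* swap old for new; on a mismatch, restore the prefix and report it */
static void *
verify_and_copy_pattern(uint64_t old, uint64_t new, void *buf_arg, size_t size)
{
	uint64_t *bufend = (uint64_t *)((char *)buf_arg + size);
	uint64_t *buf;

	for (buf = buf_arg; buf < bufend; buf++) {
		if (*buf != old) {
			copy_pattern(old, buf_arg,
			    (char *)buf - (char *)buf_arg);
			return (buf);
		}
		*buf = new;
	}
	return (NULL);
}
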
792 umem_findslab(umem_cache_t *cp, void *buf)
799 if (UMEM_SLAB_MEMBER(sp, buf)) {
817 void *buf = bufarg;
825 sp = umem_findslab(cp, buf);
829 if ((sp = umem_findslab(cp, buf)) != NULL)
841 buf = (char *)bufarg - ((uintptr_t)bufarg -
843 if (buf != bufarg)
846 btp = UMEM_BUFTAG(cp, buf);
849 for (bcp = *UMEM_HASH(cp, buf); bcp; bcp = bcp->bc_next)
850 if (bcp->bc_addr == buf)
857 bcp->bc_addr != buf) {
866 umem_abort_info.ump_realbuf = buf;
878 off = verify_pattern(UMEM_FREE_PATTERN, buf, cp->cache_verify);
880 off = buf;
883 (uintptr_t)off - (uintptr_t)buf,
925 "(%p)\n", bufarg, buf);
943 umem_printf("previous transaction on buffer %p:\n", buf);
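
umem_findslab() (792-799) is the diagnostic workhorse behind the error paths
above: given a suspect pointer, it walks the cache's slabs and tests
UMEM_SLAB_MEMBER(sp, buf) to decide whether buf belongs to the cache at all
(UMERR_BADADDR when it does not). A minimal sketch of that lookup; the list
layout and the member test here are simplified assumptions, not libumem's
actual types:

#include <stddef.h>

typedef struct slab {
	struct slab *slab_next;
	char *slab_base;
	size_t slab_size;
} slab_t;

static slab_t *
find_slab(slab_t *slab_list, void *buf)
{
	slab_t *sp;

	for (sp = slab_list; sp != NULL; sp = sp->slab_next) {
		/* the spirit of UMEM_SLAB_MEMBER(sp, buf) */
		if ((char *)buf >= sp->slab_base &&
		    (char *)buf < sp->slab_base + sp->slab_size)
			return (sp);
	}
	return (NULL);	/* buf is not from this cache */
}
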
1139 char *buf, *slab;
1172 sp->slab_base = buf = slab + color;
1187 bcp->bc_addr = buf;
1190 bcp = UMEM_BUFCTL(cp, buf);
1193 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1198 copy_pattern(UMEM_FREE_PATTERN, buf,
1204 buf += chunksize;
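
Lines 1139-1204 are umem_slab_create() carving a fresh slab into buffers:
the first buffer starts at slab + color (so successive slabs stagger their
cache-line usage), and the loop strides through the slab in chunksize steps,
threading each chunk onto a freelist and, when verification is on, stamping
it with the free pattern. A simplified sketch; the bufctl layout and the
pattern value are illustrative assumptions, and chunksize is assumed to be a
multiple of 8:

#include <stddef.h>
#include <stdint.h>

#define FREE_PATTERN	0xdeadbeefdeadbeefULL

typedef struct bufctl {
	struct bufctl *bc_next;
	void *bc_addr;
} bufctl_t;

static bufctl_t *
carve_slab(char *slab, size_t color, size_t chunksize, size_t nbufs,
    bufctl_t *bcps, int verify)
{
	char *buf = slab + color;	/* cf. line 1172 */
	bufctl_t *head = NULL;
	size_t i;

	for (i = 0; i < nbufs; i++) {
		bufctl_t *bcp = &bcps[i];

		bcp->bc_addr = buf;	/* cf. line 1187 */
		bcp->bc_next = head;
		head = bcp;

		if (verify) {		/* cf. line 1198 */
			uint64_t *p = (uint64_t *)(void *)buf;
			uint64_t *end = (uint64_t *)(void *)(buf + chunksize);

			while (p < end)
				*p++ = FREE_PATTERN;
		}
		buf += chunksize;	/* cf. line 1204 */
	}
	return (head);
}
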
1259 void *buf;
1302 buf = bcp->bc_addr;
1303 hash_bucket = UMEM_HASH(cp, buf);
1310 buf = UMEM_BUF(cp, bcp);
1313 ASSERT(UMEM_SLAB_MEMBER(sp, buf));
1317 return (buf);
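
For hashed caches, umem_slab_alloc() (1259-1317) records the allocation in
an external hash table: the bufctl popped off the slab's freelist carries
the buffer address, and it is pushed onto the bucket UMEM_HASH(cp, buf)
selects, which is what later lets umem_slab_free() map buf back to its
bufctl. A sketch of that push; the table size and the shift-based bucket
function are assumptions, not UMEM_HASH's actual definition:

#include <stddef.h>
#include <stdint.h>

#define HASH_SIZE	64	/* power of two, illustrative */

typedef struct bufctl {
	struct bufctl *bc_next;
	void *bc_addr;
} bufctl_t;

static bufctl_t **
hash_bucket(bufctl_t **table, void *buf, size_t shift)
{
	/* hash on the buffer address, as UMEM_HASH(cp, buf) does */
	return (&table[((uintptr_t)buf >> shift) & (HASH_SIZE - 1)]);
}

static void
hash_insert(bufctl_t **table, size_t shift, bufctl_t *bcp, void *buf)
{
	bufctl_t **bucket = hash_bucket(table, buf, shift);

	bcp->bc_addr = buf;
	bcp->bc_next = *bucket;	/* push onto the chain head */
	*bucket = bcp;
}
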
1324 umem_slab_free(umem_cache_t *cp, void *buf)
1329 ASSERT(buf != NULL);
1338 prev_bcpp = UMEM_HASH(cp, buf);
1340 if (bcp->bc_addr == buf) {
1349 bcp = UMEM_BUFCTL(cp, buf);
1350 sp = UMEM_SLAB(cp, buf);
1353 if (bcp == NULL || sp->slab_cache != cp || !UMEM_SLAB_MEMBER(sp, buf)) {
1355 umem_error(UMERR_BADADDR, cp, buf);
1362 umem_log_enter(umem_content_log, buf,
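
Lines 1338-1340 are the matching removal: umem_slab_free() walks the hash
chain with a pointer to the previous link (prev_bcpp), so unlinking the
bufctl whose bc_addr equals buf needs no head special case, and a miss means
a bad or duplicate free. A minimal sketch of that pointer-to-pointer unlink,
with the same assumed bufctl type as above:

#include <stddef.h>

typedef struct bufctl {
	struct bufctl *bc_next;
	void *bc_addr;
} bufctl_t;

static bufctl_t *
hash_unlink(bufctl_t **prev_bcpp, void *buf)
{
	bufctl_t *bcp;

	while ((bcp = *prev_bcpp) != NULL) {
		if (bcp->bc_addr == buf) {
			*prev_bcpp = bcp->bc_next;	/* splice it out */
			return (bcp);
		}
		prev_bcpp = &bcp->bc_next;
	}
	return (NULL);	/* not found: UMERR_BADADDR territory */
}
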
1405 umem_cache_alloc_debug(umem_cache_t *cp, void *buf, int umflag)
1407 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1413 umem_error(UMERR_BADBUFTAG, cp, buf);
1419 if ((cp->cache_flags & UMF_HASH) && bcp->bc_addr != buf) {
1420 umem_error(UMERR_BADBUFCTL, cp, buf);
1428 UMEM_UNINITIALIZED_PATTERN, buf, cp->cache_verify)) {
1429 umem_error(UMERR_MODIFIED, cp, buf);
1448 cp->cache_constructor(buf, cp->cache_private, flags_nfatal) != 0)) {
1451 copy_pattern(UMEM_FREE_PATTERN, buf, cp->cache_verify);
1452 umem_slab_free(cp, buf);
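
umem_cache_alloc_debug() (1405-1452) is where the free pattern pays off: on
the way out of the freelist, every verified word must still hold
UMEM_FREE_PATTERN, and each is swapped for UMEM_UNINITIALIZED_PATTERN in the
same pass so a later read of never-written memory is also recognizable
(UMERR_MODIFIED otherwise, lines 1428-1429). A sketch of that check; the
pattern values are assumptions and verify_size is taken to be a multiple
of 8:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define FREE_PATTERN	0xdeadbeefdeadbeefULL
#define UNINIT_PATTERN	0xbaddcafebaddcafeULL

/* return 0 if the freed buffer is intact, -1 if it was modified */
static int
alloc_time_verify(void *buf, size_t verify_size)
{
	uint64_t *p = buf;
	uint64_t *end = (uint64_t *)((char *)buf + verify_size);

	for (; p < end; p++) {
		if (*p != FREE_PATTERN) {
			(void) fprintf(stderr, "modified at offset %ld\n",
			    (long)((char *)p - (char *)buf));
			return (-1);	/* UMERR_MODIFIED */
		}
		*p = UNINIT_PATTERN;
	}
	return (0);
}
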
1464 umem_cache_free_debug(umem_cache_t *cp, void *buf)
1466 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1472 umem_error(UMERR_DUPFREE, cp, buf);
1475 sp = umem_findslab(cp, buf);
1477 umem_error(UMERR_BADADDR, cp, buf);
1479 umem_error(UMERR_REDZONE, cp, buf);
1485 if ((cp->cache_flags & UMF_HASH) && bcp->bc_addr != buf) {
1486 umem_error(UMERR_BADBUFCTL, cp, buf);
1491 umem_error(UMERR_REDZONE, cp, buf);
1498 buf, cp->cache_contents);
1503 cp->cache_destructor(buf, cp->cache_private);
1506 copy_pattern(UMEM_FREE_PATTERN, buf, cp->cache_verify);
1522 void *buf = mp->mag_round[round];
1525 verify_pattern(UMEM_FREE_PATTERN, buf,
1527 umem_error(UMERR_MODIFIED, cp, buf);
1533 cp->cache_destructor(buf, cp->cache_private);
1535 umem_slab_free(cp, buf);
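
Lines 1522-1535 drain a magazine: every cached round is pattern-verified
(buffers sitting in a magazine are logically free, so they must still carry
the free pattern), run through the destructor if one exists, and handed back
to the slab layer. A compact sketch with assumed callback types and the same
multiple-of-8 verify_size assumption as above:

#include <stddef.h>
#include <stdint.h>

#define FREE_PATTERN	0xdeadbeefdeadbeefULL

/* returns how many rounds had been modified while cached */
static int
drain_magazine(void **rounds, int nrounds, size_t verify_size,
    void (*destructor)(void *, void *), void *private,
    void (*slab_free)(void *))
{
	int round, bad = 0;

	for (round = 0; round < nrounds; round++) {
		void *buf = rounds[round];
		uint64_t *p = buf;
		uint64_t *end = (uint64_t *)((char *)buf + verify_size);

		for (; p < end; p++)
			if (*p != FREE_PATTERN) {	/* cf. 1525-1527 */
				bad++;
				break;
			}
		if (destructor != NULL)
			destructor(buf, private);	/* cf. line 1533 */
		slab_free(buf);				/* cf. line 1535 */
	}
	return (bad);
}
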
1643 void *buf;
1655 buf = ccp->cc_loaded->mag_round[--ccp->cc_rounds];
1659 umem_cache_alloc_debug(cp, buf, umflag) == -1) {
1666 return (buf);
1708 buf = umem_slab_alloc(cp, umflag);
1710 if (buf == NULL) {
1724 if (umem_cache_alloc_debug(cp, buf, umflag) == -1) {
1730 return (buf);
1739 cp->cache_constructor(buf, cp->cache_private, flags_nfatal) != 0) {
1741 umem_slab_free(cp, buf);
1749 return (buf);
1757 _umem_cache_free(umem_cache_t *cp, void *buf)
1764 if (umem_cache_free_debug(cp, buf) == -1)
1774 ccp->cc_loaded->mag_round[ccp->cc_rounds++] = buf;
1855 cp->cache_destructor(buf, cp->cache_private);
1857 umem_slab_free(cp, buf);
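
The matches from 1643 onward show the magazine fast paths: _umem_cache_alloc()
pops a round off the loaded per-CPU magazine (line 1655) and falls back to
the slab layer only when the magazines are exhausted (line 1708);
_umem_cache_free() is the mirror image, pushing onto the loaded magazine at
line 1774. A single-threaded sketch of that structure; the types, the round
count, and the omitted locking and depot exchange are all simplifications:

#include <stddef.h>

#define MAG_ROUNDS	15	/* illustrative magazine size */

typedef struct magazine {
	void *mag_round[MAG_ROUNDS];
} magazine_t;

typedef struct cpu_cache {
	magazine_t *cc_loaded;
	int cc_rounds;		/* rounds left in cc_loaded */
} cpu_cache_t;

static void *
magazine_alloc(cpu_cache_t *ccp)
{
	if (ccp->cc_loaded != NULL && ccp->cc_rounds > 0)
		return (ccp->cc_loaded->mag_round[--ccp->cc_rounds]);
	return (NULL);	/* empty: exchange with the depot or hit the slab */
}

static int
magazine_free(cpu_cache_t *ccp, void *buf)
{
	if (ccp->cc_loaded != NULL && ccp->cc_rounds < MAG_ROUNDS) {
		ccp->cc_loaded->mag_round[ccp->cc_rounds++] = buf;
		return (0);
	}
	return (-1);	/* full: swap magazines before retrying */
}
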
1865 void *buf;
1870 buf = _umem_cache_alloc(cp, umflag);
1871 if (buf != NULL) {
1873 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1874 ((uint8_t *)buf)[size] = UMEM_REDZONE_BYTE;
1877 bzero(buf, size);
1881 buf = _umem_alloc(size, umflag); /* handles failure */
1882 if (buf != NULL)
1883 bzero(buf, size);
1885 return (buf);
1893 void *buf;
1897 buf = _umem_cache_alloc(cp, umflag);
1898 if ((cp->cache_flags & UMF_BUFTAG) && buf != NULL) {
1899 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1900 ((uint8_t *)buf)[size] = UMEM_REDZONE_BYTE;
1903 if (buf == NULL && umem_alloc_retry(cp, umflag))
1905 return (buf);
1915 buf = vmem_alloc(umem_oversize_arena, size, UMEM_VMFLAGS(umflag));
1916 if (buf == NULL) {
1921 return (buf);
1928 void *buf;
1944 buf = vmem_xalloc(umem_memalign_arena, size, align, 0, 0, NULL, NULL,
1946 if (buf == NULL) {
1951 return (buf);
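
Aligned requests (1944-1946) go through vmem_xalloc() on a dedicated
umem_memalign_arena, and _umem_free_align() (1994-1998, below) must return
them to the same arena with the original size; mixing _umem_free() and
_umem_free_align() is an error. Outside vmem, the analogous libc contract is
posix_memalign() paired with free(); a minimal sketch of that substitute,
plainly not libumem's own code path:

#include <stdlib.h>

static void *
alloc_align(size_t align, size_t size)
{
	void *buf = NULL;

	/* align must be a power of two and a multiple of sizeof (void *) */
	if (posix_memalign(&buf, align, size) != 0)
		return (NULL);
	return (buf);
}
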
1956 _umem_free(void *buf, size_t size)
1963 umem_buftag_t *btp = UMEM_BUFTAG(cp, buf);
1966 if (*(uint64_t *)buf == UMEM_FREE_PATTERN) {
1967 umem_error(UMERR_DUPFREE, cp, buf);
1972 umem_error(UMERR_BADSIZE, cp, buf);
1974 umem_error(UMERR_REDZONE, cp, buf);
1978 if (((uint8_t *)buf)[size] != UMEM_REDZONE_BYTE) {
1979 umem_error(UMERR_REDZONE, cp, buf);
1984 _umem_cache_free(cp, buf);
1986 if (buf == NULL && size == 0)
1988 vmem_free(umem_oversize_arena, buf, size);
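
Lines 1900 and 1978-1979 are the two halves of the small-allocation redzone
byte: the allocator stores UMEM_REDZONE_BYTE just past the caller's size
(there is slack, since the cache chunk is rounded up), and _umem_free(buf,
size) verifies it, so an off-by-one write is caught at free time; a first
word already holding the free pattern is flagged as a duplicate free before
that (lines 1966-1967). A self-contained sketch of the redzone round trip;
the byte value and the slack amount are assumptions:

#include <stdint.h>
#include <stdlib.h>

#define REDZONE_BYTE	0xbb
#define CHUNK_SLACK	8	/* cache chunks are larger than the request */

static void *
alloc_with_redzone(size_t size)
{
	uint8_t *buf = malloc(size + CHUNK_SLACK);

	if (buf != NULL)
		buf[size] = REDZONE_BYTE;	/* cf. line 1900 */
	return (buf);
}

static int
free_with_redzone(void *buf, size_t size)
{
	if (((uint8_t *)buf)[size] != REDZONE_BYTE)
		return (-1);	/* UMERR_REDZONE, cf. lines 1978-1979 */
	free(buf);
	return (0);
}
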
1994 _umem_free_align(void *buf, size_t size)
1996 if (buf == NULL && size == 0)
1998 vmem_xfree(umem_memalign_arena, buf, size);