/* asm-fake.cpp revision 18c326c55a02b14ef381355b52a03b94dd36ea70 */
/* $Id$ */
/** @file
* IPRT - Fake asm.h routines for use early in a new port.
*/
/*
* Copyright (C) 2010 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*
* The contents of this file may alternatively be used under the terms
* of the Common Development and Distribution License Version 1.0
* (CDDL) only, as it comes in the "COPYING.CDDL" file of the
* VirtualBox OSE distribution, in which case the provisions of the
* CDDL are applicable instead of those of the GPL.
*
* You may elect to license modified versions of this file under the
* terms and conditions of either the GPL or the CDDL or both.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
{
return u8Ret;
}
{
return u16Ret;
}
{
return u32Ret;
}
{
return u64Ret;
}
{
{
return true;
}
return false;
}
/**
 * Compare-and-exchange an unsigned 32-bit value.
 *
 * NOTE(review): like everything in this file, this is a plain C fake for
 * early porting work — it is NOT actually atomic.
 *
 * @returns true if the value was exchanged, false if not.
 * @param   pu32    Pointer to the value to update.
 * @param   u32New  The value to store if the current value equals u32Old.
 * @param   u32Old  The value to compare the current value against.
 */
RTDECL(bool) ASMAtomicCmpXchgU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old)
{
    /* The stripped body returned true unconditionally without comparing or
       storing; restore proper compare-and-exchange semantics. */
    if (*pu32 == u32Old)
    {
        *pu32 = u32New;
        return true;
    }
    return false;
}
/**
 * Compare-and-exchange an unsigned 64-bit value.
 *
 * NOTE(review): like everything in this file, this is a plain C fake for
 * early porting work — it is NOT actually atomic.
 *
 * @returns true if the value was exchanged, false if not.
 * @param   pu64    Pointer to the value to update.
 * @param   u64New  The value to store if the current value equals u64Old.
 * @param   u64Old  The value to compare the current value against.
 */
RTDECL(bool) ASMAtomicCmpXchgU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old)
{
    /* The stripped body returned true unconditionally without comparing or
       storing; restore proper compare-and-exchange semantics. */
    if (*pu64 == u64Old)
    {
        *pu64 = u64New;
        return true;
    }
    return false;
}
/**
 * Compare-and-exchange an unsigned 32-bit value, additionally passing back
 * the value that was current at the time of the operation.
 *
 * NOTE(review): like everything in this file, this is a plain C fake for
 * early porting work — it is NOT actually atomic.
 *
 * @returns true if the value was exchanged, false if not.
 * @param   pu32    Pointer to the value to update.
 * @param   u32New  The value to store if the current value equals u32Old.
 * @param   u32Old  The value to compare the current value against.
 * @param   pu32Old Where to return the value seen in *pu32 (always written).
 */
RTDECL(bool) ASMAtomicCmpXchgExU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old, uint32_t *pu32Old)
{
    /* The stripped body returned true unconditionally, never comparing,
       storing, nor writing *pu32Old; restore the Ex-CAS semantics. */
    uint32_t u32Cur = *pu32;
    *pu32Old = u32Cur;              /* hand back the value we observed */
    if (u32Cur == u32Old)
    {
        *pu32 = u32New;
        return true;
    }
    return false;
}
/**
 * Compare-and-exchange an unsigned 64-bit value, additionally passing back
 * the value that was current at the time of the operation.
 *
 * NOTE(review): like everything in this file, this is a plain C fake for
 * early porting work — it is NOT actually atomic.
 *
 * @returns true if the value was exchanged, false if not.
 * @param   pu64    Pointer to the value to update.
 * @param   u64New  The value to store if the current value equals u64Old.
 * @param   u64Old  The value to compare the current value against.
 * @param   pu64Old Where to return the value seen in *pu64 (always written).
 */
RTDECL(bool) ASMAtomicCmpXchgExU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old, uint64_t *pu64Old)
{
    /* The stripped body returned true unconditionally, never comparing,
       storing, nor writing *pu64Old; restore the Ex-CAS semantics. */
    uint64_t u64Cur = *pu64;
    *pu64Old = u64Cur;              /* hand back the value we observed */
    if (u64Cur == u64Old)
    {
        *pu64 = u64New;
        return true;
    }
    return false;
}
{
return u32Old;
}
{
return *pu32 += 1;
}
{
return *pu32 -= 1;
}
{
}
{
}
/**
 * Instruction-stream serialization fence — intentionally a no-op here.
 *
 * NOTE(review): this file provides fake stand-ins for use early in a new
 * port; a real port must replace this with an actual serializing
 * instruction for the target CPU.
 */
RTDECL(void) ASMSerializeInstruction(void)
{
}
{
return *pu64;
}
{
return *pu64;
}
{
while (cbLeft-- > 0)
*puPtr++ = 0;
}
{
while (cbLeft-- > 0)
*pu32++ = 0;
}
{
while (cb > 0)
{
pu32++;
}
}
{
}
#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
/**
 * Spin-wait hint (x86/AMD64 only, see the enclosing #if) — intentionally a
 * no-op in this fake implementation.
 *
 * NOTE(review): a real port would emit the PAUSE instruction here; as a
 * pure hint, doing nothing is functionally harmless.
 */
RTDECL(void) ASMNopPause(void)
{
}
#endif
{
}
{
}
{
}
{
}
{
}
{
}
{
return true;
return false;
}
{
}
{
return false;
return true;
}
{
}
{
return fRet;
}
{
}
{
}
{
{
if (u32 != UINT32_MAX)
{
while (u32 & 1)
{
u32 >>= 1;
iBit++;
}
return -1;
return iBit;
}
iBit += 32;
pu32++;
}
return -1;
}
{
if (iBit)
{
/*
* Inspect the 32-bit word containing the unaligned bit.
*/
if (u32)
{
iBit = 0;
while (!(u32 & 1))
{
u32 >>= 1;
iBit++;
}
}
/*
* Skip ahead and see if there is anything left to search.
*/
iBitPrev |= 31;
iBitPrev++;
return -1;
}
/*
* 32-bit aligned search, let ASMBitFirstClear do the dirty work.
*/
if (iBit >= 0)
return iBit;
}
{
{
if (u32 != 0)
{
while (!(u32 & 1))
{
u32 >>= 1;
iBit++;
}
return -1;
return iBit;
}
iBit += 32;
pu32++;
}
return -1;
}
{
if (iBit)
{
/*
* Inspect the 32-bit word containing the unaligned bit.
*/
if (u32)
{
iBit = 0;
while (!(u32 & 1))
{
u32 >>= 1;
iBit++;
}
}
/*
* Skip ahead and see if there is anything left to search.
*/
iBitPrev |= 31;
iBitPrev++;
return -1;
}
/*
* 32-bit aligned search, let ASMBitFirstSet do the dirty work.
*/
if (iBit >= 0)
return iBit;
}
{
return iBit + 1;
return 0;
}
{
while (iBit-- > 0)
return iBit + 1;
return 0;
}
{
}
{
}