#ifndef __BUILTINS_LOADED
#define __BUILTINS_LOADED 1
/****************************************************************************
**
**  <builtins.h> - Prototypes for platform specific builtins
**
*****************************************************************************
**  Header is nonstandard
*****************************************************************************
**
**  Copyright 2008 Hewlett-Packard Development Company, L.P.
**
**  Confidential computer software.  Valid license from HP and/or
**  its subsidiaries required for possession, use, or copying.
**
**  Consistent with FAR 12.211 and 12.212, Commercial Computer Software,
**  Computer Software Documentation, and Technical Data for Commercial
**  Items are licensed to the U.S. Government under vendor's standard
**  commercial license.
**
**  Neither HP nor any of its subsidiaries shall be liable for technical
**  or editorial errors or omissions contained herein.  The information
**  in this document is provided "as is" without warranty of any kind and
**  is subject to change without notice.  The warranties for HP products
**  are set forth in the express limited warranty statements accompanying
**  such products.  Nothing herein should be construed as constituting an
**  additional warranty.
**
*****************************************************************************
*/

#pragma __nostandard

#include

#ifdef __cplusplus
extern "C" {
#endif

#if __INITIAL_POINTER_SIZE
#   pragma __pointer_size __save
#   pragma __pointer_size 32
#endif

/*
** This header file is divided into the following four sections:
**
**      OpenVMS IA64 - Just those builtins unique to ia64
**      OpenVMS AXP and ia64 (some produce diagnostics on ia64 as marked)
**      OpenVMS AXP and ia64 with __X_FLOAT
**      OpenVMS VAX DEC C Only (No DEC C++)
*/

/************************************************************************/
#if defined(__ia64) && defined(__VMS)
/************************************************************************/

/*
** Parameters that must be specified as compile-time integer constants.
** Although this typedef does not actually behave differently from
** "plain int" for a parameter, it suggests the intent - the builtin
** functions themselves will enforce the use of a compile-time
** constant for parameters that require it.
*/
typedef const int __Integer_Constant;

/*
** Selected OS Related Itanium Built-ins defined by Intel's
*/

/*
** The __whichReg and __whichIndReg parameters are values defined
** below.
*/

/**
*** Copyright (C) 1985-2001 Intel Corporation.  All rights reserved.
***
*** The information and source code contained herein is the exclusive
*** property of Intel Corporation and may not be disclosed, examined
*** or reproduced in whole or in part without explicit written authorization
*** from the company.
***
**/

#ifndef _IA64REGS_H_INCLUDED
#define _IA64REGS_H_INCLUDED

/*                                             */
/*  Register Names for getReg() and setReg()   */
/*                                             */

/* Special Registers */
#define _IA64_REG_IP            1016    /* getReg only */
#define _IA64_REG_PSR_UM        1017
#define _IA64_REG_PSR           1019
#define _IA64_REG_PSR_L         1019

/* General Integer Registers */
#define _IA64_REG_GP            1025    /* R1  */
#define _IA64_REG_SP            1036    /* R12 */
#define _IA64_REG_TP            1037    /* R13 */

/* Application Registers */
#define _IA64_REG_AR_KR0        3072
#define _IA64_REG_AR_KR1        3073
#define _IA64_REG_AR_KR2        3074
#define _IA64_REG_AR_KR3        3075
#define _IA64_REG_AR_KR4        3076
#define _IA64_REG_AR_KR5        3077
#define _IA64_REG_AR_KR6        3078
#define _IA64_REG_AR_KR7        3079
#define _IA64_REG_AR_RSC        3088
#define _IA64_REG_AR_BSP        3089
#define _IA64_REG_AR_BSPSTORE   3090
#define _IA64_REG_AR_RNAT       3091
#define _IA64_REG_AR_FCR        3093
#define _IA64_REG_AR_EFLAG      3096
#define _IA64_REG_AR_CSD        3097
#define _IA64_REG_AR_SSD        3098
#define _IA64_REG_AR_CFLAG      3099
#define _IA64_REG_AR_FSR        3100
#define _IA64_REG_AR_FIR        3101
#define _IA64_REG_AR_FDR        3102
#define _IA64_REG_AR_CCV        3104
#define _IA64_REG_AR_UNAT       3108
#define _IA64_REG_AR_FPSR       3112
#define _IA64_REG_AR_ITC        3116
#define _IA64_REG_AR_RUC        3117
#define _IA64_REG_AR_PFS        3136
#define _IA64_REG_AR_LC         3137
#define _IA64_REG_AR_EC         3138

/* Control Registers */
#define _IA64_REG_CR_DCR        4096
#define _IA64_REG_CR_ITM        4097
#define _IA64_REG_CR_IVA        4098
#define _IA64_REG_CR_PTA        4104
#define _IA64_REG_CR_IPSR       4112
#define _IA64_REG_CR_ISR        4113
#define _IA64_REG_CR_IIP        4115
#define _IA64_REG_CR_IFA        4116
#define _IA64_REG_CR_ITIR       4117
#define _IA64_REG_CR_IIPA       4118
#define _IA64_REG_CR_IFS        4119
#define _IA64_REG_CR_IIM        4120
#define _IA64_REG_CR_IHA        4121
#define _IA64_REG_CR_IIB0       4122
#define _IA64_REG_CR_IIB1       4123
#define _IA64_REG_CR_LID        4160
#define _IA64_REG_CR_IVR        4161    /* getReg only */
#define _IA64_REG_CR_TPR        4162
#define _IA64_REG_CR_EOI        4163
#define _IA64_REG_CR_IRR0       4164    /* getReg only */
#define _IA64_REG_CR_IRR1       4165    /* getReg only */
#define _IA64_REG_CR_IRR2       4166    /* getReg only */
#define _IA64_REG_CR_IRR3       4167    /* getReg only */
#define _IA64_REG_CR_ITV        4168
#define _IA64_REG_CR_PMV        4169
#define _IA64_REG_CR_CMCV       4170
#define _IA64_REG_CR_LRR0       4176
#define _IA64_REG_CR_LRR1       4177

/* Indirect Registers for getIndReg() and setIndReg() */
#define _IA64_REG_INDR_CPUID    9000    /* getIndReg only */
#define _IA64_REG_INDR_DBR      9001
#define _IA64_REG_INDR_IBR      9002
#define _IA64_REG_INDR_PKR      9003
#define _IA64_REG_INDR_PMC      9004
#define _IA64_REG_INDR_PMD      9005
#define _IA64_REG_INDR_RR       9006

#endif  /* _IA64REGS_H_INCLUDED */

unsigned __int64 __getReg(__Integer_Constant __whichReg);
void __setReg(__Integer_Constant __whichReg, unsigned __int64 __value);
unsigned __int64 __getIndReg(__Integer_Constant __whichIndReg, __int64 __index);
void __setIndReg(__Integer_Constant __whichIndReg, __int64 __index, unsigned __int64 __value);

void __break(__Integer_Constant __break_arg);   /* Native ia64, arg as-is */
void __dsrlz(void);
void __fc(__int64 __address);
void __fci(__int64 __address);
void __fwb(void);
void __invalat(void);
void __invala(void);    /* alternate spelling of __invalat */
void __isrlz(void);
void __itcd(__int64 __address);
void __itci(__int64 __address);
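/*
** A minimal usage sketch for the register-access builtins above
** (illustrative only; the function names below are not part of this
** header): reading the interval time counter and the stack pointer
** on OpenVMS I64.
*/
#if 0
static unsigned __int64 __example_read_itc(void)
{
    return __getReg(_IA64_REG_AR_ITC);      /* application register AR.ITC */
}

static unsigned __int64 __example_read_sp(void)
{
    return __getReg(_IA64_REG_SP);          /* general register R12 */
}
#endif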
/*
** The __whichTransReg param selects an Address Translation Register,
** GEM_TS_REG_K_AR[0..127], and is an integer from 0-127, like
** whichFloatReg (GEM_TS_REG_K_F[0..127]) and whichGeneralReg
** (GEM_TS_REG_K_R[0..127]).
**
** GEM engineering says that the number of Address Translation Registers is
** processor dependent - a small integer <= 31 would probably work
** on most any processor.
*/
void __itrd(__int64 __whichTransReg, __int64 __address);
void __itri(__int64 __whichTransReg, __int64 __address);

void __ptce(__int64 __address);
void __ptcl(__int64 __address, __int64 __pagesz);
void __ptcg(__int64 __address, __int64 __pagesz);
void __ptcga(__int64 __address, __int64 __pagesz);
void __ptri(__int64 __address, __int64 __pagesz);
void __ptrd(__int64 __address, __int64 __pagesz);
void __rsm(__Integer_Constant __mask);
void __rum(__Integer_Constant __mask);
void __ssm(__Integer_Constant __mask);
void __sum(__Integer_Constant __mask);
void __synci(void);
__int64 /*address*/ __thash(__int64 __address);
__int64 /*address*/ __ttag(__int64 __address);

/* GEM IA64 built-ins */
void __break2(__Integer_Constant __break_code, unsigned __int64 __r17_value);
void __flushrs(void);
void __loadrs(void);
int __prober(__int64 __address, unsigned int __mode);
int __probew(__int64 __address, unsigned int __mode);
unsigned int __tak(__int64 __address);
__int64 /*address*/ __tpa(__int64 __address);

/************************************************************************/
#endif  /* __ia64 && __VMS */
/************************************************************************/

/*
** Certain OpenVMS header files expect the __PAL builtins to accept
** 64-bit pointers regardless of whether the /POINTER_SIZE qualifier is
** used.  To allow this, we will define a typedef to be used with
** those prototypes.
*/
#if defined __ALPHA || defined __ia64
# ifndef ___VOID__PTR64
#   define ___VOID__PTR64 1
#   ifdef __INITIAL_POINTER_SIZE
#     if __INITIAL_POINTER_SIZE
#       pragma __pointer_size __save
#       pragma __pointer_size 64
#     else
#       pragma __required_pointer_size __save
#       pragma __required_pointer_size 64
#     endif
      typedef void * ___void__ptr64;
      typedef const void * __const_void__ptr64;
#     if __INITIAL_POINTER_SIZE
#       pragma __pointer_size __restore
#     else
#       pragma __required_pointer_size __restore
#     endif
#   else
      typedef unsigned __int64 ___void__ptr64;
#   endif
# endif
#endif

#if defined(__VMS)
/***********************************************************************
**  typedefs for return values of built-ins common for VAX and Alpha **
***********************************************************************/
typedef enum {_bbcci_oldval_1, _bbcci_oldval_0} _BBCCI_STATUS;
typedef enum {_bbssi_oldval_0, _bbssi_oldval_1} _BBSSI_STATUS;
typedef enum {_insqi_inserted_many, _insqi_not_inserted,
              _insqi_inserted_only} _INSQI_STATUS;
typedef enum {_insque_inserted_only, _insque_inserted_many} _INSQUE_STATUS;
typedef enum {_probe_not_accessible, _probe_accessible} _PROBE_STATUS;
typedef enum {_remqi_removed_more, _remqi_not_removed, _remqi_removed_empty,
              _remqi_empty} _REMQI_STATUS;
typedef enum {_remque_removed_more, _remque_removed_empty,
              _remque_empty} _REMQUE_STATUS;
#endif  /* defined(__VMS) */

/************************************************************************/
#if !defined(__VAX) && defined(__VMS)
/************************************************************************/

/*
** The following builtins were added in DEC C V5.2
*/
#if (__DECC_VER >= 50200000) || (__DECCXX_VER >= 50200000)
/************************************************************
** DEC C builtins for atomic/interlocked operations
*************************************************************
**
** These functions are not easily/efficiently implemented using inline
** assembly code, and so the compiler has builtin knowledge of their names
** and signatures in order to generate fast and reliable code (e.g. it may
** generate loops with tests arranged for branch prediction using
** out-of-line branch targets for failure paths).
**
** The _RETRY variants store a non-zero status if the operation completes
** successfully within the specified number of retries.  The variants
** without _RETRY do not return until they succeed.
** Note: On ia64, all of the _RETRY variants other than __LOCK_LONG_RETRY
** and __ACQUIRE_SEM_LONG_RETRY evaluate the retry count expression but
** do not otherwise use it, as the ia64 instructions underlying them
** cannot complete until they have succeeded.
**
** The following operations do not in themselves generate memory barriers.
** The user is expected to code explicitly any memory barriers where
** needed, using inline assembly code, e.g. asm("mb") or asm("wmb").
**
** Many of these builtins perform essentially the same operations as other
** builtins having similar names but with ATOMIC or INTERLOCKED appearing
** in a different position.  The following group, with ATOMIC or
** INTERLOCKED first, offers several usability improvements over the
** older versions with ATOMIC or INTERLOCKED appearing later in the name.
** The older versions are retained for compatibility only.
**
** Usability problems with the older builtins that are addressed by this
** newer set introduced in DEC C V5.2:
**
** The old versions did not provide the pre-updated value, and provided
** status even with infinite retry, in which case the builtin does not
** return until it succeeds.  The new versions provide the old value, and
** only provide status when the limited-retry variant is used.
**
** The older versions generated memory barriers both before and after the
** update.  The newer versions allow the user to control memory barrier
** placement for the low-level primitives.
**
** The older versions did not provide for efficient spinlock or counted
** semaphore implementation.  There are new higher-level spinlock and
** counted semaphore operations that include a test of the stored value
** to control success, and generate memory barriers just after a lock
** or resource is acquired, and just before it is released.
*/

/*
** The following set of functions may pass long pointers.
*/
# if __INITIAL_POINTER_SIZE
#   pragma __pointer_size 64
# endif

/*
** Atomic update of location with integer operation, returning previous
** contents.
** Note: _RETRY variants produce a warning on ia64, and always succeed.
*/
int __ATOMIC_ADD_LONG (volatile void *__addr, int __expr);
int __ATOMIC_ADD_LONG_RETRY (volatile void *__addr, int __expr, int __retry, int *__sts);
__int64 __ATOMIC_ADD_QUAD (volatile void *__addr, __int64 __expr);
__int64 __ATOMIC_ADD_QUAD_RETRY (volatile void *__addr, __int64 __expr, int __retry, int *__sts);
int __ATOMIC_AND_LONG (volatile void *__addr, int __expr);
int __ATOMIC_AND_LONG_RETRY (volatile void *__addr, int __expr, int __retry, int *__sts);
__int64 __ATOMIC_AND_QUAD (volatile void *__addr, __int64 __expr);
__int64 __ATOMIC_AND_QUAD_RETRY (volatile void *__addr, __int64 __expr, int __retry, int *__sts);
int __ATOMIC_OR_LONG (volatile void *__addr, int __expr);
int __ATOMIC_OR_LONG_RETRY (volatile void *__addr, int __expr, int __retry, int *__sts);
__int64 __ATOMIC_OR_QUAD (volatile void *__addr, __int64 __expr);
__int64 __ATOMIC_OR_QUAD_RETRY (volatile void *__addr, __int64 __expr, int __retry, int *__sts);
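/*
** A minimal usage sketch (illustrative only; the names below are not part
** of this header): atomically bumping a shared counter and setting a flag
** bit with the builtins declared above.
*/
#if 0
static volatile int __example_count = 0;
static volatile int __example_flags = 0;

static void __example_bump(void)
{
    int previous;

    previous = __ATOMIC_ADD_LONG(&__example_count, 1);  /* returns old value */
    (void) previous;

    __ATOMIC_OR_LONG(&__example_flags, 0x1);             /* set bit 0 atomically */
}
#endif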
/*
** Just like __ATOMIC_ADD, but using +1 or -1 implicitly.
*/
int __ATOMIC_INCREMENT_LONG (volatile void *__addr);
int __ATOMIC_INCREMENT_LONG_RETRY (volatile void *__addr, int __retry, int *__sts);
__int64 __ATOMIC_INCREMENT_QUAD (volatile void *__addr);
__int64 __ATOMIC_INCREMENT_QUAD_RETRY (volatile void *__addr, int __retry, int *__sts);
int __ATOMIC_DECREMENT_LONG (volatile void *__addr);
int __ATOMIC_DECREMENT_LONG_RETRY (volatile void *__addr, int __retry, int *__sts);
__int64 __ATOMIC_DECREMENT_QUAD (volatile void *__addr);
__int64 __ATOMIC_DECREMENT_QUAD_RETRY (volatile void *__addr, int __retry, int *__sts);

/*
** Atomic replacement of location's contents, returning previous contents.
*/
int __ATOMIC_EXCH_LONG (volatile void *__addr, int __expr);
int __ATOMIC_EXCH_LONG_RETRY (volatile void *__addr, int __expr, int __retry, int *__sts);
__int64 __ATOMIC_EXCH_QUAD (volatile void *__addr, __int64 __expr);
__int64 __ATOMIC_EXCH_QUAD_RETRY (volatile void *__addr, __int64 __expr, int __retry, int *__sts);

/*
** Interlocked "test for bit clear and then clear".  Returns non-zero if
** bit was already clear.
*/
int __INTERLOCKED_TESTBITCC_QUAD (volatile void *__addr, int __bitpos);
int __INTERLOCKED_TESTBITCC_QUAD_RETRY (volatile void *__addr, int __bitpos, int __retry, int *__sts);

/*
** Interlocked "test for bit set and then set".  Returns non-zero if bit
** was already set.
*/
int __INTERLOCKED_TESTBITSS_QUAD (volatile void *__addr, int __bitpos);
int __INTERLOCKED_TESTBITSS_QUAD_RETRY (volatile void *__addr, int __bitpos, int __retry, int *__sts);

/*
** Acquire/release binary spinlock based on low-order bit of a longword.
** NOTE: Memory barrier generated after lock, before unlock.  The _RETRY
** variant returns non-zero on success within the retry attempts.
** Note: On ia64, the _RETRY forms of __LOCK_LONG and __ACQUIRE_SEM_LONG
** are fully supported.
*/
void __LOCK_LONG (volatile void *__addr);
int __LOCK_LONG_RETRY (volatile void *__addr, int __retry);
void __UNLOCK_LONG (volatile void *__addr);

/*
** Acquire/release counted semaphore based on positive value of a longword
** indicating the number of resources available.
**
** NOTE: Memory barrier generated after acquisition, before release.
** The _RETRY variant returns non-zero on success within the retry attempts.
*/
void __ACQUIRE_SEM_LONG (volatile void *__addr);
int __ACQUIRE_SEM_LONG_RETRY (volatile void *__addr, int __retry);
void __RELEASE_SEM_LONG (volatile void *__addr);

/*
** Done with the set of functions which may pass long pointers.
*/
# if __INITIAL_POINTER_SIZE
#   pragma __pointer_size 32
# endif

#endif  /* (__DECC_VER >= 50200000) || (__DECCXX_VER >= 50200000) */
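/*
** A minimal usage sketch (illustrative only; the names below are not part
** of this header): guarding a critical section with the spinlock builtins
** declared above.  __LOCK_LONG spins until the lock is acquired;
** __UNLOCK_LONG releases it.  Barriers are generated as described above.
*/
#if 0
static volatile int __example_lock = 0;
static int __example_shared = 0;

static void __example_update(int value)
{
    __LOCK_LONG(&__example_lock);       /* acquire; barrier after acquisition */
    __example_shared = value;           /* critical section */
    __UNLOCK_LONG(&__example_lock);     /* barrier before release */
}

static int __example_try_update(int value)
{
    if (!__LOCK_LONG_RETRY(&__example_lock, 1000))  /* bounded spin */
        return 0;                                   /* could not acquire */
    __example_shared = value;
    __UNLOCK_LONG(&__example_lock);
    return 1;
}
#endif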
/*
** xxxQUE Mapping Tables
** These tables map the _PAL returned values into VAX C expected values.
*/
#define __xxxQUE_MAP_ALPHA_TO_VAX(z)  ((0x12 >> (z)) & 3)
#define __REMQxI_MAP_ALPHA_TO_VAX(z)  (((0x0c >> (z)) + 1) & 3)

/*
** All longword queue operations take 32-bit pointers.
*/
#ifdef __INITIAL_POINTER_SIZE
#   pragma __required_pointer_size __save
#   pragma __required_pointer_size 32
#endif

/*
** All 24 queue manipulation "PAL calls" are supported on both Alpha
** and ia64 platforms, using the same names and interfaces shown here.
** It just happens that the code generated is a PAL call on Alpha and
** a call to a VMS system service on ia64.
*/

/*
** Insertion into longword queues
*/
int __PAL_INSQHIL (void *__head, void *__new);      /* At head, interlocked */
int __PAL_INSQTIL (void *__head, void *__new);      /* At tail, interlocked */
int __PAL_INSQUEL (void *__pred, void *__new);      /* At pred, interlocked */
int __PAL_INSQUEL_D (void **__pred, void *__new);   /* At pred, indirectly  */
int __PAL_INSQHILR (void *__head, void *__new);     /* At head, interlocked */
int __PAL_INSQTILR (void *__head, void *__new);     /* At tail, interlocked */

/*
** Removal from longword queues
*/
int __PAL_REMQHIL (void *__head, void **__rem);     /* At head, interlocked */
int __PAL_REMQTIL (void *__head, void **__rem);     /* At tail, interlocked */
int __PAL_REMQUEL (void *__entr, void **__rem);     /* At entr, interlocked */
int __PAL_REMQUEL_D (void **__entr, void **__rem);  /* At entr, indirectly  */
int __PAL_REMQHILR (void *__head, void **__rem);    /* At head, interlocked */
int __PAL_REMQTILR (void *__head, void **__rem);    /* At tail, interlocked */

/*
** All quadword queue operations take 64-bit pointers.
*/
#ifdef __INITIAL_POINTER_SIZE
#   pragma __required_pointer_size 64
#endif

/*
** Insertion into quadword queues
*/
int __PAL_INSQHIQ (void *__head, void *__new);      /* At head, interlocked */
int __PAL_INSQTIQ (void *__head, void *__new);      /* At tail, interlocked */
int __PAL_INSQUEQ (void *__pred, void *__new);      /* At pred, interlocked */
int __PAL_INSQUEQ_D (void **__pred, void *__new);   /* At pred, indirectly  */
int __PAL_INSQHIQR (void *__head, void *__new);     /* At head, interlocked */
int __PAL_INSQTIQR (void *__head, void *__new);     /* At tail, interlocked */

/*
** Removal from quadword queues
*/
int __PAL_REMQHIQ (void *__head, void **__rem);     /* At head, interlocked */
int __PAL_REMQTIQ (void *__head, void **__rem);     /* At tail, interlocked */
int __PAL_REMQUEQ (void *__entr, void **__rem);     /* At entr, interlocked */
int __PAL_REMQUEQ_D (void **__entr, void **__rem);  /* At entr, indirectly  */
int __PAL_REMQHIQR (void *__head, void **__rem);    /* At head, interlocked */
int __PAL_REMQTIQR (void *__head, void **__rem);    /* At tail, interlocked */

/*
** Restore the original required_pointer_size
*/
#ifdef __INITIAL_POINTER_SIZE
#   pragma __required_pointer_size __restore
#endif
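/*
** A minimal usage sketch (illustrative only; the type and function names
** below are not part of this header): inserting an entry at the head of a
** self-relative longword queue and removing it again.  The queue header
** and entries must be suitably aligned for the interlocked queue
** operations.
*/
#if 0
typedef struct __example_qentry
{
    int flink;                              /* self-relative forward link  */
    int blink;                              /* self-relative backward link */
    int data;
} __EXAMPLE_QENTRY;

static void __example_queue_ops(__EXAMPLE_QENTRY *head,
                                __EXAMPLE_QENTRY *entry)
{
    void *removed;

    (void) __PAL_INSQHIL(head, entry);      /* insert at head, interlocked   */
    (void) __PAL_REMQHIL(head, &removed);   /* remove from head, interlocked */
}
#endif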
/*
** The following set of functions may pass long pointers.
*/
#if __INITIAL_POINTER_SIZE
#pragma __pointer_size 64
#endif

/*
** Move from Processor Register
** Note: None of these MFPR PAL calls are implemented in the ia64 compiler.
**       Some may be overridden in
*/
unsigned int __PAL_MFPR_ASTEN (void);       /* AST Enable                  */
unsigned int __PAL_MFPR_ASTSR (void);       /* AST Summary Register        */
void *       __PAL_MFPR_ESP (void);         /* Executive Stack Pointer     */
int          __PAL_MFPR_FEN (void);         /* Floating Point Enable       */
int          __PAL_MFPR_IPL (void);         /* Interrupt Priority Level    */
__int64      __PAL_MFPR_MCES (void);        /* Machine Check Error Summary */
void *       __PAL_MFPR_PCBB (void);        /* Priv Context Block Base     */
__int64      __PAL_MFPR_PRBR (void);        /* Processor Base Register     */
int          __PAL_MFPR_PTBR (void);        /* Page Table Base Register    */
void *       __PAL_MFPR_SCBB (void);        /* System Control Block Base   */
unsigned int __PAL_MFPR_SISR (void);        /* Software Int Summ Register  */
void *       __PAL_MFPR_SSP (void);         /* Supervisor Stack Pointer    */
__int64      __PAL_MFPR_TBCHK (void *__addr); /* Translation Buffer Check  */
void *       __PAL_MFPR_USP (void);         /* User Stack Pointer          */
void *       __PAL_MFPR_VPTB (void);        /* Virtual Page Table          */
__int64      __PAL_MFPR_WHAMI (void);       /* Who Am I                    */

/*
** Move to Processor Register
** Note: None of these MTPR PAL calls are implemented in the ia64 compiler.
**       Some may be overridden in
*/
void __PAL_MTPR_ASTEN (unsigned int __mask);   /* AST Enable                  */
void __PAL_MTPR_ASTSR (unsigned int __mask);   /* AST Summary Register        */
void __PAL_MTPR_DATFX (int __value);           /* Data Alignment Trap Fixup   */
void __PAL_MTPR_ESP (void *__addr);            /* Executive Stack Pointer     */
void __PAL_MTPR_FEN (int __value);             /* Floating Point Enable       */
void __PAL_MTPR_IPIR (__int64 __number);       /* Interprocessor Inter Req    */
int  __PAL_MTPR_IPL (int __value);             /* Interrupt Priority Level    */
void __PAL_MTPR_MCES (__int64 __value);        /* Machine Check Error Summary */
void __PAL_MTPR_PRBR (__int64 __value);        /* Processor Base Register     */
void __PAL_MTPR_SCBB (void *__addr);           /* System Control Block Base   */
void __PAL_MTPR_SIRR (int __level);            /* Software Inter Req Register */
void __PAL_MTPR_SSP (int *__addr);             /* Supervisor Stack Pointer    */
void __PAL_MTPR_TBIA (void);                   /* T Buffer Inval All          */
void __PAL_MTPR_TBIAP (void);                  /* T Buffer Inval All Process  */
void __PAL_MTPR_TBIS (___void__ptr64 __addr);  /* T Buffer Inval Single       */
void __PAL_MTPR_TBISD (___void__ptr64 __addr); /* T Buffer Inval Single Data  */
void __PAL_MTPR_TBISI (___void__ptr64 __addr); /* T Buffer Inval Single Instr */
void __PAL_MTPR_USP (void *__addr);            /* User Stack Pointer          */
void __PAL_MTPR_VPTB (void *__addr);           /* Virtual Page Table          */

/*
** Probe Read/Write Accessibility
** Note: Not a builtin in ia64 compiler, to be overridden in
**       See also ia64-specific __prober and __probew builtins.
*/
int __PAL_PROBER(const void *__base_address, int __offset, char __mode);
int __PAL_PROBEW(const void *__base_address, int __offset, char __mode);

/*
** Change Mode
** Note: Not a builtin in ia64 compiler, may be overridden in
*/
void __PAL_CHME(void);  /* Executive  */
void __PAL_CHMK(void);  /* Kernel     */
void __PAL_CHMS(void);  /* Supervisor */
void __PAL_CHMU(void);  /* User       */

/*
** Load/Store Quadword Physical
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
unsigned __int64 __PAL_LDQP(void *__addr);                            /* Load  */
void             __PAL_STQP(void *__addr, unsigned __int64 __value); /* Store */
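/*
** A minimal usage sketch (illustrative only; __example_at_ipl is not part
** of this header): raising the interrupt priority level around a small
** region of privileged code on Alpha, then restoring the previous IPL.
** These PAL calls execute in privileged mode only.
*/
#if 0
static void __example_at_ipl(int new_ipl)
{
    int saved_ipl;

    saved_ipl = __PAL_MFPR_IPL();           /* read current IPL  */
    (void) __PAL_MTPR_IPL(new_ipl);         /* raise IPL         */
    /* ... privileged work at elevated IPL ... */
    (void) __PAL_MTPR_IPL(saved_ipl);       /* restore saved IPL */
}
#endif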
/*
** Done with the set of functions which may pass long pointers.
*/
#if __INITIAL_POINTER_SIZE
#pragma __pointer_size 32
#endif

/*
** Cache Flush
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
void __PAL_CFLUSH(int __value);

/*
** Drain Aborts
** Note: Not on ia64
*/
void __PAL_DRAINA(void);

/*
** Read Processor Status
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
unsigned __int64 __PAL_RD_PS(void);

/*
** Swap AST Enable
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
unsigned int __PAL_SWASTEN(int __new_state_mask);

/*
** Write Processor Status Software Field
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
void __PAL_WR_PS_SW(int __mask);

/*
** Convert from G-Floating to Quadword
** Note: Not on ia64
*/
__int64 __CVTGQ(double __operand1);

/*
** Convert from G-Floating to F-Floating Chopped
** Note: Not on ia64
*/
float __CVTGF_C(double __operand1);

/*
** Add Floating Point Chopped
** Note: Not on ia64
*/
float  __ADDF_C(float __operand1, float __operand2);
double __ADDG_C(double __operand1, double __operand2);

/*
** Subtract Floating Point Chopped
** Note: Not on ia64
*/
float  __SUBF_C(float __operand1, float __operand2);
double __SUBG_C(double __operand1, double __operand2);

/*
** Multiply Floating Point Chopped
** Note: Not on ia64
*/
float  __MULF_C(float __operand1, float __operand2);
double __MULG_C(double __operand1, double __operand2);

/*
** Divide Floating Point Chopped
** Note: Not on ia64
*/
float  __DIVF_C(float __operand1, float __operand2);
double __DIVG_C(double __operand1, double __operand2);

/*
** Macros for translation from VAX C to DEC C ALPHA builtins
*/
#define _BBCCI(position, address) __TESTBITCCI((address), (position))
#define _BBSSI(position, address) __TESTBITSSI((address), (position))
#define _INSQHI(new_entry, head) \
        ((0x12 >> (__PAL_INSQHIL((head), (new_entry))+2)) & 3)
#define _INSQTI(new_entry, head) \
        ((0x12 >> (__PAL_INSQTIL((head), (new_entry))+2)) & 3)
#define _INSQUE(new_entry, predecessor) \
        ((0x12 >> (__PAL_INSQUEL((predecessor), (new_entry))+1)) & 3)
#define _REMQHI(head, removed_entry) \
        (((0x0c >> (__PAL_REMQHIL((head), (void **)(removed_entry))+1))+1) & 3)
#define _REMQTI(head, removed_entry) \
        (((0x0c >> (__PAL_REMQTIL((head), (void **)(removed_entry))+1))+1) & 3)
#define _REMQUE(entry, removed_entry) \
        ((0x12 >> (__PAL_REMQUEL((entry), (void **)(removed_entry))+1)) & 3)
#define _PROBER(mode, offset, address) __PAL_PROBER((address), (offset), (mode))
#define _PROBEW(mode, offset, address) __PAL_PROBEW((address), (offset), (mode))

/*
** __ALLOCA builtin - allocate n bytes from the stack
*/
void * __ALLOCA(unsigned int __x);
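/*
** A minimal usage sketch for __ALLOCA (illustrative only; the names below
** are not part of this header): building a temporary, NUL-terminated copy
** of a counted string in stack storage.  The storage is released
** automatically when the calling function returns.
*/
#if 0
#include <string.h>

static void __example_use_counted(const char *text, unsigned int len)
{
    char *tmp = (char *) __ALLOCA(len + 1);     /* stack allocation */

    memcpy(tmp, text, len);
    tmp[len] = '\0';
    /* ... use tmp as an ordinary C string ... */
}
#endif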
/*
** The remaining functions can handle accepting long pointers.
*/
#if __INITIAL_POINTER_SIZE
#pragma __pointer_size 64
#endif

/*
** UMULH Builtin - Unsigned Quadword Multiply High
*/
unsigned __int64 __UMULH(unsigned __int64 __oper1, unsigned __int64 __oper2);

/*
** op_ATOMIC_size Builtins
**
** Note: There is one optional retry count parameter
** Note: Retry form not on ia64
*/
int __ADD_ATOMIC_LONG(void *__addr, int __expr, ...);
int __ADD_ATOMIC_QUAD(void *__addr, int __expr, ...);
int __AND_ATOMIC_LONG(void *__addr, int __expr, ...);
int __AND_ATOMIC_QUAD(void *__addr, int __expr, ...);
int __OR_ATOMIC_LONG(void *__addr, int __expr, ...);
int __OR_ATOMIC_QUAD(void *__addr, int __expr, ...);

/*
** TESTBITxxI
**
** Note: There is one optional retry count parameter
** Note: Retry form not on ia64
*/
int __TESTBITCCI(void *__addr, int __position, ...);
int __TESTBITSSI(void *__addr, int __position, ...);

/*
** Add Aligned Word Interlocked
*/
int __ADAWI(short __src, volatile short *__dest);

/*
** Trap Barrier Instruction
** Note: Not on ia64
*/
void __TRAPB(void);

/*
** Read Cycle Counter
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
unsigned __int64 __RPCC(void);

/*
** Halt the Processor.  (Privileged)
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
void _HALT(void);
void __PAL_HALT(void);

/*
** Generate Trap
** Note: Not a builtin in ia64 compiler, to be overridden in
** Note: argument must be compile-time constant.
*/
void __PAL_GENTRAP(unsigned __int64 __encoded_software_trap);

/*
** Breakpoint
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
void __PAL_BPT(void);

/*
** Bugcheck
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
#if defined __ia64 || (__DECC_VER >= 70130074) || (__DECCXX_VER >= 70130074)
void __PAL_BUGCHK(unsigned __int64 code);
#else
void __PAL_BUGCHK(void);
#endif

/*
** Swap Privileged Context
** Note: Not on ia64
*/
void __PAL_SWPCTX(void *__addr);

/*
** Copy Sign
** Note: Not on ia64
*/
float  __CPYSF (float __operand1, float __operand2);
double __CPYS (double __operand1, double __operand2);

/*
** Copy Sign Negate
** Note: Not on ia64
*/
float  __CPYSNF(float __operand1, float __operand2);
double __CPYSN (double __operand1, double __operand2);

/*
** Copy Sign Exponent
** Note: Not on ia64
*/
float  __CPYSEF(float __operand1, float __operand2);
double __CPYSE (double __operand1, double __operand2);

/*
** Convert from T-Floating to Quadword
** Note: Not on ia64
*/
__int64 __CVTTQ(double __operand1);

/*
** Convert from T-Floating to S-Floating Chopped
** Note: Not on ia64
*/
float __CVTTS_C(double __operand1);

/*
** Add Floating Point Chopped
** Note: Not on ia64
*/
float  __ADDS_C(float __operand1, float __operand2);
double __ADDT_C(double __operand1, double __operand2);

/*
** Subtract Floating Point Chopped
** Note: Not on ia64
*/
float  __SUBS_C(float __operand1, float __operand2);
double __SUBT_C(double __operand1, double __operand2);

/*
** Multiply Floating Point Chopped
** Note: Not on ia64
*/
float  __MULS_C(float __operand1, float __operand2);
double __MULT_C(double __operand1, double __operand2);

/*
** Divide Floating Point Chopped
** Note: Not on ia64
*/
float  __DIVS_C(float __operand1, float __operand2);
double __DIVT_C(double __operand1, double __operand2);

/*
** Memory Barrier
*/
void __MB(void);

/*
** Instruction Memory Barrier
** Note: Not a builtin in ia64 compiler, to be overridden in
*/
#if defined(__ia64)
void __PAL_IMB(void * instr, unsigned __int64 length);
#else
void __PAL_IMB(void);
#endif  /* __ia64 */
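/*
** A minimal usage sketch for __UMULH (illustrative only; the helper below
** is not part of this header): forming the full 128-bit product of two
** unsigned quadwords as a high/low pair.
*/
#if 0
static void __example_mul128(unsigned __int64 a, unsigned __int64 b,
                             unsigned __int64 *hi, unsigned __int64 *lo)
{
    *hi = __UMULH(a, b);    /* upper 64 bits of the 128-bit product    */
    *lo = a * b;            /* lower 64 bits (ordinary multiplication) */
}
#endif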
/*
** Compare, Store Long/Quad
** Note: On ia64, these produce a warning if the compiler can verify
** that the source and destination addresses are the same, else
** they produce an error.  These builtins should be considered
** deprecated, to be replaced by __CMP_SWAP_LONG and __CMP_SWAP_QUAD
** when they become available on Alpha.
*/
int __CMP_STORE_LONG (volatile void *__src, int __oldval, int __newval,
                      volatile void *__dst);
int __CMP_STORE_QUAD (volatile void *__src, __int64 __oldval, __int64 __newval,
                      volatile void *__dst);

/*
** Compare and swap (__CMP_SWAP* and _Interlocked*), and
** __RETURN_ADDRESS - IA64 and Alpha compilers after V6.5.
*/
#if defined(__ia64) || \
    (defined(__alpha) && (__DECC_VER >= 60600000 || \
                          __DECCXX_VER >= 60600000) )

/*
** Compare and swap returning status (1 or 0).
*/
int __CMP_SWAP_LONG (volatile void *__addr, int __comparand, int __newval);
int __CMP_SWAP_QUAD (volatile void *__addr, __int64 __comparand, __int64 __newval);

/*
** OS Related Itanium Built-ins defined by Intel's
** for compare and swap returning the old value with acquire or
** release semantics.
**
** Unordered data accesses may become visible in any order.  Acquire data
** accesses guarantee that they are made visible prior to all subsequent
** data accesses.  Release data accesses guarantee that all previous data
** accesses are made visible prior to being made visible themselves.
**
** See also __CMP_SWAP_[LONG|QUAD]_[ACQ|REL] below.
*/
unsigned __int64 _InterlockedCompareExchange_acq
        (unsigned int *__Destination, unsigned __int64 __Newval,
         unsigned __int64 __Comparand);
unsigned __int64 _InterlockedCompareExchange64_acq
        (unsigned __int64 *__Destination, unsigned __int64 __Newval,
         unsigned __int64 __Comparand);
unsigned __int64 _InterlockedCompareExchange_rel
        (unsigned int *__Destination, unsigned __int64 __Newval,
         unsigned __int64 __Comparand);
unsigned __int64 _InterlockedCompareExchange64_rel
        (unsigned __int64 *__Destination, unsigned __int64 __Newval,
         unsigned __int64 __Comparand);

/*
** Compare and swap returning status (1 or 0) with acquire or
** release semantics.
**
** Nota Bene:
**      The _Interlocked* built-ins return the old value and take the
**      newval and comparand arguments in a different order than the
**      __CMP_SWAP* built-ins that return status (1 or 0).
*/
int __CMP_SWAP_LONG_ACQ (volatile void *__addr, int __comparand, int __newval);
int __CMP_SWAP_QUAD_ACQ (volatile void *__addr, __int64 __comparand, __int64 __newval);
int __CMP_SWAP_LONG_REL (volatile void *__addr, int __comparand, int __newval);
int __CMP_SWAP_QUAD_REL (volatile void *__addr, __int64 __comparand, __int64 __newval);
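/*
** A minimal usage sketch for the compare-and-swap builtins above
** (illustrative only; __example_fetch_max is not part of this header):
** atomically updating a shared longword to the maximum of its current
** value and a new candidate value.
*/
#if 0
static void __example_fetch_max(volatile int *addr, int candidate)
{
    int observed;

    do
    {
        observed = *addr;                   /* sample the current value   */
        if (observed >= candidate)
            return;                         /* already at least candidate */
    } while (!__CMP_SWAP_LONG(addr, observed, candidate));  /* 1 = swapped */
}
#endif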
/*
** Produce the value of R26 (Alpha) or B0 (IA64) on entry to the
** function containing a call to this builtin.  Cannot be invoked
** from a function with non-standard linkage.
*/
__int64 __RETURN_ADDRESS(void);

#endif

/*
** The following intrinsics were added in DEC C V5.7 and DIGITAL C++ V6.0
*/
#if (__DECC_VER >= 50700000) || (__DECCXX_VER >= 60000000)
__int64 _popcnt  (unsigned __int64);
__int64 _poppar  (unsigned __int64);
__int64 _leadz   (unsigned __int64);
__int64 _trailz  (unsigned __int64);
#   pragma intrinsic (_popcnt, _poppar, _leadz, _trailz)
#endif  /* (__DECC_VER >= 50700000) || (__DECCXX_VER >= 60000000) */

/*
** Reset the pointer size prior to leaving this section
*/
#if __INITIAL_POINTER_SIZE
#pragma __pointer_size 32
#endif

/************************************************************************/
#endif  /* (__ALPHA || __ia64) && __VMS */
/************************************************************************/

/************************************************************************/
#if !defined(__VAX) && defined(__VMS) && defined(__X_FLOAT)
/************************************************************************/

/*
** Convert from X-Floating to Quadword
** Note: Not on ia64
*/
__int64 __CVTXQ(long double __operand1);

/*
** Convert from X-Floating to T-Floating Chopped
** Note: Not on ia64
*/
double __CVTXT_C(long double __operand1);

/*
** Add Floating Point Chopped
** Note: Not on ia64
*/
long double __ADDX_C(long double __operand1, long double __operand2);

/*
** Subtract Floating Point Chopped
** Note: Not on ia64
*/
long double __SUBX_C(long double __operand1, long double __operand2);

/*
** Multiply Floating Point Chopped
** Note: Not on ia64
*/
long double __MULX_C(long double __operand1, long double __operand2);

/*
** Divide Floating Point Chopped
** Note: Not on ia64
*/
long double __DIVX_C(long double __operand1, long double __operand2);

/************************************************************************/
#endif  /* (__ALPHA || __ia64) && __VMS && __X_FLOAT */
/************************************************************************/

/************************************************************************/
#if defined(__VAX) && defined(__VMS) && defined(__DECC)
/************************************************************************/

/*
** The following builtin was added in DEC C V6.0
*/
#if (__DECC_VER >= 60000000)
void * __ALLOCA(unsigned int __x);
#endif

/*
** The following builtins were added in DEC C V5.2
*/
#if (__DECC_VER >= 50200000)

/*
** Processor Register Management
*/
typedef enum {_value_replaced, _value_not_replaced} _MTPR_STATUS;
void _MFPR(int __register_num, void *__destination);
_MTPR_STATUS _MTPR(int __new_value, int __register_num);

/*
** Processor control
*/
void _HALT(void);

/*
** General Register query
*/
typedef enum {_R0, _R1, _R2, _R3, _R4, _R5, _R6, _R7, _R8, _R9, _R10,
              _R11, _AP, _FP, _SP, _PC} _REGISTER_NUMBER;
int _READ_GPR(_REGISTER_NUMBER __general_register_number);

#endif  /* __DECC_VER >= 50200000 */

/*
** Add Aligned Word Interlocked
*/
typedef enum {_adawi_sum_neg = -1, _adawi_sum_zero, _adawi_sum_pos} _ADAWI_STATUS;
_ADAWI_STATUS _ADAWI(short __src, short *__dest);

/*
** Interlocked branch group
*/
_BBCCI_STATUS _BBCCI(int __position, void *__addr);
_BBSSI_STATUS _BBSSI(int __position, void *__addr);

/*
** Find First bit group
*/
typedef enum {_ff_bit_not_found, _ff_bit_found} _FF_STATUS;
_FF_STATUS _FFC(int __start, char __size, const void *__base, int *__position);
_FF_STATUS _FFS(int __start, char __size, const void *__base, int *__position);
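/*
** A minimal usage sketch for the Find First builtins above (illustrative
** only; __example_first_set is not part of this header): locating the
** lowest set bit in a longword bit field.
*/
#if 0
static int __example_first_set(int mask)
{
    int position;

    if (_FFS(0, 32, &mask, &position) == _ff_bit_found)
        return position;        /* bit number of the first set bit */
    return -1;                  /* no bits set in the field        */
}
#endif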
/*
** Insert into a queue group
*/
_INSQI_STATUS _INSQHI(void *__new_entry, void *__head);
_INSQI_STATUS _INSQTI(void *__new_entry, void *__head);
_INSQUE_STATUS _INSQUE(void *__new_entry, void *__predecessor);

/*
** Character processing group
*/
unsigned short _LOCC(char __target, unsigned short __length,
                     const char *__string, ...);
void _MOVC3(unsigned short __length, const char *__src, char *__dest, ...);
void _MOVC5(unsigned short __srclen, const char *__src, char __fill,
            unsigned short __destlen, char *__dest, ...);
unsigned short _SCANC(unsigned short __length, const char *__string,
                      const char *__table, char __mask, ...);
unsigned short _SKPC(char __target, unsigned short __length,
                     const char *__string, ...);
unsigned short _SPANC(unsigned short __length, const char *__string,
                      const char *__table, char __mask, ...);

/*
** Obtain the program status longword
*/
void _MOVPSL(void *__psl);

/*
** Probe memory group
*/
_PROBE_STATUS _PROBER(char __mode, unsigned short __length, const void *__addr);
_PROBE_STATUS _PROBEW(char __mode, unsigned short __length, const void *__addr);

/*
** Remove from a queue group
*/
_REMQI_STATUS _REMQHI(void *__head, void *__removed_entry);
_REMQI_STATUS _REMQTI(void *__head, void *__removed_entry);
_REMQUE_STATUS _REMQUE(void *__entry, void *__removed_entry);

/************************************************************************/
#endif  /* __VAX && __VMS */
/************************************************************************/

/*
** Restore the user's pointer context
*/
#if __INITIAL_POINTER_SIZE
#   pragma __pointer_size __restore
#endif

#if defined(__ia64) && defined(__VMS)
#   include
#endif  /* __ia64 && __VMS */

#ifdef __cplusplus
}
#endif

#pragma __standard

#endif  /* __BUILTINS_LOADED */