#if defined(__x86_64__) || (defined(__PPC__) && defined(__LITTLE_ENDIAN__))
# define U8TOU32(p) (*(const u32 *)(p))
# define U32TO8(p,v) (*(u32 *)(p) = (v))
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
# define U8TOU32(p) ({u32 ret; asm ("lwbrx %0,0,%1":"=r"(ret):"b"(p)); ret; })
# define U32TO8(p,v) asm ("stwbrx %0,0,%1"::"r"(v),"b"(p):"memory")
#elif defined(__s390x__)
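
Aside, not part of the patch: U8TOU32/U32TO8 read and write the little-endian 32-bit words Poly1305 is defined over. Where the native byte order already matches, a plain load/store suffices; on big-endian PowerPC, lwbrx ("load word byte-reverse indexed") and stwbrx do the swap in a single instruction. A minimal portable sketch of the same semantics, with illustrative names:

#include <stdint.h>

/* What U8TOU32/U32TO8 compute, spelled out with shifts: a 32-bit
 * little-endian load and store that work on any byte order. */
static uint32_t u8tou32(const unsigned char *p)
{
    return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
           ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

static void u32to8(unsigned char *p, uint32_t v)
{
    p[0] = (unsigned char)(v & 0xff);
    p[1] = (unsigned char)((v >> 8) & 0xff);
    p[2] = (unsigned char)((v >> 16) & 0xff);
    p[3] = (unsigned char)((v >> 24) & 0xff);
}

The next hunk applies the same fix to the per-architecture constants that select the FPU mode:
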
/* "round toward zero (truncate), mask all exceptions" */
#if defined(__x86_64__)
static const u32 mxcsr = 0x7f80;
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
static const u64 one = 1;
#elif defined(__s390x__)
static const u32 fpc = 1;
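
All three constants encode the quoted mode. On x86-64, MXCSR bits 7-12 are the six exception mask bits (0x1f80) and bits 13-14 are the rounding-control field, where 11b selects round toward zero, hence 0x7f80. On PowerPC, the integer 1 is reinterpreted as a double so its bit pattern can be written into the FPSCR, whose two least-significant bits form the RN field; RN = 01b is round toward zero, and the remaining zero bits leave every FP exception disabled. On s390x, the FPC's rounding-mode field likewise takes 1 for round toward zero. A compile-time check of the x86-64 value, purely illustrative:

#include <assert.h>

/* MXCSR layout: bits 7-12 mask the six SSE exceptions, bits 13-14
 * are the rounding control, 11b = round toward zero (truncate). */
#define MXCSR_ALL_MASKED 0x1f80u   /* IM|DM|ZM|OM|UM|PM */
#define MXCSR_RC_TRUNC   0x6000u   /* RC = 11b */

static_assert((MXCSR_ALL_MASKED | MXCSR_RC_TRUNC) == 0x7f80,
              "matches the mxcsr constant in the patch");

The following hunk saves the live control state and installs that mode:
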
asm volatile ("stmxcsr %0":"=m"(mxcsr_orig));
asm volatile ("ldmxcsr %0"::"m"(mxcsr));
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
double fpscr_orig, fpscr = *(double *)&one;
asm volatile ("mffs %0":"=f"(fpscr_orig));
*/
#if defined(__x86_64__)
asm volatile ("ldmxcsr %0"::"m"(mxcsr_orig));
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
asm volatile ("mtfsf 255,%0"::"f"(fpscr_orig));
#elif defined(__s390x__)
asm volatile ("lfpc %0"::"m"(fpc_orig));
asm volatile ("stmxcsr %0":"=m"(mxcsr_orig));
asm volatile ("ldmxcsr %0"::"m"(mxcsr));
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
double fpscr_orig, fpscr = *(double *)&one;
asm volatile ("mffs %0":"=f"(fpscr_orig));
*/
#if defined(__x86_64__)
asm volatile ("ldmxcsr %0"::"m"(mxcsr_orig));
-#elif defined(__PPC__)
+#elif defined(__PPC__) || defined(__POWERPC__)
asm volatile ("mtfsf 255,%0"::"f"(fpscr_orig));
#elif defined(__s390x__)
asm volatile ("lfpc %0"::"m"(fpc_orig));