media/libvpx/vpx_ports/x86.h

Wed, 31 Dec 2014 06:09:35 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Wed, 31 Dec 2014 06:09:35 +0100
changeset 0
6474c204b198
permissions
-rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.

michael@0 1 /*
michael@0 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
michael@0 3 *
michael@0 4 * Use of this source code is governed by a BSD-style license
michael@0 5 * that can be found in the LICENSE file in the root of the source
michael@0 6 * tree. An additional intellectual property rights grant can be found
michael@0 7 * in the file PATENTS. All contributing project authors may
michael@0 8 * be found in the AUTHORS file in the root of the source tree.
michael@0 9 */
michael@0 10
michael@0 11
michael@0 12 #ifndef VPX_PORTS_X86_H
michael@0 13 #define VPX_PORTS_X86_H
michael@0 14 #include <stdlib.h>
michael@0 15 #include "vpx_config.h"
michael@0 16
/* CPU vendor identifiers returned by vpx_x86_vendor().
 * VPX_CPU_UNKNOWN is -1 so the first real vendor starts at 0;
 * VPX_CPU_LAST is a count sentinel, not an actual vendor. */
typedef enum {
  VPX_CPU_UNKNOWN = -1,
  VPX_CPU_AMD,
  VPX_CPU_AMD_OLD,
  VPX_CPU_CENTAUR,
  VPX_CPU_CYRIX,
  VPX_CPU_INTEL,
  VPX_CPU_NEXGEN,
  VPX_CPU_NSC,
  VPX_CPU_RISE,
  VPX_CPU_SIS,
  VPX_CPU_TRANSMETA,
  VPX_CPU_TRANSMETA_OLD,
  VPX_CPU_UMC,
  VPX_CPU_VIA,

  VPX_CPU_LAST
} vpx_cpu_t;
michael@0 35
/* cpuid(func, ax, bx, cx, dx): execute CPUID with EAX=func and store
 * EAX/EBX/ECX/EDX into the four lvalue arguments.  One variant per
 * toolchain: GNU-style asm (GCC/Clang/Android), Sun Studio asm, and
 * MSVC (__cpuid intrinsic on x64, __asm on 32-bit).
 * NOTE(review): ECX is never initialized before CPUID, so leaves that
 * take a sub-leaf in ECX (e.g. leaf 7) cannot be queried reliably with
 * this macro -- confirm before using it for such leaves. */
#if defined(__GNUC__) && __GNUC__ || defined(__ANDROID__)
#if ARCH_X86_64
#define cpuid(func,ax,bx,cx,dx)\
  __asm__ __volatile__ (\
                        "cpuid           \n\t" \
                        : "=a" (ax), "=b" (bx), "=c" (cx), "=d" (dx) \
                        : "a" (func));
#else
/* 32-bit: EBX may be reserved (PIC GOT pointer), so CPUID's EBX result
 * is shuffled through EDI rather than declaring an EBX output. */
#define cpuid(func,ax,bx,cx,dx)\
  __asm__ __volatile__ (\
                        "mov %%ebx, %%edi   \n\t" \
                        "cpuid              \n\t" \
                        "xchg %%edi, %%ebx  \n\t" \
                        : "=a" (ax), "=D" (bx), "=c" (cx), "=d" (dx) \
                        : "a" (func));
#endif
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC) /* end __GNUC__ or __ANDROID__*/
#if ARCH_X86_64
/* Sun Studio x64: preserve RBX by swapping it with RSI around CPUID;
 * the EBX result is copied out via EDI. */
#define cpuid(func,ax,bx,cx,dx)\
  asm volatile (\
                "xchg %rsi, %rbx \n\t" \
                "cpuid \n\t" \
                "movl %ebx, %edi \n\t" \
                "xchg %rsi, %rbx \n\t" \
                : "=a" (ax), "=D" (bx), "=c" (cx), "=d" (dx) \
                : "a" (func));
#else
/* Sun Studio x86: save/restore EBX on the stack around CPUID. */
#define cpuid(func,ax,bx,cx,dx)\
  asm volatile (\
                "pushl %ebx \n\t" \
                "cpuid \n\t" \
                "movl %ebx, %edi \n\t" \
                "popl %ebx \n\t" \
                : "=a" (ax), "=D" (bx), "=c" (cx), "=d" (dx) \
                : "a" (func));
#endif
#else /* end __SUNPRO__ */
#if ARCH_X86_64
/* MSVC x64: no inline asm; use the __cpuid intrinsic. */
void __cpuid(int CPUInfo[4], int info_type);
#pragma intrinsic(__cpuid)
#define cpuid(func,a,b,c,d) do{\
    int regs[4];\
    __cpuid(regs,func); a=regs[0]; b=regs[1]; c=regs[2]; d=regs[3];\
  } while(0)
#else
/* 32-bit MSVC inline asm. */
#define cpuid(func,a,b,c,d)\
  __asm mov eax, func\
  __asm cpuid\
  __asm mov a, eax\
  __asm mov b, ebx\
  __asm mov c, ecx\
  __asm mov d, edx
#endif
#endif /* end others */
michael@0 90
/* Capability bits reported by x86_simd_caps(). */
#define HAS_MMX     0x01
#define HAS_SSE     0x02
#define HAS_SSE2    0x04
#define HAS_SSE3    0x08
#define HAS_SSSE3   0x10
#define HAS_SSE4_1  0x20
#define HAS_AVX     0x40
#define HAS_AVX2    0x80
#ifndef BIT
/* Parenthesize the argument so expression arguments whose operators bind
 * more loosely than << (e.g. BIT(a ? x : y)) expand correctly. */
#define BIT(n) (1 << (n))
#endif
michael@0 102
/* Detect the host CPU's SIMD capabilities at run time and return them as
 * a bitmask of the HAS_* flags.  The result may be fully overridden via
 * the VPX_SIMD_CAPS environment variable, or filtered through
 * VPX_SIMD_CAPS_MASK (both parsed with strtol base 0, so hex works). */
static int
x86_simd_caps(void) {
  unsigned int flags = 0;
  unsigned int mask = ~0;  /* default mask: keep every detected flag */
  unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx;
  char *env;
  (void)reg_ebx;

  /* See if the CPU capabilities are being overridden by the environment */
  env = getenv("VPX_SIMD_CAPS");

  if (env && *env)
    return (int)strtol(env, NULL, 0);

  env = getenv("VPX_SIMD_CAPS_MASK");

  if (env && *env)
    mask = strtol(env, NULL, 0);

  /* Ensure that the CPUID instruction supports extended features */
  cpuid(0, reg_eax, reg_ebx, reg_ecx, reg_edx);

  if (reg_eax < 1)
    return 0;

  /* Get the standard feature flags (CPUID leaf 1: EDX/ECX bits) */
  cpuid(1, reg_eax, reg_ebx, reg_ecx, reg_edx);

  if (reg_edx & BIT(23)) flags |= HAS_MMX;

  if (reg_edx & BIT(25)) flags |= HAS_SSE; /* aka xmm */

  if (reg_edx & BIT(26)) flags |= HAS_SSE2; /* aka wmt */

  if (reg_ecx & BIT(0)) flags |= HAS_SSE3;

  if (reg_ecx & BIT(9)) flags |= HAS_SSSE3;

  if (reg_ecx & BIT(19)) flags |= HAS_SSE4_1;

  if (reg_ecx & BIT(28)) flags |= HAS_AVX;

  /* NOTE(review): AVX2 is reported in CPUID leaf 7 (sub-leaf 0) EBX bit 5,
   * but reg_ebx here still holds leaf 1's EBX (brand/CLFLUSH/APIC info),
   * so this test is bogus; fixing it needs a sub-leaf-capable cpuid macro
   * (later upstream libvpx added cpuid_count for this).  AVX/AVX2 detection
   * should also verify OS XSAVE support (OSXSAVE + XGETBV) -- confirm. */
  if (reg_ebx & BIT(5)) flags |= HAS_AVX2;

  return flags & mask;
}
michael@0 149
michael@0 150 vpx_cpu_t vpx_x86_vendor(void);
michael@0 151
#if ARCH_X86_64 && defined(_MSC_VER)
/* MSVC x64 has no inline asm; declare the __rdtsc intrinsic instead. */
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
#endif
/* Read the low 32 bits of the CPU time-stamp counter (RDTSC).  Only EAX
 * is kept, so the value wraps; suitable for short relative timing only.
 * NOTE(review): RDTSC also writes EDX, which the GNU/Sun asm does not
 * list as an output or clobber -- confirm this is safe on the targeted
 * compilers. */
static unsigned int
x86_readtsc(void) {
#if defined(__GNUC__) && __GNUC__
  unsigned int tsc;
  __asm__ __volatile__("rdtsc\n\t":"=a"(tsc):);
  return tsc;
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
  unsigned int tsc;
  asm volatile("rdtsc\n\t":"=a"(tsc):);
  return tsc;
#else
#if ARCH_X86_64
  return (unsigned int)__rdtsc();
#else
  /* 32-bit MSVC: RDTSC leaves its low word in EAX, which is also the
   * return register, so no explicit return statement is written. */
  __asm rdtsc;
#endif
#endif
}
michael@0 174
michael@0 175
/* x86_pause_hint(): emit the PAUSE instruction, used inside spin-wait
 * loops to reduce power draw and pipeline flushes on SMT CPUs. */
#if defined(__GNUC__) && __GNUC__
#define x86_pause_hint()\
  __asm__ __volatile__ ("pause \n\t")
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#define x86_pause_hint()\
  asm volatile ("pause \n\t")
#else
#if ARCH_X86_64
/* MSVC x64: the _mm_pause() intrinsic (its header must already be in
 * scope at the call site -- NOTE(review): not included by this file). */
#define x86_pause_hint()\
  _mm_pause();
#else
#define x86_pause_hint()\
  __asm pause
#endif
#endif
michael@0 191
/* x87 FPU control-word accessors, one variant per toolchain.  The
 * control word's precision-control field (bits 8-9) selects the mantissa
 * precision; see x87_set_double_precision() below. */
#if defined(__GNUC__) && __GNUC__
static void
x87_set_control_word(unsigned short mode) {
  /* "*&mode" forces a memory operand; fldcw only accepts m16. */
  __asm__ __volatile__("fldcw %0" : : "m"(*&mode));
}
static unsigned short
x87_get_control_word(void) {
  unsigned short mode;
  __asm__ __volatile__("fstcw %0\n\t":"=m"(*&mode):);
  return mode;
}
#elif defined(__SUNPRO_C) || defined(__SUNPRO_CC)
static void
x87_set_control_word(unsigned short mode) {
  asm volatile("fldcw %0" : : "m"(*&mode));
}
static unsigned short
x87_get_control_word(void) {
  unsigned short mode;
  asm volatile("fstcw %0\n\t":"=m"(*&mode):);
  return mode;
}
#elif ARCH_X86_64
/* No fldcw intrinsics on Windows x64, punt to external asm */
extern void vpx_winx64_fldcw(unsigned short mode);
extern unsigned short vpx_winx64_fstcw(void);
#define x87_set_control_word vpx_winx64_fldcw
#define x87_get_control_word vpx_winx64_fstcw
#else
/* 32-bit MSVC inline-asm variants. */
static void
x87_set_control_word(unsigned short mode) {
  __asm { fldcw mode }
}
static unsigned short
x87_get_control_word(void) {
  unsigned short mode;
  __asm { fstcw mode }
  return mode;
}
#endif
michael@0 232
/* Switch the x87 FPU to double (53-bit significand) precision and return
 * the previous control word so the caller can restore it afterwards. */
static unsigned short
x87_set_double_precision(void) {
  const unsigned short old_mode = x87_get_control_word();
  unsigned short new_mode = old_mode;
  new_mode &= ~0x300;  /* clear precision-control field (bits 8-9) */
  new_mode |= 0x200;   /* select double precision */
  x87_set_control_word(new_mode);
  return old_mode;
}
michael@0 239
michael@0 240
michael@0 241 extern void vpx_reset_mmx_state(void);
michael@0 242 #endif
michael@0 243

mercurial