/*===------------- avx512vbmi2intrin.h - VBMI2 intrinsics ------------------===
 *
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error "Never use <avx512vbmi2intrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512VBMI2INTRIN_H
#define __AVX512VBMI2INTRIN_H

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__, __target__("avx512vbmi2")))
static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_compress_epi16(__m512i __S, __mmask32 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi) __D,
                                                      (__v32hi) __S,
                                                      __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_compress_epi16(__mmask32 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi) __D,
                                                      (__v32hi) _mm512_setzero_hi(),
                                                      __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_compress_epi8(__m512i __S, __mmask64 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi) __D,
                                                      (__v64qi) __S,
                                                      __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_compress_epi8(__mmask64 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi) __D,
                                                      (__v64qi) _mm512_setzero_qi(),
                                                      __U);
}
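
/*
 * Usage sketch (illustrative editorial note, not part of the upstream header;
 * assumes <immintrin.h> and -mavx512vbmi2): compress packs the elements of
 * __D whose mask bit is 1 into the low lanes of the result, in order; the
 * remaining lanes come from __S (mask form) or are zeroed (maskz form).
 *
 *   __m512i v   = _mm512_set1_epi16(42);
 *   // low 4 lanes hold the 4 selected words; the other 28 lanes are zero
 *   __m512i lo4 = _mm512_maskz_compress_epi16(0x0000000F, v);
 */
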
static __inline__ void __DEFAULT_FN_ATTRS
_mm512_mask_compressstoreu_epi16(void *__P, __mmask32 __U, __m512i __D)
{
  __builtin_ia32_compressstorehi512_mask ((__v32hi *) __P, (__v32hi) __D,
                                          __U);
}

static __inline__ void __DEFAULT_FN_ATTRS
_mm512_mask_compressstoreu_epi8(void *__P, __mmask64 __U, __m512i __D)
{
  __builtin_ia32_compressstoreqi512_mask ((__v64qi *) __P, (__v64qi) __D,
                                          __U);
}
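
/*
 * Usage sketch (illustrative): compress-store writes only the popcount(__U)
 * selected elements, packed contiguously, to unaligned memory at __P; memory
 * past the stored elements is left untouched. A hypothetical filter step,
 * with v a hypothetical input (the compare intrinsic is from AVX512BW):
 *
 *   short out[32];
 *   __mmask32 m = _mm512_cmpgt_epi16_mask(v, _mm512_setzero_si512());
 *   _mm512_mask_compressstoreu_epi16(out, m, v);  // stores popcount(m) words
 */
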
static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_expand_epi16(__m512i __S, __mmask32 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __D,
                                                    (__v32hi) __S,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_expand_epi16(__mmask32 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __D,
                                                    (__v32hi) _mm512_setzero_hi(),
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_expand_epi8(__m512i __S, __mmask64 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __D,
                                                    (__v64qi) __S,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_expand_epi8(__mmask64 __U, __m512i __D)
{
  return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __D,
                                                    (__v64qi) _mm512_setzero_qi(),
                                                    __U);
}
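
/*
 * Usage sketch (illustrative): expand is the inverse of compress. The low
 * popcount(__U) elements of __D are scattered, in order, into the lanes whose
 * mask bit is 1; every other lane comes from __S (mask form) or is zeroed
 * (maskz form). E.g., for a hypothetical vector v:
 *
 *   // lane 0 gets element 0 of v, lane 2 gets element 1, all others zero
 *   __m512i spread = _mm512_maskz_expand_epi16(0x00000005, v);
 */
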
static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_expandloadu_epi16(__m512i __S, __mmask32 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *)__P,
                                                        (__v32hi) __S,
                                                        __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_expandloadu_epi16(__mmask32 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *)__P,
                                                        (__v32hi) _mm512_setzero_hi(),
                                                        __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_expandloadu_epi8(__m512i __S, __mmask64 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *)__P,
                                                        (__v64qi) __S,
                                                        __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_expandloadu_epi8(__mmask64 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *)__P,
                                                        (__v64qi) _mm512_setzero_qi(),
                                                        __U);
}
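
/*
 * Usage sketch (illustrative): expand-load reads popcount(__U) contiguous
 * elements from unaligned memory at __P and places them, in order, into the
 * lanes whose mask bit is 1. It can re-inflate data packed earlier by
 * compress-store, reusing the same hypothetical mask m:
 *
 *   short packed[32];  // filled by _mm512_mask_compressstoreu_epi16(..., m, ...)
 *   __m512i back = _mm512_maskz_expandloadu_epi16(m, packed);
 */
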
#define _mm512_mask_shldi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshldq512_mask((__v8di)(A), \
                                          (__v8di)(B), \
                                          (int)(I), \
                                          (__v8di)(S), \
                                          (__mmask8)(U)); })

#define _mm512_maskz_shldi_epi64(U, A, B, I) \
  _mm512_mask_shldi_epi64(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shldi_epi64(A, B, I) \
  _mm512_mask_shldi_epi64(_mm512_undefined(), (__mmask8)(-1), (A), (B), (I))

#define _mm512_mask_shldi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshldd512_mask((__v16si)(A), \
                                          (__v16si)(B), \
                                          (int)(I), \
                                          (__v16si)(S), \
                                          (__mmask16)(U)); })

#define _mm512_maskz_shldi_epi32(U, A, B, I) \
  _mm512_mask_shldi_epi32(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shldi_epi32(A, B, I) \
  _mm512_mask_shldi_epi32(_mm512_undefined(), (__mmask16)(-1), (A), (B), (I))

#define _mm512_mask_shldi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshldw512_mask((__v32hi)(A), \
                                          (__v32hi)(B), \
                                          (int)(I), \
                                          (__v32hi)(S), \
                                          (__mmask32)(U)); })

#define _mm512_maskz_shldi_epi16(U, A, B, I) \
  _mm512_mask_shldi_epi16(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shldi_epi16(A, B, I) \
  _mm512_mask_shldi_epi16(_mm512_undefined(), (__mmask32)(-1), (A), (B), (I))
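
/*
 * Semantics sketch (illustrative): for each lane, shldi concatenates the lane
 * of A (upper half) with the lane of B (lower half), shifts the double-width
 * value left by I, and keeps the upper half, i.e. roughly
 * (A << I) | (B >> (N - I)) for element width N. With A == B this acts as a
 * per-lane rotate left:
 *
 *   __m512i rot13 = _mm512_shldi_epi32(x, x, 13);  // x is a hypothetical input
 */
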
#define _mm512_mask_shrdi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshrdq512_mask((__v8di)(A), \
                                          (__v8di)(B), \
                                          (int)(I), \
                                          (__v8di)(S), \
                                          (__mmask8)(U)); })

#define _mm512_maskz_shrdi_epi64(U, A, B, I) \
  _mm512_mask_shrdi_epi64(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shrdi_epi64(A, B, I) \
  _mm512_mask_shrdi_epi64(_mm512_undefined(), (__mmask8)(-1), (A), (B), (I))

#define _mm512_mask_shrdi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshrdd512_mask((__v16si)(A), \
                                          (__v16si)(B), \
                                          (int)(I), \
                                          (__v16si)(S), \
                                          (__mmask16)(U)); })

#define _mm512_maskz_shrdi_epi32(U, A, B, I) \
  _mm512_mask_shrdi_epi32(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shrdi_epi32(A, B, I) \
  _mm512_mask_shrdi_epi32(_mm512_undefined(), (__mmask16)(-1), (A), (B), (I))

#define _mm512_mask_shrdi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_vpshrdw512_mask((__v32hi)(A), \
                                          (__v32hi)(B), \
                                          (int)(I), \
                                          (__v32hi)(S), \
                                          (__mmask32)(U)); })

#define _mm512_maskz_shrdi_epi16(U, A, B, I) \
  _mm512_mask_shrdi_epi16(_mm512_setzero_hi(), (U), (A), (B), (I))

#define _mm512_shrdi_epi16(A, B, I) \
  _mm512_mask_shrdi_epi16(_mm512_undefined(), (__mmask32)(-1), (A), (B), (I))
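
/*
 * Semantics sketch (illustrative): shrdi is the right-shift counterpart; each
 * lane of B supplies the upper half and A the lower half of the double-width
 * value, which is shifted right by I, keeping the lower half: roughly
 * (A >> I) | (B << (N - I)). With A == B this acts as a per-lane rotate right:
 *
 *   __m512i ror7 = _mm512_shrdi_epi16(x, x, 7);  // x is a hypothetical input
 */
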
static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shldv_epi64(__m512i __S, __mmask8 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvq512_mask ((__v8di) __S,
                                                    (__v8di) __A,
                                                    (__v8di) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shldv_epi64(__mmask8 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvq512_maskz ((__v8di) __S,
                                                     (__v8di) __A,
                                                     (__v8di) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shldv_epi64(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvq512_mask ((__v8di) __S,
                                                    (__v8di) __A,
                                                    (__v8di) __B,
                                                    (__mmask8) -1);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shldv_epi32(__m512i __S, __mmask16 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvd512_mask ((__v16si) __S,
                                                    (__v16si) __A,
                                                    (__v16si) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shldv_epi32(__mmask16 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvd512_maskz ((__v16si) __S,
                                                     (__v16si) __A,
                                                     (__v16si) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shldv_epi32(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvd512_mask ((__v16si) __S,
                                                    (__v16si) __A,
                                                    (__v16si) __B,
                                                    (__mmask16) -1);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shldv_epi16(__m512i __S, __mmask32 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvw512_mask ((__v32hi) __S,
                                                    (__v32hi) __A,
                                                    (__v32hi) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shldv_epi16(__mmask32 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvw512_maskz ((__v32hi) __S,
                                                     (__v32hi) __A,
                                                     (__v32hi) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shldv_epi16(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshldvw512_mask ((__v32hi) __S,
                                                    (__v32hi) __A,
                                                    (__v32hi) __B,
                                                    (__mmask32) -1);
}
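
/*
 * Usage sketch (illustrative): shldv is the variable-count form of shldi; the
 * per-lane shift counts come from __B instead of an immediate, so each lane
 * may shift by a different amount:
 *
 *   __m512i counts = _mm512_set1_epi64(5);  // counts could differ per lane
 *   __m512i r      = _mm512_shldv_epi64(x, x, counts);  // per-lane rotate left
 */
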
static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shrdv_epi64(__m512i __S, __mmask8 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvq512_mask ((__v8di) __S,
                                                    (__v8di) __A,
                                                    (__v8di) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shrdv_epi64(__mmask8 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvq512_maskz ((__v8di) __S,
                                                     (__v8di) __A,
                                                     (__v8di) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shrdv_epi64(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvq512_mask ((__v8di) __S,
                                                    (__v8di) __A,
                                                    (__v8di) __B,
                                                    (__mmask8) -1);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shrdv_epi32(__m512i __S, __mmask16 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvd512_mask ((__v16si) __S,
                                                    (__v16si) __A,
                                                    (__v16si) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shrdv_epi32(__mmask16 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvd512_maskz ((__v16si) __S,
                                                     (__v16si) __A,
                                                     (__v16si) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shrdv_epi32(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvd512_mask ((__v16si) __S,
                                                    (__v16si) __A,
                                                    (__v16si) __B,
                                                    (__mmask16) -1);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_mask_shrdv_epi16(__m512i __S, __mmask32 __U, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvw512_mask ((__v32hi) __S,
                                                    (__v32hi) __A,
                                                    (__v32hi) __B,
                                                    __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_maskz_shrdv_epi16(__mmask32 __U, __m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvw512_maskz ((__v32hi) __S,
                                                     (__v32hi) __A,
                                                     (__v32hi) __B,
                                                     __U);
}

static __inline__ __m512i __DEFAULT_FN_ATTRS
_mm512_shrdv_epi16(__m512i __S, __m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpshrdvw512_mask ((__v32hi) __S,
                                                    (__v32hi) __A,
                                                    (__v32hi) __B,
                                                    (__mmask32) -1);
}
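
/*
 * Usage sketch (illustrative): shrdv mirrors shldv for right shifts; lane i of
 * the result is the low half of the concatenation (__A[i]:__S[i]) shifted
 * right by __B[i], i.e. roughly (__S >> __B) | (__A << (N - __B)) per lane.
 *
 *   __m512i r = _mm512_shrdv_epi32(x, y, counts);  // hypothetical operands
 */
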
#undef __DEFAULT_FN_ATTRS

#endif