/*===------------- avx512vlvbmi2intrin.h - VBMI2 intrinsics -----------------===
 *
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error "Never use <avx512vlvbmi2intrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512VLVBMI2INTRIN_H
#define __AVX512VLVBMI2INTRIN_H

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__, __target__("avx512vl,avx512vbmi2")))

static __inline __m128i __DEFAULT_FN_ATTRS
_mm128_setzero_hi(void) {
  return (__m128i)(__v8hi){ 0, 0, 0, 0, 0, 0, 0, 0 };
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_compress_epi16(__m128i __S, __mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compresshi128_mask(
      (__v8hi) __D, (__v8hi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_compress_epi16(__mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compresshi128_mask(
      (__v8hi) __D, (__v8hi) _mm128_setzero_hi(), __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_compress_epi8(__m128i __S, __mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compressqi128_mask(
      (__v16qi) __D, (__v16qi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_compress_epi8(__mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_compressqi128_mask(
      (__v16qi) __D, (__v16qi) _mm128_setzero_hi(), __U);
}

static __inline__ void __DEFAULT_FN_ATTRS
_mm128_mask_compressstoreu_epi16(void *__P, __mmask8 __U, __m128i __D)
{
  __builtin_ia32_compressstorehi128_mask((__v8hi *) __P, (__v8hi) __D, __U);
}

static __inline__ void __DEFAULT_FN_ATTRS
_mm128_mask_compressstoreu_epi8(void *__P, __mmask16 __U, __m128i __D)
{
  __builtin_ia32_compressstoreqi128_mask((__v16qi *) __P, (__v16qi) __D, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_expand_epi16(__m128i __S, __mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandhi128_mask(
      (__v8hi) __D, (__v8hi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_expand_epi16(__mmask8 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandhi128_mask(
      (__v8hi) __D, (__v8hi) _mm128_setzero_hi(), __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_expand_epi8(__m128i __S, __mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandqi128_mask(
      (__v16qi) __D, (__v16qi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_expand_epi8(__mmask16 __U, __m128i __D)
{
  return (__m128i) __builtin_ia32_expandqi128_mask(
      (__v16qi) __D, (__v16qi) _mm128_setzero_hi(), __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_expandloadu_epi16(__m128i __S, __mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadhi128_mask(
      (const __v8hi *)__P, (__v8hi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_expandloadu_epi16(__mmask8 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadhi128_mask(
      (const __v8hi *)__P, (__v8hi) _mm128_setzero_hi(), __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_expandloadu_epi8(__m128i __S, __mmask16 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadqi128_mask(
      (const __v16qi *)__P, (__v16qi) __S, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_expandloadu_epi8(__mmask16 __U, void const *__P)
{
  return (__m128i) __builtin_ia32_expandloadqi128_mask(
      (const __v16qi *)__P, (__v16qi) _mm128_setzero_hi(), __U);
}
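
/* Usage sketch (illustrative, not part of the original header): a masked
 * compress packs the elements of __D selected by __U into the low lanes of
 * the result, in order; the remaining lanes come from __S, or are zeroed in
 * the maskz form. Assuming the caller builds inputs with _mm_set_epi16 from
 * <immintrin.h>:
 *
 *   __m128i d = _mm_set_epi16(7, 6, 5, 4, 3, 2, 1, 0);  // element i holds i
 *   __m128i r = _mm128_maskz_compress_epi16(0xAA, d);   // keep odd lanes
 *   // r = { 1, 3, 5, 7, 0, 0, 0, 0 }  (element 0 listed first)
 *
 * The expand intrinsics perform the inverse: consecutive low elements of the
 * source are scattered to the lanes selected by the mask.
 */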

static __inline __m256i __DEFAULT_FN_ATTRS
_mm256_setzero_hi(void) {
  return (__m256i)(__v16hi){ 0, 0, 0, 0, 0, 0, 0, 0,
                             0, 0, 0, 0, 0, 0, 0, 0 };
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_compress_epi16(__m256i __S, __mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compresshi256_mask(
      (__v16hi) __D, (__v16hi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_compress_epi16(__mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compresshi256_mask(
      (__v16hi) __D, (__v16hi) _mm256_setzero_hi(), __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_compress_epi8(__m256i __S, __mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compressqi256_mask(
      (__v32qi) __D, (__v32qi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_compress_epi8(__mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_compressqi256_mask(
      (__v32qi) __D, (__v32qi) _mm256_setzero_hi(), __U);
}

static __inline__ void __DEFAULT_FN_ATTRS
_mm256_mask_compressstoreu_epi16(void *__P, __mmask16 __U, __m256i __D)
{
  __builtin_ia32_compressstorehi256_mask((__v16hi *) __P, (__v16hi) __D, __U);
}

static __inline__ void __DEFAULT_FN_ATTRS
_mm256_mask_compressstoreu_epi8(void *__P, __mmask32 __U, __m256i __D)
{
  __builtin_ia32_compressstoreqi256_mask((__v32qi *) __P, (__v32qi) __D, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_expand_epi16(__m256i __S, __mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandhi256_mask(
      (__v16hi) __D, (__v16hi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_expand_epi16(__mmask16 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandhi256_mask(
      (__v16hi) __D, (__v16hi) _mm256_setzero_hi(), __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_expand_epi8(__m256i __S, __mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandqi256_mask(
      (__v32qi) __D, (__v32qi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_expand_epi8(__mmask32 __U, __m256i __D)
{
  return (__m256i) __builtin_ia32_expandqi256_mask(
      (__v32qi) __D, (__v32qi) _mm256_setzero_hi(), __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_expandloadu_epi16(__m256i __S, __mmask16 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadhi256_mask(
      (const __v16hi *)__P, (__v16hi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_expandloadu_epi16(__mmask16 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadhi256_mask(
      (const __v16hi *)__P, (__v16hi) _mm256_setzero_hi(), __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_expandloadu_epi8(__m256i __S, __mmask32 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadqi256_mask(
      (const __v32qi *)__P, (__v32qi) __S, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_expandloadu_epi8(__mmask32 __U, void const *__P)
{
  return (__m256i) __builtin_ia32_expandloadqi256_mask(
      (const __v32qi *)__P, (__v32qi) _mm256_setzero_hi(), __U);
}
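
/* Usage sketch (illustrative, not part of the original header): the
 * compressstoreu/expandloadu pairs are the memory forms of compress/expand,
 * so data written by one can be read back by the other with the same mask.
 * Assuming some __mmask16 mask and __m256i v supplied by the caller:
 *
 *   short buf[16];
 *   _mm256_mask_compressstoreu_epi16(buf, mask, v);
 *   // packed store of only the masked lanes of v
 *   __m256i w = _mm256_maskz_expandloadu_epi16(mask, buf);
 *   // masked lanes of v restored to their positions, other lanes zero
 */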

#define _mm256_mask_shldi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshldq256_mask( \
      (__v4di)(A), (__v4di)(B), (int)(I), (__v4di)(S), (__mmask8)(U)); })
#define _mm256_maskz_shldi_epi64(U, A, B, I) \
  _mm256_mask_shldi_epi64(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shldi_epi64(A, B, I) \
  _mm256_mask_shldi_epi64(_mm256_undefined_si256(), (__mmask8)(-1), (A), (B), (I))

#define _mm128_mask_shldi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshldq128_mask( \
      (__v2di)(A), (__v2di)(B), (int)(I), (__v2di)(S), (__mmask8)(U)); })
#define _mm128_maskz_shldi_epi64(U, A, B, I) \
  _mm128_mask_shldi_epi64(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shldi_epi64(A, B, I) \
  _mm128_mask_shldi_epi64(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))

#define _mm256_mask_shldi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshldd256_mask( \
      (__v8si)(A), (__v8si)(B), (int)(I), (__v8si)(S), (__mmask8)(U)); })
#define _mm256_maskz_shldi_epi32(U, A, B, I) \
  _mm256_mask_shldi_epi32(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shldi_epi32(A, B, I) \
  _mm256_mask_shldi_epi32(_mm256_undefined_si256(), (__mmask8)(-1), (A), (B), (I))

#define _mm128_mask_shldi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshldd128_mask( \
      (__v4si)(A), (__v4si)(B), (int)(I), (__v4si)(S), (__mmask8)(U)); })
#define _mm128_maskz_shldi_epi32(U, A, B, I) \
  _mm128_mask_shldi_epi32(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shldi_epi32(A, B, I) \
  _mm128_mask_shldi_epi32(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))

#define _mm256_mask_shldi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshldw256_mask( \
      (__v16hi)(A), (__v16hi)(B), (int)(I), (__v16hi)(S), (__mmask16)(U)); })
#define _mm256_maskz_shldi_epi16(U, A, B, I) \
  _mm256_mask_shldi_epi16(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shldi_epi16(A, B, I) \
  _mm256_mask_shldi_epi16(_mm256_undefined_si256(), (__mmask16)(-1), (A), (B), (I))

#define _mm128_mask_shldi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshldw128_mask( \
      (__v8hi)(A), (__v8hi)(B), (int)(I), (__v8hi)(S), (__mmask8)(U)); })
#define _mm128_maskz_shldi_epi16(U, A, B, I) \
  _mm128_mask_shldi_epi16(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shldi_epi16(A, B, I) \
  _mm128_mask_shldi_epi16(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))

#define _mm256_mask_shrdi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshrdq256_mask( \
      (__v4di)(A), (__v4di)(B), (int)(I), (__v4di)(S), (__mmask8)(U)); })
#define _mm256_maskz_shrdi_epi64(U, A, B, I) \
  _mm256_mask_shrdi_epi64(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shrdi_epi64(A, B, I) \
  _mm256_mask_shrdi_epi64(_mm256_undefined_si256(), (__mmask8)(-1), (A), (B), (I))

#define _mm128_mask_shrdi_epi64(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshrdq128_mask( \
      (__v2di)(A), (__v2di)(B), (int)(I), (__v2di)(S), (__mmask8)(U)); })
#define _mm128_maskz_shrdi_epi64(U, A, B, I) \
  _mm128_mask_shrdi_epi64(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shrdi_epi64(A, B, I) \
  _mm128_mask_shrdi_epi64(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))

#define _mm256_mask_shrdi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshrdd256_mask( \
      (__v8si)(A), (__v8si)(B), (int)(I), (__v8si)(S), (__mmask8)(U)); })
#define _mm256_maskz_shrdi_epi32(U, A, B, I) \
  _mm256_mask_shrdi_epi32(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shrdi_epi32(A, B, I) \
  _mm256_mask_shrdi_epi32(_mm256_undefined_si256(), (__mmask8)(-1), (A), (B), (I))

#define _mm128_mask_shrdi_epi32(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshrdd128_mask( \
      (__v4si)(A), (__v4si)(B), (int)(I), (__v4si)(S), (__mmask8)(U)); })
#define _mm128_maskz_shrdi_epi32(U, A, B, I) \
  _mm128_mask_shrdi_epi32(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shrdi_epi32(A, B, I) \
  _mm128_mask_shrdi_epi32(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))

#define _mm256_mask_shrdi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vpshrdw256_mask( \
      (__v16hi)(A), (__v16hi)(B), (int)(I), (__v16hi)(S), (__mmask16)(U)); })
#define _mm256_maskz_shrdi_epi16(U, A, B, I) \
  _mm256_mask_shrdi_epi16(_mm256_setzero_hi(), (U), (A), (B), (I))
#define _mm256_shrdi_epi16(A, B, I) \
  _mm256_mask_shrdi_epi16(_mm256_undefined_si256(), (__mmask16)(-1), (A), (B), (I))

#define _mm128_mask_shrdi_epi16(S, U, A, B, I) __extension__ ({ \
  (__m128i)__builtin_ia32_vpshrdw128_mask( \
      (__v8hi)(A), (__v8hi)(B), (int)(I), (__v8hi)(S), (__mmask8)(U)); })
#define _mm128_maskz_shrdi_epi16(U, A, B, I) \
  _mm128_mask_shrdi_epi16(_mm128_setzero_hi(), (U), (A), (B), (I))
#define _mm128_shrdi_epi16(A, B, I) \
  _mm128_mask_shrdi_epi16(_mm_undefined_si128(), (__mmask8)(-1), (A), (B), (I))
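
/* Usage sketch (illustrative, not part of the original header): the
 * shldi/shrdi macros are funnel shifts by an immediate. For each lane, shldi
 * concatenates the element of A (upper half) with the element of B (lower
 * half), shifts the double-width value left by I bits and keeps the upper
 * half; shrdi shifts right and keeps the lower half. For example:
 *
 *   __m256i hi = _mm256_set1_epi64x(1);
 *   __m256i lo = _mm256_set1_epi64x(-1);
 *   __m256i r  = _mm256_shldi_epi64(hi, lo, 1);
 *   // each lane of r is 0x3: (1 << 1) with the top bit of `lo` shifted in
 */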

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shldv_epi64(__m256i __S, __mmask8 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvq256_mask(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shldv_epi64(__mmask8 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvq256_maskz(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shldv_epi64(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvq256_mask(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, (__mmask8) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shldv_epi64(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvq128_mask(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shldv_epi64(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvq128_maskz(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shldv_epi64(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvq128_mask(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, (__mmask8) -1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shldv_epi32(__m256i __S, __mmask8 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvd256_mask(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shldv_epi32(__mmask8 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvd256_maskz(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shldv_epi32(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvd256_mask(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, (__mmask8) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shldv_epi32(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvd128_mask(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shldv_epi32(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvd128_maskz(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shldv_epi32(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvd128_mask(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, (__mmask8) -1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shldv_epi16(__m256i __S, __mmask16 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvw256_mask(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shldv_epi16(__mmask16 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvw256_maskz(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shldv_epi16(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshldvw256_mask(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, (__mmask16) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shldv_epi16(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvw128_mask(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shldv_epi16(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvw128_maskz(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shldv_epi16(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshldvw128_mask(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, (__mmask8) -1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shrdv_epi64(__m256i __S, __mmask8 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvq256_mask(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shrdv_epi64(__mmask8 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvq256_maskz(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shrdv_epi64(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvq256_mask(
      (__v4di) __S, (__v4di) __A, (__v4di) __B, (__mmask8) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shrdv_epi64(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvq128_mask(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shrdv_epi64(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvq128_maskz(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shrdv_epi64(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvq128_mask(
      (__v2di) __S, (__v2di) __A, (__v2di) __B, (__mmask8) -1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shrdv_epi32(__m256i __S, __mmask8 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvd256_mask(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shrdv_epi32(__mmask8 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvd256_maskz(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shrdv_epi32(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvd256_mask(
      (__v8si) __S, (__v8si) __A, (__v8si) __B, (__mmask8) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shrdv_epi32(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvd128_mask(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shrdv_epi32(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvd128_maskz(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shrdv_epi32(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvd128_mask(
      (__v4si) __S, (__v4si) __A, (__v4si) __B, (__mmask8) -1);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_mask_shrdv_epi16(__m256i __S, __mmask16 __U, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvw256_mask(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_maskz_shrdv_epi16(__mmask16 __U, __m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvw256_maskz(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, __U);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS
_mm256_shrdv_epi16(__m256i __S, __m256i __A, __m256i __B)
{
  return (__m256i) __builtin_ia32_vpshrdvw256_mask(
      (__v16hi) __S, (__v16hi) __A, (__v16hi) __B, (__mmask16) -1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_mask_shrdv_epi16(__m128i __S, __mmask8 __U, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvw128_mask(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_maskz_shrdv_epi16(__mmask8 __U, __m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvw128_maskz(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, __U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS
_mm128_shrdv_epi16(__m128i __S, __m128i __A, __m128i __B)
{
  return (__m128i) __builtin_ia32_vpshrdvw128_mask(
      (__v8hi) __S, (__v8hi) __A, (__v8hi) __B, (__mmask8) -1);
}
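
/* Usage sketch (illustrative, not part of the original header): the
 * shldv/shrdv forms read the shift count for each lane from a third vector,
 * so every lane can use a different count (only the count modulo the element
 * width is used). For example, on 32-bit lanes:
 *
 *   __m128i a   = _mm_set1_epi32(1);
 *   __m128i b   = _mm_setzero_si128();
 *   __m128i cnt = _mm_set_epi32(3, 2, 1, 0);
 *   __m128i r   = _mm128_shldv_epi32(a, b, cnt);
 *   // r = { 1, 2, 4, 8 } (element 0 first): each lane of `a` is shifted left
 *   // by its own count, with bits of `b` (zero here) shifted in from below
 */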

#undef __DEFAULT_FN_ATTRS

#endif