This is the documentation for the Scripting APIs of this package.
Show / Hide Table of Contents
- Unity.Burst
- Unity.Burst.CompilerServices
- Unity.Burst.Intrinsics - Arm
- Arm.Neon - Properties
- Methods
- __crc32b
- __crc32cb
- __crc32cd
- __crc32ch
- __crc32cw
- __crc32d
- __crc32h
- __crc32w
- vaba_s16
- vaba_s32
- vaba_s8
- vaba_u16
- vaba_u32
- vaba_u8
- vabal_high_s16
- vabal_high_s32
- vabal_high_s8
- vabal_high_u16
- vabal_high_u32
- vabal_high_u8
- vabal_s16
- vabal_s32
- vabal_s8
- vabal_u16
- vabal_u32
- vabal_u8
- vabaq_s16
- vabaq_s32
- vabaq_s8
- vabaq_u16
- vabaq_u32
- vabaq_u8
- vabd_f32
- vabd_f64
- vabd_s16
- vabd_s32
- vabd_s8
- vabd_u16
- vabd_u32
- vabd_u8
- vabdd_f64
- vabdl_high_s16
- vabdl_high_s32
- vabdl_high_s8
- vabdl_high_u16
- vabdl_high_u32
- vabdl_high_u8
- vabdl_s16
- vabdl_s32
- vabdl_s8
- vabdl_u16
- vabdl_u32
- vabdl_u8
- vabdq_f32
- vabdq_f64
- vabdq_s16
- vabdq_s32
- vabdq_s8
- vabdq_u16
- vabdq_u32
- vabdq_u8
- vabds_f32
- vabs_f32
- vabs_f64
- vabs_s16
- vabs_s32
- vabs_s64
- vabs_s8
- vabsd_s64
- vabsq_f32
- vabsq_f64
- vabsq_s16
- vabsq_s32
- vabsq_s64
- vabsq_s8
- vadd_f32
- vadd_f64
- vadd_s16
- vadd_s32
- vadd_s64
- vadd_s8
- vadd_u16
- vadd_u32
- vadd_u64
- vadd_u8
- vaddd_s64
- vaddd_u64
- vaddhn_high_s16
- vaddhn_high_s32
- vaddhn_high_s64
- vaddhn_high_u16
- vaddhn_high_u32
- vaddhn_high_u64
- vaddhn_s16
- vaddhn_s32
- vaddhn_s64
- vaddhn_u16
- vaddhn_u32
- vaddhn_u64
- vaddl_high_s16
- vaddl_high_s32
- vaddl_high_s8
- vaddl_high_u16
- vaddl_high_u32
- vaddl_high_u8
- vaddl_s16
- vaddl_s32
- vaddl_s8
- vaddl_u16
- vaddl_u32
- vaddl_u8
- vaddlv_s16
- vaddlv_s32
- vaddlv_s8
- vaddlv_u16
- vaddlv_u32
- vaddlv_u8
- vaddlvq_s16
- vaddlvq_s32
- vaddlvq_s8
- vaddlvq_u16
- vaddlvq_u32
- vaddlvq_u8
- vaddq_f32
- vaddq_f64
- vaddq_s16
- vaddq_s32
- vaddq_s64
- vaddq_s8
- vaddq_u16
- vaddq_u32
- vaddq_u64
- vaddq_u8
- vaddv_f32
- vaddv_s16
- vaddv_s32
- vaddv_s8
- vaddv_u16
- vaddv_u32
- vaddv_u8
- vaddvq_f32
- vaddvq_f64
- vaddvq_s16
- vaddvq_s32
- vaddvq_s64
- vaddvq_s8
- vaddvq_u16
- vaddvq_u32
- vaddvq_u64
- vaddvq_u8
- vaddw_high_s16
- vaddw_high_s32
- vaddw_high_s8
- vaddw_high_u16
- vaddw_high_u32
- vaddw_high_u8
- vaddw_s16
- vaddw_s32
- vaddw_s8
- vaddw_u16
- vaddw_u32
- vaddw_u8
- vaesdq_u8
- vaeseq_u8
- vaesimcq_u8
- vaesmcq_u8
- vand_s16
- vand_s32
- vand_s64
- vand_s8
- vand_u16
- vand_u32
- vand_u64
- vand_u8
- vandq_s16
- vandq_s32
- vandq_s64
- vandq_s8
- vandq_u16
- vandq_u32
- vandq_u64
- vandq_u8
- vbic_s16
- vbic_s32
- vbic_s64
- vbic_s8
- vbic_u16
- vbic_u32
- vbic_u64
- vbic_u8
- vbicq_s16
- vbicq_s32
- vbicq_s64
- vbicq_s8
- vbicq_u16
- vbicq_u32
- vbicq_u64
- vbicq_u8
- vbsl_f32
- vbsl_f64
- vbsl_s16
- vbsl_s32
- vbsl_s64
- vbsl_s8
- vbsl_u16
- vbsl_u32
- vbsl_u64
- vbsl_u8
- vbslq_f32
- vbslq_f64
- vbslq_s16
- vbslq_s32
- vbslq_s64
- vbslq_s8
- vbslq_u16
- vbslq_u32
- vbslq_u64
- vbslq_u8
- vcage_f32
- vcage_f64
- vcaged_f64
- vcageq_f32
- vcageq_f64
- vcages_f32
- vcagt_f32
- vcagt_f64
- vcagtd_f64
- vcagtq_f32
- vcagtq_f64
- vcagts_f32
- vcale_f32
- vcale_f64
- vcaled_f64
- vcaleq_f32
- vcaleq_f64
- vcales_f32
- vcalt_f32
- vcalt_f64
- vcaltd_f64
- vcaltq_f32
- vcaltq_f64
- vcalts_f32
- vceq_f32
- vceq_f64
- vceq_s16
- vceq_s32
- vceq_s64
- vceq_s8
- vceq_u16
- vceq_u32
- vceq_u64
- vceq_u8
- vceqd_f64
- vceqd_s64
- vceqd_u64
- vceqq_f32
- vceqq_f64
- vceqq_s16
- vceqq_s32
- vceqq_s64
- vceqq_s8
- vceqq_u16
- vceqq_u32
- vceqq_u64
- vceqq_u8
- vceqs_f32
- vceqz_f32
- vceqz_f64
- vceqz_s16
- vceqz_s32
- vceqz_s64
- vceqz_s8
- vceqz_u16
- vceqz_u32
- vceqz_u64
- vceqz_u8
- vceqzd_f64
- vceqzd_s64
- vceqzd_u64
- vceqzq_f32
- vceqzq_f64
- vceqzq_s16
- vceqzq_s32
- vceqzq_s64
- vceqzq_s8
- vceqzq_u16
- vceqzq_u32
- vceqzq_u64
- vceqzq_u8
- vceqzs_f32
- vcge_f32
- vcge_f64
- vcge_s16
- vcge_s32
- vcge_s64
- vcge_s8
- vcge_u16
- vcge_u32
- vcge_u64
- vcge_u8
- vcged_f64
- vcged_s64
- vcged_u64
- vcgeq_f32
- vcgeq_f64
- vcgeq_s16
- vcgeq_s32
- vcgeq_s64
- vcgeq_s8
- vcgeq_u16
- vcgeq_u32
- vcgeq_u64
- vcgeq_u8
- vcges_f32
- vcgez_f32
- vcgez_f64
- vcgez_s16
- vcgez_s32
- vcgez_s64
- vcgez_s8
- vcgezd_f64
- vcgezd_s64
- vcgezq_f32
- vcgezq_f64
- vcgezq_s16
- vcgezq_s32
- vcgezq_s64
- vcgezq_s8
- vcgezs_f32
- vcgt_f32
- vcgt_f64
- vcgt_s16
- vcgt_s32
- vcgt_s64
- vcgt_s8
- vcgt_u16
- vcgt_u32
- vcgt_u64
- vcgt_u8
- vcgtd_f64
- vcgtd_s64
- vcgtd_u64
- vcgtq_f32
- vcgtq_f64
- vcgtq_s16
- vcgtq_s32
- vcgtq_s64
- vcgtq_s8
- vcgtq_u16
- vcgtq_u32
- vcgtq_u64
- vcgtq_u8
- vcgts_f32
- vcgtz_f32
- vcgtz_f64
- vcgtz_s16
- vcgtz_s32
- vcgtz_s64
- vcgtz_s8
- vcgtzd_f64
- vcgtzd_s64
- vcgtzq_f32
- vcgtzq_f64
- vcgtzq_s16
- vcgtzq_s32
- vcgtzq_s64
- vcgtzq_s8
- vcgtzs_f32
- vcle_f32
- vcle_f64
- vcle_s16
- vcle_s32
- vcle_s64
- vcle_s8
- vcle_u16
- vcle_u32
- vcle_u64
- vcle_u8
- vcled_f64
- vcled_s64
- vcled_u64
- vcleq_f32
- vcleq_f64
- vcleq_s16
- vcleq_s32
- vcleq_s64
- vcleq_s8
- vcleq_u16
- vcleq_u32
- vcleq_u64
- vcleq_u8
- vcles_f32
- vclez_f32
- vclez_f64
- vclez_s16
- vclez_s32
- vclez_s64
- vclez_s8
- vclezd_f64
- vclezd_s64
- vclezq_f32
- vclezq_f64
- vclezq_s16
- vclezq_s32
- vclezq_s64
- vclezq_s8
- vclezs_f32
- vcls_s16
- vcls_s32
- vcls_s8
- vclsq_s16
- vclsq_s32
- vclsq_s8
- vclt_f32
- vclt_f64
- vclt_s16
- vclt_s32
- vclt_s64
- vclt_s8
- vclt_u16
- vclt_u32
- vclt_u64
- vclt_u8
- vcltd_f64
- vcltd_s64
- vcltd_u64
- vcltq_f32
- vcltq_f64
- vcltq_s16
- vcltq_s32
- vcltq_s64
- vcltq_s8
- vcltq_u16
- vcltq_u32
- vcltq_u64
- vcltq_u8
- vclts_f32
- vcltz_f32
- vcltz_f64
- vcltz_s16
- vcltz_s32
- vcltz_s64
- vcltz_s8
- vcltzd_f64
- vcltzd_s64
- vcltzq_f32
- vcltzq_f64
- vcltzq_s16
- vcltzq_s32
- vcltzq_s64
- vcltzq_s8
- vcltzs_f32
- vclz_s16
- vclz_s32
- vclz_s8
- vclz_u16
- vclz_u32
- vclz_u8
- vclzq_s16
- vclzq_s32
- vclzq_s8
- vclzq_u16
- vclzq_u32
- vclzq_u8
- vcnt_s8
- vcnt_u8
- vcntq_s8
- vcntq_u8
- vcombine_f16
- vcombine_f32
- vcombine_f64
- vcombine_s16
- vcombine_s32
- vcombine_s64
- vcombine_s8
- vcombine_u16
- vcombine_u32
- vcombine_u64
- vcombine_u8
- vcopy_lane_f32
- vcopy_lane_f64
- vcopy_lane_s16
- vcopy_lane_s32
- vcopy_lane_s64
- vcopy_lane_s8
- vcopy_lane_u16
- vcopy_lane_u32
- vcopy_lane_u64
- vcopy_lane_u8
- vcopy_laneq_f32
- vcopy_laneq_f64
- vcopy_laneq_s16
- vcopy_laneq_s32
- vcopy_laneq_s64
- vcopy_laneq_s8
- vcopy_laneq_u16
- vcopy_laneq_u32
- vcopy_laneq_u64
- vcopy_laneq_u8
- vcopyq_lane_f32
- vcopyq_lane_f64
- vcopyq_lane_s16
- vcopyq_lane_s32
- vcopyq_lane_s64
- vcopyq_lane_s8
- vcopyq_lane_u16
- vcopyq_lane_u32
- vcopyq_lane_u64
- vcopyq_lane_u8
- vcopyq_laneq_f32
- vcopyq_laneq_f64
- vcopyq_laneq_s16
- vcopyq_laneq_s32
- vcopyq_laneq_s64
- vcopyq_laneq_s8
- vcopyq_laneq_u16
- vcopyq_laneq_u32
- vcopyq_laneq_u64
- vcopyq_laneq_u8
- vcreate_f16
- vcreate_f32
- vcreate_f64
- vcreate_s16
- vcreate_s32
- vcreate_s64
- vcreate_s8
- vcreate_u16
- vcreate_u32
- vcreate_u64
- vcreate_u8
- vcvt_f32_f64
- vcvt_f32_s32
- vcvt_f32_u32
- vcvt_f64_f32
- vcvt_f64_s64
- vcvt_f64_u64
- vcvt_high_f32_f64
- vcvt_high_f64_f32
- vcvt_n_f32_s32
- vcvt_n_f32_u32
- vcvt_n_f64_s64
- vcvt_n_f64_u64
- vcvt_n_s32_f32
- vcvt_n_s64_f64
- vcvt_n_u32_f32
- vcvt_n_u64_f64
- vcvt_s32_f32
- vcvt_s64_f64
- vcvt_u32_f32
- vcvt_u64_f64
- vcvta_s32_f32
- vcvta_s64_f64
- vcvta_u32_f32
- vcvta_u64_f64
- vcvtad_s64_f64
- vcvtad_u64_f64
- vcvtaq_s32_f32
- vcvtaq_s64_f64
- vcvtaq_u32_f32
- vcvtaq_u64_f64
- vcvtas_s32_f32
- vcvtas_u32_f32
- vcvtd_f64_s64
- vcvtd_f64_u64
- vcvtd_n_f64_s64
- vcvtd_n_f64_u64
- vcvtd_n_s64_f64
- vcvtd_n_u64_f64
- vcvtd_s64_f64
- vcvtd_u64_f64
- vcvtm_s32_f32
- vcvtm_s64_f64
- vcvtm_u32_f32
- vcvtm_u64_f64
- vcvtmd_s64_f64
- vcvtmd_u64_f64
- vcvtmq_s32_f32
- vcvtmq_s64_f64
- vcvtmq_u32_f32
- vcvtmq_u64_f64
- vcvtms_s32_f32
- vcvtms_u32_f32
- vcvtn_s32_f32
- vcvtn_s64_f64
- vcvtn_u32_f32
- vcvtn_u64_f64
- vcvtnd_s64_f64
- vcvtnd_u64_f64
- vcvtnq_s32_f32
- vcvtnq_s64_f64
- vcvtnq_u32_f32
- vcvtnq_u64_f64
- vcvtns_s32_f32
- vcvtns_u32_f32
- vcvtp_s32_f32
- vcvtp_s64_f64
- vcvtp_u32_f32
- vcvtp_u64_f64
- vcvtpd_s64_f64
- vcvtpd_u64_f64
- vcvtpq_s32_f32
- vcvtpq_s64_f64
- vcvtpq_u32_f32
- vcvtpq_u64_f64
- vcvtps_s32_f32
- vcvtps_u32_f32
- vcvtq_f32_s32
- vcvtq_f32_u32
- vcvtq_f64_s64
- vcvtq_f64_u64
- vcvtq_n_f32_s32
- vcvtq_n_f32_u32
- vcvtq_n_f64_s64
- vcvtq_n_f64_u64
- vcvtq_n_s32_f32
- vcvtq_n_s64_f64
- vcvtq_n_u32_f32
- vcvtq_n_u64_f64
- vcvtq_s32_f32
- vcvtq_s64_f64
- vcvtq_u32_f32
- vcvtq_u64_f64
- vcvts_f32_s32
- vcvts_f32_u32
- vcvts_n_f32_s32
- vcvts_n_f32_u32
- vcvts_n_s32_f32
- vcvts_n_u32_f32
- vcvts_s32_f32
- vcvts_u32_f32
- vcvtx_f32_f64
- vcvtx_high_f32_f64
- vcvtxd_f32_f64
- vdiv_f32
- vdiv_f64
- vdivq_f32
- vdivq_f64
- vdot_lane_s32
- vdot_lane_u32
- vdot_laneq_s32
- vdot_laneq_u32
- vdot_s32
- vdot_u32
- vdotq_lane_s32
- vdotq_lane_u32
- vdotq_laneq_s32
- vdotq_laneq_u32
- vdotq_s32
- vdotq_u32
- vdup_lane_f32
- vdup_lane_f64
- vdup_lane_s16
- vdup_lane_s32
- vdup_lane_s64
- vdup_lane_s8
- vdup_lane_u16
- vdup_lane_u32
- vdup_lane_u64
- vdup_lane_u8
- vdup_laneq_f32
- vdup_laneq_f64
- vdup_laneq_s16
- vdup_laneq_s32
- vdup_laneq_s64
- vdup_laneq_s8
- vdup_laneq_u16
- vdup_laneq_u32
- vdup_laneq_u64
- vdup_laneq_u8
- vdup_n_f32
- vdup_n_f64
- vdup_n_s16
- vdup_n_s32
- vdup_n_s64
- vdup_n_s8
- vdup_n_u16
- vdup_n_u32
- vdup_n_u64
- vdup_n_u8
- vdupb_lane_s8
- vdupb_lane_u8
- vdupb_laneq_s8
- vdupb_laneq_u8
- vdupd_lane_f64
- vdupd_lane_s64
- vdupd_lane_u64
- vdupd_laneq_f64
- vdupd_laneq_s64
- vdupd_laneq_u64
- vduph_lane_s16
- vduph_lane_u16
- vduph_laneq_s16
- vduph_laneq_u16
- vdupq_lane_f32
- vdupq_lane_f64
- vdupq_lane_s16
- vdupq_lane_s32
- vdupq_lane_s64
- vdupq_lane_s8
- vdupq_lane_u16
- vdupq_lane_u32
- vdupq_lane_u64
- vdupq_lane_u8
- vdupq_laneq_f32
- vdupq_laneq_f64
- vdupq_laneq_s16
- vdupq_laneq_s32
- vdupq_laneq_s64
- vdupq_laneq_s8
- vdupq_laneq_u16
- vdupq_laneq_u32
- vdupq_laneq_u64
- vdupq_laneq_u8
- vdupq_n_f32
- vdupq_n_f64
- vdupq_n_s16
- vdupq_n_s32
- vdupq_n_s64
- vdupq_n_s8
- vdupq_n_u16
- vdupq_n_u32
- vdupq_n_u64
- vdupq_n_u8
- vdups_lane_f32
- vdups_lane_s32
- vdups_lane_u32
- vdups_laneq_f32
- vdups_laneq_s32
- vdups_laneq_u32
- veor_s16
- veor_s32
- veor_s64
- veor_s8
- veor_u16
- veor_u32
- veor_u64
- veor_u8
- veorq_s16
- veorq_s32
- veorq_s64
- veorq_s8
- veorq_u16
- veorq_u32
- veorq_u64
- veorq_u8
- vext_f32
- vext_f64
- vext_s16
- vext_s32
- vext_s64
- vext_s8
- vext_u16
- vext_u32
- vext_u64
- vext_u8
- vextq_f32
- vextq_f64
- vextq_s16
- vextq_s32
- vextq_s64
- vextq_s8
- vextq_u16
- vextq_u32
- vextq_u64
- vextq_u8
- vfma_f32
- vfma_f64
- vfma_lane_f32
- vfma_lane_f64
- vfma_laneq_f32
- vfma_laneq_f64
- vfma_n_f32
- vfma_n_f64
- vfmad_lane_f64
- vfmad_laneq_f64
- vfmaq_f32
- vfmaq_f64
- vfmaq_lane_f32
- vfmaq_lane_f64
- vfmaq_laneq_f32
- vfmaq_laneq_f64
- vfmaq_n_f32
- vfmaq_n_f64
- vfmas_lane_f32
- vfmas_laneq_f32
- vfms_f32
- vfms_f64
- vfms_lane_f32
- vfms_lane_f64
- vfms_laneq_f32
- vfms_laneq_f64
- vfms_n_f32
- vfms_n_f64
- vfmsd_lane_f64
- vfmsd_laneq_f64
- vfmsq_f32
- vfmsq_f64
- vfmsq_lane_f32
- vfmsq_lane_f64
- vfmsq_laneq_f32
- vfmsq_laneq_f64
- vfmsq_n_f32
- vfmsq_n_f64
- vfmss_lane_f32
- vfmss_laneq_f32
- vget_high_f32
- vget_high_f64
- vget_high_s16
- vget_high_s32
- vget_high_s64
- vget_high_s8
- vget_high_u16
- vget_high_u32
- vget_high_u64
- vget_high_u8
- vget_lane_f32
- vget_lane_f64
- vget_lane_s16
- vget_lane_s32
- vget_lane_s64
- vget_lane_s8
- vget_lane_u16
- vget_lane_u32
- vget_lane_u64
- vget_lane_u8
- vget_low_f32
- vget_low_f64
- vget_low_s16
- vget_low_s32
- vget_low_s64
- vget_low_s8
- vget_low_u16
- vget_low_u32
- vget_low_u64
- vget_low_u8
- vgetq_lane_f32
- vgetq_lane_f64
- vgetq_lane_s16
- vgetq_lane_s32
- vgetq_lane_s64
- vgetq_lane_s8
- vgetq_lane_u16
- vgetq_lane_u32
- vgetq_lane_u64
- vgetq_lane_u8
- vhadd_s16
- vhadd_s32
- vhadd_s8
- vhadd_u16
- vhadd_u32
- vhadd_u8
- vhaddq_s16
- vhaddq_s32
- vhaddq_s8
- vhaddq_u16
- vhaddq_u32
- vhaddq_u8
- vhsub_s16
- vhsub_s32
- vhsub_s8
- vhsub_u16
- vhsub_u32
- vhsub_u8
- vhsubq_s16
- vhsubq_s32
- vhsubq_s8
- vhsubq_u16
- vhsubq_u32
- vhsubq_u8
- vld1_f32
- vld1_f64
- vld1_s16
- vld1_s32
- vld1_s64
- vld1_s8
- vld1_u16
- vld1_u32
- vld1_u64
- vld1_u8
- vld1q_f32
- vld1q_f64
- vld1q_s16
- vld1q_s32
- vld1q_s64
- vld1q_s8
- vld1q_u16
- vld1q_u32
- vld1q_u64
- vld1q_u8
- vmax_f32
- vmax_f64
- vmax_s16
- vmax_s32
- vmax_s8
- vmax_u16
- vmax_u32
- vmax_u8
- vmaxnm_f32
- vmaxnm_f64
- vmaxnmq_f32
- vmaxnmq_f64
- vmaxnmv_f32
- vmaxnmvq_f32
- vmaxnmvq_f64
- vmaxq_f32
- vmaxq_f64
- vmaxq_s16
- vmaxq_s32
- vmaxq_s8
- vmaxq_u16
- vmaxq_u32
- vmaxq_u8
- vmaxv_f32
- vmaxv_s16
- vmaxv_s32
- vmaxv_s8
- vmaxv_u16
- vmaxv_u32
- vmaxv_u8
- vmaxvq_f32
- vmaxvq_f64
- vmaxvq_s16
- vmaxvq_s32
- vmaxvq_s8
- vmaxvq_u16
- vmaxvq_u32
- vmaxvq_u8
- vmin_f32
- vmin_f64
- vmin_s16
- vmin_s32
- vmin_s8
- vmin_u16
- vmin_u32
- vmin_u8
- vminnm_f32
- vminnm_f64
- vminnmq_f32
- vminnmq_f64
- vminnmv_f32
- vminnmvq_f32
- vminnmvq_f64
- vminq_f32
- vminq_f64
- vminq_s16
- vminq_s32
- vminq_s8
- vminq_u16
- vminq_u32
- vminq_u8
- vminv_f32
- vminv_s16
- vminv_s32
- vminv_s8
- vminv_u16
- vminv_u32
- vminv_u8
- vminvq_f32
- vminvq_f64
- vminvq_s16
- vminvq_s32
- vminvq_s8
- vminvq_u16
- vminvq_u32
- vminvq_u8
- vmla_f32
- vmla_f64
- vmla_lane_f32
- vmla_lane_s16
- vmla_lane_s32
- vmla_lane_u16
- vmla_lane_u32
- vmla_laneq_f32
- vmla_laneq_s16
- vmla_laneq_s32
- vmla_laneq_u16
- vmla_laneq_u32
- vmla_n_f32
- vmla_n_s16
- vmla_n_s32
- vmla_n_u16
- vmla_n_u32
- vmla_s16
- vmla_s32
- vmla_s8
- vmla_u16
- vmla_u32
- vmla_u8
- vmlal_high_lane_s16
- vmlal_high_lane_s32
- vmlal_high_lane_u16
- vmlal_high_lane_u32
- vmlal_high_laneq_s16
- vmlal_high_laneq_s32
- vmlal_high_laneq_u16
- vmlal_high_laneq_u32
- vmlal_high_n_s16
- vmlal_high_n_s32
- vmlal_high_n_u16
- vmlal_high_n_u32
- vmlal_high_s16
- vmlal_high_s32
- vmlal_high_s8
- vmlal_high_u16
- vmlal_high_u32
- vmlal_high_u8
- vmlal_lane_s16
- vmlal_lane_s32
- vmlal_lane_u16
- vmlal_lane_u32
- vmlal_laneq_s16
- vmlal_laneq_s32
- vmlal_laneq_u16
- vmlal_laneq_u32
- vmlal_n_s16
- vmlal_n_s32
- vmlal_n_u16
- vmlal_n_u32
- vmlal_s16
- vmlal_s32
- vmlal_s8
- vmlal_u16
- vmlal_u32
- vmlal_u8
- vmlaq_f32
- vmlaq_f64
- vmlaq_lane_f32
- vmlaq_lane_s16
- vmlaq_lane_s32
- vmlaq_lane_u16
- vmlaq_lane_u32
- vmlaq_laneq_f32
- vmlaq_laneq_s16
- vmlaq_laneq_s32
- vmlaq_laneq_u16
- vmlaq_laneq_u32
- vmlaq_n_f32
- vmlaq_n_s16
- vmlaq_n_s32
- vmlaq_n_u16
- vmlaq_n_u32
- vmlaq_s16
- vmlaq_s32
- vmlaq_s8
- vmlaq_u16
- vmlaq_u32
- vmlaq_u8
- vmls_f32
- vmls_f64
- vmls_lane_f32
- vmls_lane_s16
- vmls_lane_s32
- vmls_lane_u16
- vmls_lane_u32
- vmls_laneq_f32
- vmls_laneq_s16
- vmls_laneq_s32
- vmls_laneq_u16
- vmls_laneq_u32
- vmls_n_f32
- vmls_n_s16
- vmls_n_s32
- vmls_n_u16
- vmls_n_u32
- vmls_s16
- vmls_s32
- vmls_s8
- vmls_u16
- vmls_u32
- vmls_u8
- vmlsl_high_lane_s16
- vmlsl_high_lane_s32
- vmlsl_high_lane_u16
- vmlsl_high_lane_u32
- vmlsl_high_laneq_s16
- vmlsl_high_laneq_s32
- vmlsl_high_laneq_u16
- vmlsl_high_laneq_u32
- vmlsl_high_n_s16
- vmlsl_high_n_s32
- vmlsl_high_n_u16
- vmlsl_high_n_u32
- vmlsl_high_s16
- vmlsl_high_s32
- vmlsl_high_s8
- vmlsl_high_u16
- vmlsl_high_u32
- vmlsl_high_u8
- vmlsl_lane_s16
- vmlsl_lane_s32
- vmlsl_lane_u16
- vmlsl_lane_u32
- vmlsl_laneq_s16
- vmlsl_laneq_s32
- vmlsl_laneq_u16
- vmlsl_laneq_u32
- vmlsl_n_s16
- vmlsl_n_s32
- vmlsl_n_u16
- vmlsl_n_u32
- vmlsl_s16
- vmlsl_s32
- vmlsl_s8
- vmlsl_u16
- vmlsl_u32
- vmlsl_u8
- vmlsq_f32
- vmlsq_f64
- vmlsq_lane_f32
- vmlsq_lane_s16
- vmlsq_lane_s32
- vmlsq_lane_u16
- vmlsq_lane_u32
- vmlsq_laneq_f32
- vmlsq_laneq_s16
- vmlsq_laneq_s32
- vmlsq_laneq_u16
- vmlsq_laneq_u32
- vmlsq_n_f32
- vmlsq_n_s16
- vmlsq_n_s32
- vmlsq_n_u16
- vmlsq_n_u32
- vmlsq_s16
- vmlsq_s32
- vmlsq_s8
- vmlsq_u16
- vmlsq_u32
- vmlsq_u8
- vmov_n_f32
- vmov_n_f64
- vmov_n_s16
- vmov_n_s32
- vmov_n_s64
- vmov_n_s8
- vmov_n_u16
- vmov_n_u32
- vmov_n_u64
- vmov_n_u8
- vmovl_high_s16
- vmovl_high_s32
- vmovl_high_s8
- vmovl_high_u16
- vmovl_high_u32
- vmovl_high_u8
- vmovl_s16
- vmovl_s32
- vmovl_s8
- vmovl_u16
- vmovl_u32
- vmovl_u8
- vmovn_high_s16
- vmovn_high_s32
- vmovn_high_s64
- vmovn_high_u16
- vmovn_high_u32
- vmovn_high_u64
- vmovn_s16
- vmovn_s32
- vmovn_s64
- vmovn_u16
- vmovn_u32
- vmovn_u64
- vmovq_n_f32
- vmovq_n_f64
- vmovq_n_s16
- vmovq_n_s32
- vmovq_n_s64
- vmovq_n_s8
- vmovq_n_u16
- vmovq_n_u32
- vmovq_n_u64
- vmovq_n_u8
- vmul_f32
- vmul_f64
- vmul_lane_f32
- vmul_lane_f64
- vmul_lane_s16
- vmul_lane_s32
- vmul_lane_u16
- vmul_lane_u32
- vmul_laneq_f32
- vmul_laneq_f64
- vmul_laneq_s16
- vmul_laneq_s32
- vmul_laneq_u16
- vmul_laneq_u32
- vmul_n_f32
- vmul_n_f64
- vmul_n_s16
- vmul_n_s32
- vmul_n_u16
- vmul_n_u32
- vmul_s16
- vmul_s32
- vmul_s8
- vmul_u16
- vmul_u32
- vmul_u8
- vmuld_lane_f64
- vmuld_laneq_f64
- vmull_high_lane_s16
- vmull_high_lane_s32
- vmull_high_lane_u16
- vmull_high_lane_u32
- vmull_high_laneq_s16
- vmull_high_laneq_s32
- vmull_high_laneq_u16
- vmull_high_laneq_u32
- vmull_high_n_s16
- vmull_high_n_s32
- vmull_high_n_u16
- vmull_high_n_u32
- vmull_high_s16
- vmull_high_s32
- vmull_high_s8
- vmull_high_u16
- vmull_high_u32
- vmull_high_u8
- vmull_lane_s16
- vmull_lane_s32
- vmull_lane_u16
- vmull_lane_u32
- vmull_laneq_s16
- vmull_laneq_s32
- vmull_laneq_u16
- vmull_laneq_u32
- vmull_n_s16
- vmull_n_s32
- vmull_n_u16
- vmull_n_u32
- vmull_s16
- vmull_s32
- vmull_s8
- vmull_u16
- vmull_u32
- vmull_u8
- vmulq_f32
- vmulq_f64
- vmulq_lane_f32
- vmulq_lane_f64
- vmulq_lane_s16
- vmulq_lane_s32
- vmulq_lane_u16
- vmulq_lane_u32
- vmulq_laneq_f32
- vmulq_laneq_f64
- vmulq_laneq_s16
- vmulq_laneq_s32
- vmulq_laneq_u16
- vmulq_laneq_u32
- vmulq_n_f32
- vmulq_n_f64
- vmulq_n_s16
- vmulq_n_s32
- vmulq_n_u16
- vmulq_n_u32
- vmulq_s16
- vmulq_s32
- vmulq_s8
- vmulq_u16
- vmulq_u32
- vmulq_u8
- vmuls_lane_f32
- vmuls_laneq_f32
- vmulx_f32
- vmulx_f64
- vmulx_lane_f32
- vmulx_lane_f64
- vmulx_laneq_f32
- vmulx_laneq_f64
- vmulxd_f64
- vmulxd_lane_f64
- vmulxd_laneq_f64
- vmulxq_f32
- vmulxq_f64
- vmulxq_lane_f32
- vmulxq_lane_f64
- vmulxq_laneq_f32
- vmulxq_laneq_f64
- vmulxs_f32
- vmulxs_lane_f32
- vmulxs_laneq_f32
- vmvn_s16
- vmvn_s32
- vmvn_s8
- vmvn_u16
- vmvn_u32
- vmvn_u8
- vmvnq_s16
- vmvnq_s32
- vmvnq_s8
- vmvnq_u16
- vmvnq_u32
- vmvnq_u8
- vneg_f32
- vneg_f64
- vneg_s16
- vneg_s32
- vneg_s64
- vneg_s8
- vnegd_s64
- vnegq_f32
- vnegq_f64
- vnegq_s16
- vnegq_s32
- vnegq_s64
- vnegq_s8
- vorn_s16
- vorn_s32
- vorn_s64
- vorn_s8
- vorn_u16
- vorn_u32
- vorn_u64
- vorn_u8
- vornq_s16
- vornq_s32
- vornq_s64
- vornq_s8
- vornq_u16
- vornq_u32
- vornq_u64
- vornq_u8
- vorr_s16
- vorr_s32
- vorr_s64
- vorr_s8
- vorr_u16
- vorr_u32
- vorr_u64
- vorr_u8
- vorrq_s16
- vorrq_s32
- vorrq_s64
- vorrq_s8
- vorrq_u16
- vorrq_u32
- vorrq_u64
- vorrq_u8
- vpadal_s16
- vpadal_s32
- vpadal_s8
- vpadal_u16
- vpadal_u32
- vpadal_u8
- vpadalq_s16
- vpadalq_s32
- vpadalq_s8
- vpadalq_u16
- vpadalq_u32
- vpadalq_u8
- vpadd_f32
- vpadd_s16
- vpadd_s32
- vpadd_s8
- vpadd_u16
- vpadd_u32
- vpadd_u8
- vpaddd_f64
- vpaddd_s64
- vpaddd_u64
- vpaddl_s16
- vpaddl_s32
- vpaddl_s8
- vpaddl_u16
- vpaddl_u32
- vpaddl_u8
- vpaddlq_s16
- vpaddlq_s32
- vpaddlq_s8
- vpaddlq_u16
- vpaddlq_u32
- vpaddlq_u8
- vpaddq_f32
- vpaddq_f64
- vpaddq_s16
- vpaddq_s32
- vpaddq_s64
- vpaddq_s8
- vpaddq_u16
- vpaddq_u32
- vpaddq_u64
- vpaddq_u8
- vpadds_f32
- vpmax_f32
- vpmax_s16
- vpmax_s32
- vpmax_s8
- vpmax_u16
- vpmax_u32
- vpmax_u8
- vpmaxnm_f32
- vpmaxnmq_f32
- vpmaxnmq_f64
- vpmaxnmqd_f64
- vpmaxnms_f32
- vpmaxq_f32
- vpmaxq_f64
- vpmaxq_s16
- vpmaxq_s32
- vpmaxq_s8
- vpmaxq_u16
- vpmaxq_u32
- vpmaxq_u8
- vpmaxqd_f64
- vpmaxs_f32
- vpmin_f32
- vpmin_s16
- vpmin_s32
- vpmin_s8
- vpmin_u16
- vpmin_u32
- vpmin_u8
- vpminnm_f32
- vpminnmq_f32
- vpminnmq_f64
- vpminnmqd_f64
- vpminnms_f32
- vpminq_f32
- vpminq_f64
- vpminq_s16
- vpminq_s32
- vpminq_s8
- vpminq_u16
- vpminq_u32
- vpminq_u8
- vpminqd_f64
- vpmins_f32
- vqabs_s16
- vqabs_s32
- vqabs_s64
- vqabs_s8
- vqabsb_s8
- vqabsd_s64
- vqabsh_s16
- vqabsq_s16
- vqabsq_s32
- vqabsq_s64
- vqabsq_s8
- vqabss_s32
- vqadd_s16
- vqadd_s32
- vqadd_s64
- vqadd_s8
- vqadd_u16
- vqadd_u32
- vqadd_u64
- vqadd_u8
- vqaddb_s8
- vqaddb_u8
- vqaddd_s64
- vqaddd_u64
- vqaddh_s16
- vqaddh_u16
- vqaddq_s16
- vqaddq_s32
- vqaddq_s64
- vqaddq_s8
- vqaddq_u16
- vqaddq_u32
- vqaddq_u64
- vqaddq_u8
- vqadds_s32
- vqadds_u32
- vqdmlal_high_lane_s16
- vqdmlal_high_lane_s32
- vqdmlal_high_laneq_s16
- vqdmlal_high_laneq_s32
- vqdmlal_high_n_s16
- vqdmlal_high_n_s32
- vqdmlal_high_s16
- vqdmlal_high_s32
- vqdmlal_lane_s16
- vqdmlal_lane_s32
- vqdmlal_laneq_s16
- vqdmlal_laneq_s32
- vqdmlal_n_s16
- vqdmlal_n_s32
- vqdmlal_s16
- vqdmlal_s32
- vqdmlalh_lane_s16
- vqdmlalh_laneq_s16
- vqdmlalh_s16
- vqdmlals_lane_s32
- vqdmlals_laneq_s32
- vqdmlals_s32
- vqdmlsl_high_lane_s16
- vqdmlsl_high_lane_s32
- vqdmlsl_high_laneq_s16
- vqdmlsl_high_laneq_s32
- vqdmlsl_high_n_s16
- vqdmlsl_high_n_s32
- vqdmlsl_high_s16
- vqdmlsl_high_s32
- vqdmlsl_lane_s16
- vqdmlsl_lane_s32
- vqdmlsl_laneq_s16
- vqdmlsl_laneq_s32
- vqdmlsl_n_s16
- vqdmlsl_n_s32
- vqdmlsl_s16
- vqdmlsl_s32
- vqdmlslh_lane_s16
- vqdmlslh_laneq_s16
- vqdmlslh_s16
- vqdmlsls_lane_s32
- vqdmlsls_laneq_s32
- vqdmlsls_s32
- vqdmulh_lane_s16
- vqdmulh_lane_s32
- vqdmulh_laneq_s16
- vqdmulh_laneq_s32
- vqdmulh_n_s16
- vqdmulh_n_s32
- vqdmulh_s16
- vqdmulh_s32
- vqdmulhh_lane_s16
- vqdmulhh_laneq_s16
- vqdmulhh_s16
- vqdmulhq_lane_s16
- vqdmulhq_lane_s32
- vqdmulhq_laneq_s16
- vqdmulhq_laneq_s32
- vqdmulhq_n_s16
- vqdmulhq_n_s32
- vqdmulhq_s16
- vqdmulhq_s32
- vqdmulhs_lane_s32
- vqdmulhs_laneq_s32
- vqdmulhs_s32
- vqdmull_high_lane_s16
- vqdmull_high_lane_s32
- vqdmull_high_laneq_s16
- vqdmull_high_laneq_s32
- vqdmull_high_n_s16
- vqdmull_high_n_s32
- vqdmull_high_s16
- vqdmull_high_s32
- vqdmull_lane_s16
- vqdmull_lane_s32
- vqdmull_laneq_s16
- vqdmull_laneq_s32
- vqdmull_n_s16
- vqdmull_n_s32
- vqdmull_s16
- vqdmull_s32
- vqdmullh_lane_s16
- vqdmullh_laneq_s16
- vqdmullh_s16
- vqdmulls_lane_s32
- vqdmulls_laneq_s32
- vqdmulls_s32
- vqmovn_high_s16
- vqmovn_high_s32
- vqmovn_high_s64
- vqmovn_high_u16
- vqmovn_high_u32
- vqmovn_high_u64
- vqmovn_s16
- vqmovn_s32
- vqmovn_s64
- vqmovn_u16
- vqmovn_u32
- vqmovn_u64
- vqmovnd_s64
- vqmovnd_u64
- vqmovnh_s16
- vqmovnh_u16
- vqmovns_s32
- vqmovns_u32
- vqmovun_high_s16
- vqmovun_high_s32
- vqmovun_high_s64
- vqmovun_s16
- vqmovun_s32
- vqmovun_s64
- vqmovund_s64
- vqmovunh_s16
- vqmovuns_s32
- vqneg_s16
- vqneg_s32
- vqneg_s64
- vqneg_s8
- vqnegb_s8
- vqnegd_s64
- vqnegh_s16
- vqnegq_s16
- vqnegq_s32
- vqnegq_s64
- vqnegq_s8
- vqnegs_s32
- vqrdmlah_lane_s16
- vqrdmlah_lane_s32
- vqrdmlah_laneq_s16
- vqrdmlah_laneq_s32
- vqrdmlah_s16
- vqrdmlah_s32
- vqrdmlahh_lane_s16
- vqrdmlahh_laneq_s16
- vqrdmlahh_s16
- vqrdmlahq_lane_s16
- vqrdmlahq_lane_s32
- vqrdmlahq_laneq_s16
- vqrdmlahq_laneq_s32
- vqrdmlahq_s16
- vqrdmlahq_s32
- vqrdmlahs_lane_s32
- vqrdmlahs_s32
- vqrdmlsh_lane_s16
- vqrdmlsh_lane_s32
- vqrdmlsh_laneq_s16
- vqrdmlsh_laneq_s32
- vqrdmlsh_s16
- vqrdmlsh_s32
- vqrdmlshh_lane_s16
- vqrdmlshh_laneq_s16
- vqrdmlshh_s16
- vqrdmlshq_lane_s16
- vqrdmlshq_lane_s32
- vqrdmlshq_laneq_s16
- vqrdmlshq_laneq_s32
- vqrdmlshq_s16
- vqrdmlshq_s32
- vqrdmlshs_lane_s32
- vqrdmlshs_s32
- vqrdmulh_lane_s16
- vqrdmulh_lane_s32
- vqrdmulh_laneq_s16
- vqrdmulh_laneq_s32
- vqrdmulh_n_s16
- vqrdmulh_n_s32
- vqrdmulh_s16
- vqrdmulh_s32
- vqrdmulhh_lane_s16
- vqrdmulhh_laneq_s16
- vqrdmulhh_s16
- vqrdmulhq_lane_s16
- vqrdmulhq_lane_s32
- vqrdmulhq_laneq_s16
- vqrdmulhq_laneq_s32
- vqrdmulhq_n_s16
- vqrdmulhq_n_s32
- vqrdmulhq_s16
- vqrdmulhq_s32
- vqrdmulhs_lane_s32
- vqrdmulhs_laneq_s32
- vqrdmulhs_s32
- vqrshl_s16
- vqrshl_s32
- vqrshl_s64
- vqrshl_s8
- vqrshl_u16
- vqrshl_u32
- vqrshl_u64
- vqrshl_u8
- vqrshlb_s8
- vqrshlb_u8
- vqrshld_s64
- vqrshld_u64
- vqrshlh_s16
- vqrshlh_u16
- vqrshlq_s16
- vqrshlq_s32
- vqrshlq_s64
- vqrshlq_s8
- vqrshlq_u16
- vqrshlq_u32
- vqrshlq_u64
- vqrshlq_u8
- vqrshls_s32
- vqrshls_u32
- vqrshrn_high_n_s16
- vqrshrn_high_n_s32
- vqrshrn_high_n_s64
- vqrshrn_high_n_u16
- vqrshrn_high_n_u32
- vqrshrn_high_n_u64
- vqrshrn_n_s16
- vqrshrn_n_s32
- vqrshrn_n_s64
- vqrshrn_n_u16
- vqrshrn_n_u32
- vqrshrn_n_u64
- vqrshrnd_n_s64
- vqrshrnd_n_u64
- vqrshrnh_n_s16
- vqrshrnh_n_u16
- vqrshrns_n_s32
- vqrshrns_n_u32
- vqrshrun_high_n_s16
- vqrshrun_high_n_s32
- vqrshrun_high_n_s64
- vqrshrun_n_s16
- vqrshrun_n_s32
- vqrshrun_n_s64
- vqrshrund_n_s64
- vqrshrunh_n_s16
- vqrshruns_n_s32
- vqshl_n_s16
- vqshl_n_s32
- vqshl_n_s64
- vqshl_n_s8
- vqshl_n_u16
- vqshl_n_u32
- vqshl_n_u64
- vqshl_n_u8
- vqshl_s16
- vqshl_s32
- vqshl_s64
- vqshl_s8
- vqshl_u16
- vqshl_u32
- vqshl_u64
- vqshl_u8
- vqshlb_n_s8
- vqshlb_n_u8
- vqshlb_s8
- vqshlb_u8
- vqshld_n_s64
- vqshld_n_u64
- vqshld_s64
- vqshld_u64
- vqshlh_n_s16
- vqshlh_n_u16
- vqshlh_s16
- vqshlh_u16
- vqshlq_n_s16
- vqshlq_n_s32
- vqshlq_n_s64
- vqshlq_n_s8
- vqshlq_n_u16
- vqshlq_n_u32
- vqshlq_n_u64
- vqshlq_n_u8
- vqshlq_s16
- vqshlq_s32
- vqshlq_s64
- vqshlq_s8
- vqshlq_u16
- vqshlq_u32
- vqshlq_u64
- vqshlq_u8
- vqshls_n_s32
- vqshls_n_u32
- vqshls_s32
- vqshls_u32
- vqshlu_n_s16
- vqshlu_n_s32
- vqshlu_n_s64
- vqshlu_n_s8
- vqshlub_n_s8
- vqshlud_n_s64
- vqshluh_n_s16
- vqshluq_n_s16
- vqshluq_n_s32
- vqshluq_n_s64
- vqshluq_n_s8
- vqshlus_n_s32
- vqshrn_high_n_s16
- vqshrn_high_n_s32
- vqshrn_high_n_s64
- vqshrn_high_n_u16
- vqshrn_high_n_u32
- vqshrn_high_n_u64
- vqshrn_n_s16
- vqshrn_n_s32
- vqshrn_n_s64
- vqshrn_n_u16
- vqshrn_n_u32
- vqshrn_n_u64
- vqshrnd_n_s64
- vqshrnd_n_u64
- vqshrnh_n_s16
- vqshrnh_n_u16
- vqshrns_n_s32
- vqshrns_n_u32
- vqshrun_high_n_s16
- vqshrun_high_n_s32
- vqshrun_high_n_s64
- vqshrun_n_s16
- vqshrun_n_s32
- vqshrun_n_s64
- vqshrund_n_s64
- vqshrunh_n_s16
- vqshruns_n_s32
- vqsub_s16
- vqsub_s32
- vqsub_s64
- vqsub_s8
- vqsub_u16
- vqsub_u32
- vqsub_u64
- vqsub_u8
- vqsubb_s8
- vqsubb_u8
- vqsubd_s64
- vqsubd_u64
- vqsubh_s16
- vqsubh_u16
- vqsubq_s16
- vqsubq_s32
- vqsubq_s64
- vqsubq_s8
- vqsubq_u16
- vqsubq_u32
- vqsubq_u64
- vqsubq_u8
- vqsubs_s32
- vqsubs_u32
- vqtbl1_s8
- vqtbl1_u8
- vqtbl1q_s8
- vqtbl1q_u8
- vqtbx1_s8
- vqtbx1_u8
- vqtbx1q_s8
- vqtbx1q_u8
- vraddhn_high_s16
- vraddhn_high_s32
- vraddhn_high_s64
- vraddhn_high_u16
- vraddhn_high_u32
- vraddhn_high_u64
- vraddhn_s16
- vraddhn_s32
- vraddhn_s64
- vraddhn_u16
- vraddhn_u32
- vraddhn_u64
- vrbit_s8
- vrbit_u8
- vrbitq_s8
- vrbitq_u8
- vrecpe_f32
- vrecpe_f64
- vrecpe_u32
- vrecped_f64
- vrecpeq_f32
- vrecpeq_f64
- vrecpeq_u32
- vrecpes_f32
- vrecps_f32
- vrecps_f64
- vrecpsd_f64
- vrecpsq_f32
- vrecpsq_f64
- vrecpss_f32
- vrecpxd_f64
- vrecpxs_f32
- vrev16_s8
- vrev16_u8
- vrev16q_s8
- vrev16q_u8
- vrev32_s16
- vrev32_s8
- vrev32_u16
- vrev32_u8
- vrev32q_s16
- vrev32q_s8
- vrev32q_u16
- vrev32q_u8
- vrev64_f32
- vrev64_s16
- vrev64_s32
- vrev64_s8
- vrev64_u16
- vrev64_u32
- vrev64_u8
- vrev64q_f32
- vrev64q_s16
- vrev64q_s32
- vrev64q_s8
- vrev64q_u16
- vrev64q_u32
- vrev64q_u8
- vrhadd_s16
- vrhadd_s32
- vrhadd_s8
- vrhadd_u16
- vrhadd_u32
- vrhadd_u8
- vrhaddq_s16
- vrhaddq_s32
- vrhaddq_s8
- vrhaddq_u16
- vrhaddq_u32
- vrhaddq_u8
- vrnd_f32
- vrnd_f64
- vrnda_f32
- vrnda_f64
- vrndaq_f32
- vrndaq_f64
- vrndi_f32
- vrndi_f64
- vrndiq_f32
- vrndiq_f64
- vrndm_f32
- vrndm_f64
- vrndmq_f32
- vrndmq_f64
- vrndn_f32
- vrndn_f64
- vrndnq_f32
- vrndnq_f64
- vrndns_f32
- vrndp_f32
- vrndp_f64
- vrndpq_f32
- vrndpq_f64
- vrndq_f32
- vrndq_f64
- vrndx_f32
- vrndx_f64
- vrndxq_f32
- vrndxq_f64
- vrshl_s16
- vrshl_s32
- vrshl_s64
- vrshl_s8
- vrshl_u16
- vrshl_u32
- vrshl_u64
- vrshl_u8
- vrshld_s64
- vrshld_u64
- vrshlq_s16
- vrshlq_s32
- vrshlq_s64
- vrshlq_s8
- vrshlq_u16
- vrshlq_u32
- vrshlq_u64
- vrshlq_u8
- vrshr_n_s16
- vrshr_n_s32
- vrshr_n_s64
- vrshr_n_s8
- vrshr_n_u16
- vrshr_n_u32
- vrshr_n_u64
- vrshr_n_u8
- vrshrd_n_s64
- vrshrd_n_u64
- vrshrn_high_n_s16
- vrshrn_high_n_s32
- vrshrn_high_n_s64
- vrshrn_high_n_u16
- vrshrn_high_n_u32
- vrshrn_high_n_u64
- vrshrn_n_s16
- vrshrn_n_s32
- vrshrn_n_s64
- vrshrn_n_u16
- vrshrn_n_u32
- vrshrn_n_u64
- vrshrq_n_s16
- vrshrq_n_s32
- vrshrq_n_s64
- vrshrq_n_s8
- vrshrq_n_u16
- vrshrq_n_u32
- vrshrq_n_u64
- vrshrq_n_u8
- vrsqrte_f32
- vrsqrte_f64
- vrsqrte_u32
- vrsqrted_f64
- vrsqrteq_f32
- vrsqrteq_f64
- vrsqrteq_u32
- vrsqrtes_f32
- vrsqrts_f32
- vrsqrts_f64
- vrsqrtsd_f64
- vrsqrtsq_f32
- vrsqrtsq_f64
- vrsqrtss_f32
- vrsra_n_s16
- vrsra_n_s32
- vrsra_n_s64
- vrsra_n_s8
- vrsra_n_u16
- vrsra_n_u32
- vrsra_n_u64
- vrsra_n_u8
- vrsrad_n_s64
- vrsrad_n_u64
- vrsraq_n_s16
- vrsraq_n_s32
- vrsraq_n_s64
- vrsraq_n_s8
- vrsraq_n_u16
- vrsraq_n_u32
- vrsraq_n_u64
- vrsraq_n_u8
- vrsubhn_high_s16
- vrsubhn_high_s32
- vrsubhn_high_s64
- vrsubhn_high_u16
- vrsubhn_high_u32
- vrsubhn_high_u64
- vrsubhn_s16
- vrsubhn_s32
- vrsubhn_s64
- vrsubhn_u16
- vrsubhn_u32
- vrsubhn_u64
- vset_lane_f32
- vset_lane_f64
- vset_lane_s16
- vset_lane_s32
- vset_lane_s64
- vset_lane_s8
- vset_lane_u16
- vset_lane_u32
- vset_lane_u64
- vset_lane_u8
- vsetq_lane_f32
- vsetq_lane_f64
- vsetq_lane_s16
- vsetq_lane_s32
- vsetq_lane_s64
- vsetq_lane_s8
- vsetq_lane_u16
- vsetq_lane_u32
- vsetq_lane_u64
- vsetq_lane_u8
- vsha1cq_u32
- vsha1h_u32
- vsha1mq_u32
- vsha1pq_u32
- vsha1su0q_u32
- vsha1su1q_u32
- vsha256h2q_u32
- vsha256hq_u32
- vsha256su0q_u32
- vsha256su1q_u32
- vshl_n_s16
- vshl_n_s32
- vshl_n_s64
- vshl_n_s8
- vshl_n_u16
- vshl_n_u32
- vshl_n_u64
- vshl_n_u8
- vshl_s16
- vshl_s32
- vshl_s64
- vshl_s8
- vshl_u16
- vshl_u32
- vshl_u64
- vshl_u8
- vshld_n_s64
- vshld_n_u64
- vshld_s64
- vshld_u64
- vshll_high_n_s16
- vshll_high_n_s32
- vshll_high_n_s8
- vshll_high_n_u16
- vshll_high_n_u32
- vshll_high_n_u8
- vshll_n_s16
- vshll_n_s32
- vshll_n_s8
- vshll_n_u16
- vshll_n_u32
- vshll_n_u8
- vshlq_n_s16
- vshlq_n_s32
- vshlq_n_s64
- vshlq_n_s8
- vshlq_n_u16
- vshlq_n_u32
- vshlq_n_u64
- vshlq_n_u8
- vshlq_s16
- vshlq_s32
- vshlq_s64
- vshlq_s8
- vshlq_u16
- vshlq_u32
- vshlq_u64
- vshlq_u8
- vshr_n_s16
- vshr_n_s32
- vshr_n_s64
- vshr_n_s8
- vshr_n_u16
- vshr_n_u32
- vshr_n_u64
- vshr_n_u8
- vshrd_n_s64
- vshrd_n_u64
- vshrn_high_n_s16
- vshrn_high_n_s32
- vshrn_high_n_s64
- vshrn_high_n_u16
- vshrn_high_n_u32
- vshrn_high_n_u64
- vshrn_n_s16
- vshrn_n_s32
- vshrn_n_s64
- vshrn_n_u16
- vshrn_n_u32
- vshrn_n_u64
- vshrq_n_s16
- vshrq_n_s32
- vshrq_n_s64
- vshrq_n_s8
- vshrq_n_u16
- vshrq_n_u32
- vshrq_n_u64
- vshrq_n_u8
- vsli_n_s16
- vsli_n_s32
- vsli_n_s64
- vsli_n_s8
- vsli_n_u16
- vsli_n_u32
- vsli_n_u64
- vsli_n_u8
- vslid_n_s64
- vslid_n_u64
- vsliq_n_s16
- vsliq_n_s32
- vsliq_n_s64
- vsliq_n_s8
- vsliq_n_u16
- vsliq_n_u32
- vsliq_n_u64
- vsliq_n_u8
- vsqadd_u16
- vsqadd_u32
- vsqadd_u64
- vsqadd_u8
- vsqaddb_u8
- vsqaddd_u64
- vsqaddh_u16
- vsqaddq_u16
- vsqaddq_u32
- vsqaddq_u64
- vsqaddq_u8
- vsqadds_u32
- vsqrt_f32
- vsqrt_f64
- vsqrtq_f32
- vsqrtq_f64
- vsra_n_s16
- vsra_n_s32
- vsra_n_s64
- vsra_n_s8
- vsra_n_u16
- vsra_n_u32
- vsra_n_u64
- vsra_n_u8
- vsrad_n_s64
- vsrad_n_u64
- vsraq_n_s16
- vsraq_n_s32
- vsraq_n_s64
- vsraq_n_s8
- vsraq_n_u16
- vsraq_n_u32
- vsraq_n_u64
- vsraq_n_u8
- vsri_n_s16
- vsri_n_s32
- vsri_n_s64
- vsri_n_s8
- vsri_n_u16
- vsri_n_u32
- vsri_n_u64
- vsri_n_u8
- vsrid_n_s64
- vsrid_n_u64
- vsriq_n_s16
- vsriq_n_s32
- vsriq_n_s64
- vsriq_n_s8
- vsriq_n_u16
- vsriq_n_u32
- vsriq_n_u64
- vsriq_n_u8
- vst1_f32
- vst1_f64
- vst1_s16
- vst1_s32
- vst1_s64
- vst1_s8
- vst1_u16
- vst1_u32
- vst1_u64
- vst1_u8
- vst1q_f32
- vst1q_f64
- vst1q_s16
- vst1q_s32
- vst1q_s64
- vst1q_s8
- vst1q_u16
- vst1q_u32
- vst1q_u64
- vst1q_u8
- vsub_f32
- vsub_f64
- vsub_s16
- vsub_s32
- vsub_s64
- vsub_s8
- vsub_u16
- vsub_u32
- vsub_u64
- vsub_u8
- vsubd_s64
- vsubd_u64
- vsubhn_high_s16
- vsubhn_high_s32
- vsubhn_high_s64
- vsubhn_high_u16
- vsubhn_high_u32
- vsubhn_high_u64
- vsubhn_s16
- vsubhn_s32
- vsubhn_s64
- vsubhn_u16
- vsubhn_u32
- vsubhn_u64
- vsubl_high_s16
- vsubl_high_s32
- vsubl_high_s8
- vsubl_high_u16
- vsubl_high_u32
- vsubl_high_u8
- vsubl_s16
- vsubl_s32
- vsubl_s8
- vsubl_u16
- vsubl_u32
- vsubl_u8
- vsubq_f32
- vsubq_f64
- vsubq_s16
- vsubq_s32
- vsubq_s64
- vsubq_s8
- vsubq_u16
- vsubq_u32
- vsubq_u64
- vsubq_u8
- vsubw_high_s16
- vsubw_high_s32
- vsubw_high_s8
- vsubw_high_u16
- vsubw_high_u32
- vsubw_high_u8
- vsubw_s16
- vsubw_s32
- vsubw_s8
- vsubw_u16
- vsubw_u32
- vsubw_u8
- vtbl1_s8
- vtbl1_u8
- vtbx1_s8
- vtbx1_u8
- vtrn1_f32
- vtrn1_s16
- vtrn1_s32
- vtrn1_s8
- vtrn1_u16
- vtrn1_u32
- vtrn1_u8
- vtrn1q_f32
- vtrn1q_f64
- vtrn1q_s16
- vtrn1q_s32
- vtrn1q_s64
- vtrn1q_s8
- vtrn1q_u16
- vtrn1q_u32
- vtrn1q_u64
- vtrn1q_u8
- vtrn2_f32
- vtrn2_s16
- vtrn2_s32
- vtrn2_s8
- vtrn2_u16
- vtrn2_u32
- vtrn2_u8
- vtrn2q_f32
- vtrn2q_f64
- vtrn2q_s16
- vtrn2q_s32
- vtrn2q_s64
- vtrn2q_s8
- vtrn2q_u16
- vtrn2q_u32
- vtrn2q_u64
- vtrn2q_u8
- vtst_s16
- vtst_s32
- vtst_s64
- vtst_s8
- vtst_u16
- vtst_u32
- vtst_u64
- vtst_u8
- vtstd_s64
- vtstd_u64
- vtstq_s16
- vtstq_s32
- vtstq_s64
- vtstq_s8
- vtstq_u16
- vtstq_u32
- vtstq_u64
- vtstq_u8
- vuqadd_s16
- vuqadd_s32
- vuqadd_s64
- vuqadd_s8
- vuqaddb_s8
- vuqaddd_s64
- vuqaddh_s16
- vuqaddq_s16
- vuqaddq_s32
- vuqaddq_s64
- vuqaddq_s8
- vuqadds_s32
- vuzp1_f32
- vuzp1_s16
- vuzp1_s32
- vuzp1_s8
- vuzp1_u16
- vuzp1_u32
- vuzp1_u8
- vuzp1q_f32
- vuzp1q_f64
- vuzp1q_s16
- vuzp1q_s32
- vuzp1q_s64
- vuzp1q_s8
- vuzp1q_u16
- vuzp1q_u32
- vuzp1q_u64
- vuzp1q_u8
- vuzp2_f32
- vuzp2_s16
- vuzp2_s32
- vuzp2_s8
- vuzp2_u16
- vuzp2_u32
- vuzp2_u8
- vuzp2q_f32
- vuzp2q_f64
- vuzp2q_s16
- vuzp2q_s32
- vuzp2q_s64
- vuzp2q_s8
- vuzp2q_u16
- vuzp2q_u32
- vuzp2q_u64
- vuzp2q_u8
- vzip1_f32
- vzip1_s16
- vzip1_s32
- vzip1_s8
- vzip1_u16
- vzip1_u32
- vzip1_u8
- vzip1q_f32
- vzip1q_f64
- vzip1q_s16
- vzip1q_s32
- vzip1q_s64
- vzip1q_s8
- vzip1q_u16
- vzip1q_u32
- vzip1q_u64
- vzip1q_u8
- vzip2_f32
- vzip2_s16
- vzip2_s32
- vzip2_s8
- vzip2_u16
- vzip2_u32
- vzip2_u8
- vzip2q_f32
- vzip2q_f64
- vzip2q_s16
- vzip2q_s32
- vzip2q_s64
- vzip2q_s8
- vzip2q_u16
- vzip2q_u32
- vzip2q_u64
- vzip2q_u8
- Common
- X86
- X86.Avx
- Properties
- Methods
- broadcast_ss
- cmp_pd
- cmp_ps
- cmp_sd
- cmp_ss
- maskload_pd
- maskload_ps
- maskstore_pd
- maskstore_ps
- mm256_add_pd
- mm256_add_ps
- mm256_addsub_pd
- mm256_addsub_ps
- mm256_and_pd
- mm256_and_ps
- mm256_andnot_pd
- mm256_andnot_ps
- mm256_blend_pd
- mm256_blend_ps
- mm256_blendv_pd
- mm256_blendv_ps
- mm256_broadcast_pd
- mm256_broadcast_ps
- mm256_broadcast_sd
- mm256_broadcast_ss
- mm256_castpd128_pd256
- mm256_castpd256_pd128
- mm256_castpd_ps
- mm256_castpd_si256
- mm256_castps128_ps256
- mm256_castps256_ps128
- mm256_castps_pd
- mm256_castps_si256
- mm256_castsi128_si256
- mm256_castsi256_pd
- mm256_castsi256_ps
- mm256_castsi256_si128
- mm256_ceil_pd
- mm256_ceil_ps
- mm256_cmp_pd
- mm256_cmp_ps
- mm256_cvtepi32_pd
- mm256_cvtepi32_ps
- mm256_cvtpd_epi32
- mm256_cvtpd_ps
- mm256_cvtps_epi32
- mm256_cvtps_pd
- mm256_cvtss_f32
- mm256_cvttpd_epi32
- mm256_cvttps_epi32
- mm256_div_pd
- mm256_div_ps
- mm256_dp_ps
- mm256_extract_epi32
- mm256_extract_epi64
- mm256_extractf128_pd
- mm256_extractf128_ps
- mm256_extractf128_si256
- mm256_floor_pd
- mm256_floor_ps
- mm256_hadd_pd
- mm256_hadd_ps
- mm256_hsub_pd
- mm256_hsub_ps
- mm256_insert_epi16
- mm256_insert_epi32
- mm256_insert_epi64
- mm256_insert_epi8
- mm256_insertf128_pd
- mm256_insertf128_ps
- mm256_insertf128_si256
- mm256_lddqu_si256
- mm256_load_pd
- mm256_load_ps
- mm256_load_si256
- mm256_loadu2_m128
- mm256_loadu2_m128d
- mm256_loadu2_m128i
- mm256_loadu_pd
- mm256_loadu_ps
- mm256_loadu_si256
- mm256_maskload_pd
- mm256_maskload_ps
- mm256_maskstore_pd
- mm256_maskstore_ps
- mm256_max_pd
- mm256_max_ps
- mm256_min_pd
- mm256_min_ps
- mm256_movedup_pd
- mm256_movehdup_ps
- mm256_moveldup_ps
- mm256_movemask_pd
- mm256_movemask_ps
- mm256_mul_pd
- mm256_mul_ps
- mm256_or_pd
- mm256_or_ps
- mm256_permute2f128_pd
- mm256_permute2f128_ps
- mm256_permute2f128_si256
- mm256_permute_pd
- mm256_permute_ps
- mm256_permutevar_pd
- mm256_permutevar_ps
- mm256_rcp_ps
- mm256_round_pd
- mm256_round_ps
- mm256_rsqrt_ps
- mm256_set1_epi16
- mm256_set1_epi32
- mm256_set1_epi64x
- mm256_set1_epi8
- mm256_set1_pd
- mm256_set1_ps
- mm256_set_epi16
- mm256_set_epi32
- mm256_set_epi64x
- mm256_set_epi8
- mm256_set_m128
- mm256_set_m128d
- mm256_set_m128i
- mm256_set_pd
- mm256_set_ps
- mm256_setr_epi16
- mm256_setr_epi32
- mm256_setr_epi64x
- mm256_setr_epi8
- mm256_setr_m128
- mm256_setr_m128d
- mm256_setr_m128i
- mm256_setr_pd
- mm256_setr_ps
- mm256_setzero_pd
- mm256_setzero_ps
- mm256_setzero_si256
- mm256_shuffle_pd
- mm256_shuffle_ps
- mm256_sqrt_pd
- mm256_sqrt_ps
- mm256_store_pd
- mm256_store_ps
- mm256_store_si256
- mm256_storeu2_m128
- mm256_storeu2_m128d
- mm256_storeu2_m128i
- mm256_storeu_pd
- mm256_storeu_ps
- mm256_storeu_si256
- mm256_stream_pd
- mm256_stream_ps
- mm256_stream_si256
- mm256_sub_pd
- mm256_sub_ps
- mm256_testc_pd
- mm256_testc_ps
- mm256_testc_si256
- mm256_testnzc_pd
- mm256_testnzc_ps
- mm256_testnzc_si256
- mm256_testz_pd
- mm256_testz_ps
- mm256_testz_si256
- mm256_undefined_pd
- mm256_undefined_ps
- mm256_undefined_si256
- mm256_unpackhi_pd
- mm256_unpackhi_ps
- mm256_unpacklo_pd
- mm256_unpacklo_ps
- mm256_xor_pd
- mm256_xor_ps
- mm256_zeroall
- mm256_zeroupper
- mm256_zextpd128_pd256
- mm256_zextps128_ps256
- mm256_zextsi128_si256
- permute_pd
- permute_ps
- permutevar_pd
- permutevar_ps
- testc_pd
- testc_ps
- testnzc_pd
- testnzc_ps
- testz_pd
- testz_ps
- undefined_pd
- undefined_ps
- undefined_si128
- X86.Avx.CMP
- X86.Avx2
- Properties
- Methods
- blend_epi32
- broadcastb_epi8
- broadcastd_epi32
- broadcastq_epi64
- broadcastsd_pd
- broadcastss_ps
- broadcastw_epi16
- i32gather_epi32
- i32gather_epi64
- i32gather_pd
- i32gather_ps
- i64gather_epi32
- i64gather_epi64
- i64gather_pd
- i64gather_ps
- mask_i32gather_epi32
- mask_i32gather_epi64
- mask_i32gather_pd
- mask_i32gather_ps
- mask_i64gather_epi32
- mask_i64gather_epi64
- mask_i64gather_pd
- mask_i64gather_ps
- maskload_epi32
- maskload_epi64
- maskstore_epi32
- maskstore_epi64
- mm256_abs_epi16
- mm256_abs_epi32
- mm256_abs_epi8
- mm256_add_epi16
- mm256_add_epi32
- mm256_add_epi64
- mm256_add_epi8
- mm256_adds_epi16
- mm256_adds_epi8
- mm256_adds_epu16
- mm256_adds_epu8
- mm256_alignr_epi8
- mm256_and_si256
- mm256_andnot_si256
- mm256_avg_epu16
- mm256_avg_epu8
- mm256_blend_epi16
- mm256_blend_epi32
- mm256_blendv_epi8
- mm256_broadcastb_epi8
- mm256_broadcastd_epi32
- mm256_broadcastq_epi64
- mm256_broadcastsd_pd
- mm256_broadcastsi128_si256
- mm256_broadcastss_ps
- mm256_broadcastw_epi16
- mm256_bslli_epi128
- mm256_bsrli_epi128
- mm256_cmpeq_epi16
- mm256_cmpeq_epi32
- mm256_cmpeq_epi64
- mm256_cmpeq_epi8
- mm256_cmpgt_epi16
- mm256_cmpgt_epi32
- mm256_cmpgt_epi64
- mm256_cmpgt_epi8
- mm256_cvtepi16_epi32
- mm256_cvtepi16_epi64
- mm256_cvtepi32_epi64
- mm256_cvtepi8_epi16
- mm256_cvtepi8_epi32
- mm256_cvtepi8_epi64
- mm256_cvtepu16_epi32
- mm256_cvtepu16_epi64
- mm256_cvtepu32_epi64
- mm256_cvtepu8_epi16
- mm256_cvtepu8_epi32
- mm256_cvtepu8_epi64
- mm256_cvtsd_f64
- mm256_cvtsi256_si32
- mm256_cvtsi256_si64
- mm256_extract_epi16
- mm256_extract_epi8
- mm256_extracti128_si256
- mm256_hadd_epi16
- mm256_hadd_epi32
- mm256_hadds_epi16
- mm256_hsub_epi16
- mm256_hsub_epi32
- mm256_hsubs_epi16
- mm256_i32gather_epi32
- mm256_i32gather_epi64
- mm256_i32gather_pd
- mm256_i32gather_ps
- mm256_i64gather_epi32
- mm256_i64gather_epi64
- mm256_i64gather_pd
- mm256_i64gather_ps
- mm256_inserti128_si256
- mm256_madd_epi16
- mm256_maddubs_epi16
- mm256_mask_i32gather_epi32
- mm256_mask_i32gather_epi64
- mm256_mask_i32gather_pd
- mm256_mask_i32gather_ps
- mm256_mask_i64gather_epi32
- mm256_mask_i64gather_epi64
- mm256_mask_i64gather_pd
- mm256_mask_i64gather_ps
- mm256_maskload_epi32
- mm256_maskload_epi64
- mm256_maskstore_epi32
- mm256_maskstore_epi64
- mm256_max_epi16
- mm256_max_epi32
- mm256_max_epi8
- mm256_max_epu16
- mm256_max_epu32
- mm256_max_epu8
- mm256_min_epi16
- mm256_min_epi32
- mm256_min_epi8
- mm256_min_epu16
- mm256_min_epu32
- mm256_min_epu8
- mm256_movemask_epi8
- mm256_mpsadbw_epu8
- mm256_mul_epi32
- mm256_mul_epu32
- mm256_mulhi_epi16
- mm256_mulhi_epu16
- mm256_mulhrs_epi16
- mm256_mullo_epi16
- mm256_mullo_epi32
- mm256_or_si256
- mm256_packs_epi16
- mm256_packs_epi32
- mm256_packus_epi16
- mm256_packus_epi32
- mm256_permute2x128_si256
- mm256_permute4x64_epi64
- mm256_permute4x64_pd
- mm256_permutevar8x32_epi32
- mm256_permutevar8x32_ps
- mm256_sad_epu8
- mm256_shuffle_epi32
- mm256_shuffle_epi8
- mm256_shufflehi_epi16
- mm256_shufflelo_epi16
- mm256_sign_epi16
- mm256_sign_epi32
- mm256_sign_epi8
- mm256_sll_epi16
- mm256_sll_epi32
- mm256_sll_epi64
- mm256_slli_epi16
- mm256_slli_epi32
- mm256_slli_epi64
- mm256_slli_si256
- mm256_sllv_epi32
- mm256_sllv_epi64
- mm256_sra_epi16
- mm256_sra_epi32
- mm256_srai_epi16
- mm256_srai_epi32
- mm256_srav_epi32
- mm256_srl_epi16
- mm256_srl_epi32
- mm256_srl_epi64
- mm256_srli_epi16
- mm256_srli_epi32
- mm256_srli_epi64
- mm256_srli_si256
- mm256_srlv_epi32
- mm256_srlv_epi64
- mm256_stream_load_si256
- mm256_sub_epi16
- mm256_sub_epi32
- mm256_sub_epi64
- mm256_sub_epi8
- mm256_subs_epi16
- mm256_subs_epi8
- mm256_subs_epu16
- mm256_subs_epu8
- mm256_unpackhi_epi16
- mm256_unpackhi_epi32
- mm256_unpackhi_epi64
- mm256_unpackhi_epi8
- mm256_unpacklo_epi16
- mm256_unpacklo_epi32
- mm256_unpacklo_epi64
- mm256_unpacklo_epi8
- mm256_xor_si256
- sllv_epi32
- sllv_epi64
- srav_epi32
- srlv_epi32
- srlv_epi64
- X86.Bmi1
- X86.Bmi2
- X86.F16C
- X86.Fma
- Properties
- Methods
- fmadd_pd
- fmadd_ps
- fmadd_sd
- fmadd_ss
- fmaddsub_pd
- fmaddsub_ps
- fmsub_pd
- fmsub_ps
- fmsub_sd
- fmsub_ss
- fmsubadd_pd
- fmsubadd_ps
- fnmadd_pd
- fnmadd_ps
- fnmadd_sd
- fnmadd_ss
- fnmsub_pd
- fnmsub_ps
- fnmsub_sd
- fnmsub_ss
- mm256_fmadd_pd
- mm256_fmadd_ps
- mm256_fmaddsub_pd
- mm256_fmaddsub_ps
- mm256_fmsub_pd
- mm256_fmsub_ps
- mm256_fmsubadd_pd
- mm256_fmsubadd_ps
- mm256_fnmadd_pd
- mm256_fnmadd_ps
- mm256_fnmsub_pd
- mm256_fnmsub_ps
- X86.MXCSRBits
- X86.Popcnt
- X86.RoundingMode
- X86.Sse
- Properties
- Methods
- SHUFFLE
- TRANSPOSE4_PS
- add_ps
- add_ss
- and_ps
- andnot_ps
- cmpeq_ps
- cmpeq_ss
- cmpge_ps
- cmpge_ss
- cmpgt_ps
- cmpgt_ss
- cmple_ps
- cmple_ss
- cmplt_ps
- cmplt_ss
- cmpneq_ps
- cmpneq_ss
- cmpnge_ps
- cmpnge_ss
- cmpngt_ps
- cmpngt_ss
- cmpnle_ps
- cmpnle_ss
- cmpnlt_ps
- cmpnlt_ss
- cmpord_ps
- cmpord_ss
- cmpunord_ps
- cmpunord_ss
- comieq_ss
- comige_ss
- comigt_ss
- comile_ss
- comilt_ss
- comineq_ss
- cvt_ss2si
- cvtsi32_ss
- cvtsi64_ss
- cvtss_f32
- cvtss_si32
- cvtss_si64
- cvtt_ss2si
- cvttss_si32
- cvttss_si64
- div_ps
- div_ss
- load_ps
- loadu_ps
- loadu_si16
- loadu_si64
- max_ps
- max_ss
- min_ps
- min_ss
- move_ss
- movehl_ps
- movelh_ps
- movemask_ps
- mul_ps
- mul_ss
- or_ps
- rcp_ps
- rcp_ss
- rsqrt_ps
- rsqrt_ss
- set1_ps
- set_ps
- set_ps1
- set_ss
- setr_ps
- setzero_ps
- shuffle_ps
- sqrt_ps
- sqrt_ss
- store_ps
- storeu_ps
- storeu_si16
- storeu_si64
- stream_ps
- sub_ps
- sub_ss
- ucomieq_ss
- ucomige_ss
- ucomigt_ss
- ucomile_ss
- ucomilt_ss
- ucomineq_ss
- unpackhi_ps
- unpacklo_ps
- xor_ps
- X86.Sse2
- Properties
- Methods
- SHUFFLE2
- add_epi16
- add_epi32
- add_epi64
- add_epi8
- add_pd
- add_sd
- adds_epi16
- adds_epi8
- adds_epu16
- adds_epu8
- and_pd
- and_si128
- andnot_pd
- andnot_si128
- avg_epu16
- avg_epu8
- bslli_si128
- bsrli_si128
- clflush
- cmpeq_epi16
- cmpeq_epi32
- cmpeq_epi8
- cmpeq_pd
- cmpeq_sd
- cmpge_pd
- cmpge_sd
- cmpgt_epi16
- cmpgt_epi32
- cmpgt_epi8
- cmpgt_pd
- cmpgt_sd
- cmple_pd
- cmple_sd
- cmplt_epi16
- cmplt_epi32
- cmplt_epi8
- cmplt_pd
- cmplt_sd
- cmpneq_pd
- cmpneq_sd
- cmpnge_pd
- cmpnge_sd
- cmpngt_pd
- cmpngt_sd
- cmpnle_pd
- cmpnle_sd
- cmpnlt_pd
- cmpnlt_sd
- cmpord_pd
- cmpord_sd
- cmpunord_pd
- cmpunord_sd
- comieq_sd
- comige_sd
- comigt_sd
- comile_sd
- comilt_sd
- comineq_sd
- cvtepi32_pd
- cvtepi32_ps
- cvtpd_epi32
- cvtpd_ps
- cvtps_epi32
- cvtps_pd
- cvtsd_f64
- cvtsd_si32
- cvtsd_si64
- cvtsd_si64x
- cvtsd_ss
- cvtsi128_si32
- cvtsi128_si64
- cvtsi128_si64x
- cvtsi32_sd
- cvtsi32_si128
- cvtsi64_sd
- cvtsi64_si128
- cvtsi64x_sd
- cvtsi64x_si128
- cvtss_sd
- cvttpd_epi32
- cvttps_epi32
- cvttsd_si32
- cvttsd_si64
- cvttsd_si64x
- div_pd
- div_sd
- extract_epi16
- insert_epi16
- load_si128
- loadu_si128
- loadu_si32
- madd_epi16
- max_epi16
- max_epu8
- max_pd
- max_sd
- min_epi16
- min_epu8
- min_pd
- min_sd
- move_epi64
- move_sd
- movemask_epi8
- movemask_pd
- mul_epu32
- mul_pd
- mul_sd
- mulhi_epi16
- mulhi_epu16
- mullo_epi16
- or_pd
- or_si128
- packs_epi16
- packs_epi32
- packus_epi16
- sad_epu8
- set1_epi16
- set1_epi32
- set1_epi64x
- set1_epi8
- set1_pd
- set_epi16
- set_epi32
- set_epi64x
- set_epi8
- set_pd
- set_pd1
- set_sd
- setr_epi16
- setr_epi32
- setr_epi8
- setr_pd
- setzero_si128
- shuffle_epi32
- shuffle_pd
- shufflehi_epi16
- shufflelo_epi16
- sll_epi16
- sll_epi32
- sll_epi64
- slli_epi16
- slli_epi32
- slli_epi64
- slli_si128
- sqrt_pd
- sqrt_sd
- sra_epi16
- sra_epi32
- srai_epi16
- srai_epi32
- srl_epi16
- srl_epi32
- srl_epi64
- srli_epi16
- srli_epi32
- srli_epi64
- srli_si128
- store_si128
- storeu_si128
- storeu_si32
- stream_pd
- stream_si128
- stream_si32
- stream_si64
- sub_epi16
- sub_epi32
- sub_epi64
- sub_epi8
- sub_pd
- sub_sd
- subs_epi16
- subs_epi8
- subs_epu16
- subs_epu8
- ucomieq_sd
- ucomige_sd
- ucomigt_sd
- ucomile_sd
- ucomilt_sd
- ucomineq_sd
- unpackhi_epi16
- unpackhi_epi32
- unpackhi_epi64
- unpackhi_epi8
- unpackhi_pd
- unpacklo_epi16
- unpacklo_epi32
- unpacklo_epi64
- unpacklo_epi8
- unpacklo_pd
- xor_pd
- xor_si128
- X86.Sse3
- X86.Sse4_1
- Properties
- Methods
- MK_INSERTPS_NDX
- blend_epi16
- blend_pd
- blend_ps
- blendv_epi8
- blendv_pd
- blendv_ps
- ceil_pd
- ceil_ps
- ceil_sd
- ceil_ss
- cmpeq_epi64
- cvtepi16_epi32
- cvtepi16_epi64
- cvtepi32_epi64
- cvtepi8_epi16
- cvtepi8_epi32
- cvtepi8_epi64
- cvtepu16_epi32
- cvtepu16_epi64
- cvtepu32_epi64
- cvtepu8_epi16
- cvtepu8_epi32
- cvtepu8_epi64
- dp_pd
- dp_ps
- extract_epi32
- extract_epi64
- extract_epi8
- extract_ps
- extractf_ps
- floor_pd
- floor_ps
- floor_sd
- floor_ss
- insert_epi32
- insert_epi64
- insert_epi8
- insert_ps
- max_epi32
- max_epi8
- max_epu16
- max_epu32
- min_epi32
- min_epi8
- min_epu16
- min_epu32
- minpos_epu16
- mpsadbw_epu8
- mul_epi32
- mullo_epi32
- packus_epi32
- round_pd
- round_ps
- round_sd
- round_ss
- stream_load_si128
- test_all_ones
- test_all_zeros
- test_mix_ones_zeroes
- testc_si128
- testnzc_si128
- testz_si128
- X86.Sse4_2
- X86.Sse4_2.SIDD
- X86.Ssse3
- v128
- Constructors
- Fields
- Byte0
- Byte1
- Byte10
- Byte11
- Byte12
- Byte13
- Byte14
- Byte15
- Byte2
- Byte3
- Byte4
- Byte5
- Byte6
- Byte7
- Byte8
- Byte9
- Double0
- Double1
- Float0
- Float1
- Float2
- Float3
- Hi64
- Lo64
- SByte0
- SByte1
- SByte10
- SByte11
- SByte12
- SByte13
- SByte14
- SByte15
- SByte2
- SByte3
- SByte4
- SByte5
- SByte6
- SByte7
- SByte8
- SByte9
- SInt0
- SInt1
- SInt2
- SInt3
- SLong0
- SLong1
- SShort0
- SShort1
- SShort2
- SShort3
- SShort4
- SShort5
- SShort6
- SShort7
- UInt0
- UInt1
- UInt2
- UInt3
- ULong0
- ULong1
- UShort0
- UShort1
- UShort2
- UShort3
- UShort4
- UShort5
- UShort6
- UShort7
- v256
- Constructors
- Fields
- Byte0
- Byte1
- Byte10
- Byte11
- Byte12
- Byte13
- Byte14
- Byte15
- Byte16
- Byte17
- Byte18
- Byte19
- Byte2
- Byte20
- Byte21
- Byte22
- Byte23
- Byte24
- Byte25
- Byte26
- Byte27
- Byte28
- Byte29
- Byte3
- Byte30
- Byte31
- Byte4
- Byte5
- Byte6
- Byte7
- Byte8
- Byte9
- Double0
- Double1
- Double2
- Double3
- Float0
- Float1
- Float2
- Float3
- Float4
- Float5
- Float6
- Float7
- Hi128
- Lo128
- SByte0
- SByte1
- SByte10
- SByte11
- SByte12
- SByte13
- SByte14
- SByte15
- SByte16
- SByte17
- SByte18
- SByte19
- SByte2
- SByte20
- SByte21
- SByte22
- SByte23
- SByte24
- SByte25
- SByte26
- SByte27
- SByte28
- SByte29
- SByte3
- SByte30
- SByte31
- SByte4
- SByte5
- SByte6
- SByte7
- SByte8
- SByte9
- SInt0
- SInt1
- SInt2
- SInt3
- SInt4
- SInt5
- SInt6
- SInt7
- SLong0
- SLong1
- SLong2
- SLong3
- SShort0
- SShort1
- SShort10
- SShort11
- SShort12
- SShort13
- SShort14
- SShort15
- SShort2
- SShort3
- SShort4
- SShort5
- SShort6
- SShort7
- SShort8
- SShort9
- UInt0
- UInt1
- UInt2
- UInt3
- UInt4
- UInt5
- UInt6
- UInt7
- ULong0
- ULong1
- ULong2
- ULong3
- UShort0
- UShort1
- UShort10
- UShort11
- UShort12
- UShort13
- UShort14
- UShort15
- UShort2
- UShort3
- UShort4
- UShort5
- UShort6
- UShort7
- UShort8
- UShort9
- v64