mirror of
https://github.com/bitdefender/bddisasm.git
synced 2024-12-22 22:18:09 +00:00
76d92e73c2
- Add support for AVX512-FP16 instructions, as per https://software.intel.com/content/www/us/en/develop/download/intel-avx512-fp16-architecture-specification.html - Bug fix: zeroing with no masking is not supported, so return an error if we encounter such encodings - Bug fix: ignore VEX/EVEX.W field outside 64 bit mode for some instructions - Several other minor fixes and improvements
189 lines
6.5 KiB
NASM
bits 64

; AVX-512 CD (Conflict Detection) instruction test vectors for bddisasm.
; Exercises VPBROADCASTMB2Q/VPBROADCASTMW2D, VPCONFLICTD/Q and VPLZCNTD/Q
; across XMM/YMM/ZMM operand widths, with register and memory sources,
; embedded broadcast ({1toN}), write-masking ({k5}) and zero-masking ({k5}{z}).
; High registers (ymm16, zmm24, zmm31) force EVEX encoding of the extended
; register file; [rbx+r11*8±256] exercises SIB + 8-bit compressed displacement.

; mask-register-to-vector broadcasts (source is a k-register, no memory forms)
vpbroadcastmb2q xmm2, k1
vpbroadcastmb2q ymm16, k1
vpbroadcastmb2q zmm24, k1
vpbroadcastmw2d xmm2, k1
vpbroadcastmw2d ymm16, k1
vpbroadcastmw2d zmm24, k1

; vpconflictd — dword conflict detection (broadcast granularity: 1to4/1to8/1to16)
vpconflictd xmm2, xmm0
vpconflictd xmm2, [rbx]
vpconflictd xmm2, [rbx]{1to4}
vpconflictd xmm2, [rbx+r11*8+256]
vpconflictd xmm2, [rbx+r11*8-256]
vpconflictd xmm2{k5}, xmm0
vpconflictd xmm2{k5}, [rbx]
vpconflictd xmm2{k5}, [rbx]{1to4}
vpconflictd xmm2{k5}, [rbx+r11*8+256]
vpconflictd xmm2{k5}, [rbx+r11*8-256]
vpconflictd xmm2{k5}{z}, xmm0
vpconflictd xmm2{k5}{z}, [rbx]
vpconflictd xmm2{k5}{z}, [rbx]{1to4}
vpconflictd xmm2{k5}{z}, [rbx+r11*8+256]
vpconflictd xmm2{k5}{z}, [rbx+r11*8-256]
vpconflictd ymm16, ymm15
vpconflictd ymm16, [rbx]
vpconflictd ymm16, [rbx]{1to8}
vpconflictd ymm16, [rbx+r11*8+256]
vpconflictd ymm16, [rbx+r11*8-256]
vpconflictd ymm16{k5}, ymm15
vpconflictd ymm16{k5}, [rbx]
vpconflictd ymm16{k5}, [rbx]{1to8}
vpconflictd ymm16{k5}, [rbx+r11*8+256]
vpconflictd ymm16{k5}, [rbx+r11*8-256]
vpconflictd ymm16{k5}{z}, ymm15
vpconflictd ymm16{k5}{z}, [rbx]
vpconflictd ymm16{k5}{z}, [rbx]{1to8}
vpconflictd ymm16{k5}{z}, [rbx+r11*8+256]
vpconflictd ymm16{k5}{z}, [rbx+r11*8-256]
vpconflictd zmm24, zmm31
vpconflictd zmm24, [rbx]
vpconflictd zmm24, [rbx]{1to16}
vpconflictd zmm24, [rbx+r11*8+256]
vpconflictd zmm24, [rbx+r11*8-256]
vpconflictd zmm24{k5}, zmm31
vpconflictd zmm24{k5}, [rbx]
vpconflictd zmm24{k5}, [rbx]{1to16}
vpconflictd zmm24{k5}, [rbx+r11*8+256]
vpconflictd zmm24{k5}, [rbx+r11*8-256]
vpconflictd zmm24{k5}{z}, zmm31
vpconflictd zmm24{k5}{z}, [rbx]
vpconflictd zmm24{k5}{z}, [rbx]{1to16}
vpconflictd zmm24{k5}{z}, [rbx+r11*8+256]
vpconflictd zmm24{k5}{z}, [rbx+r11*8-256]

; vpconflictq — qword conflict detection (broadcast granularity: 1to2/1to4/1to8)
vpconflictq xmm2, xmm0
vpconflictq xmm2, [rbx]
vpconflictq xmm2, [rbx]{1to2}
vpconflictq xmm2, [rbx+r11*8+256]
vpconflictq xmm2, [rbx+r11*8-256]
vpconflictq xmm2{k5}, xmm0
vpconflictq xmm2{k5}, [rbx]
vpconflictq xmm2{k5}, [rbx]{1to2}
vpconflictq xmm2{k5}, [rbx+r11*8+256]
vpconflictq xmm2{k5}, [rbx+r11*8-256]
vpconflictq xmm2{k5}{z}, xmm0
vpconflictq xmm2{k5}{z}, [rbx]
vpconflictq xmm2{k5}{z}, [rbx]{1to2}
vpconflictq xmm2{k5}{z}, [rbx+r11*8+256]
vpconflictq xmm2{k5}{z}, [rbx+r11*8-256]
vpconflictq ymm16, ymm15
vpconflictq ymm16, [rbx]
vpconflictq ymm16, [rbx]{1to4}
vpconflictq ymm16, [rbx+r11*8+256]
vpconflictq ymm16, [rbx+r11*8-256]
vpconflictq ymm16{k5}, ymm15
vpconflictq ymm16{k5}, [rbx]
vpconflictq ymm16{k5}, [rbx]{1to4}
vpconflictq ymm16{k5}, [rbx+r11*8+256]
vpconflictq ymm16{k5}, [rbx+r11*8-256]
vpconflictq ymm16{k5}{z}, ymm15
vpconflictq ymm16{k5}{z}, [rbx]
vpconflictq ymm16{k5}{z}, [rbx]{1to4}
vpconflictq ymm16{k5}{z}, [rbx+r11*8+256]
vpconflictq ymm16{k5}{z}, [rbx+r11*8-256]
vpconflictq zmm24, zmm31
vpconflictq zmm24, [rbx]
vpconflictq zmm24, [rbx]{1to8}
vpconflictq zmm24, [rbx+r11*8+256]
vpconflictq zmm24, [rbx+r11*8-256]
vpconflictq zmm24{k5}, zmm31
vpconflictq zmm24{k5}, [rbx]
vpconflictq zmm24{k5}, [rbx]{1to8}
vpconflictq zmm24{k5}, [rbx+r11*8+256]
vpconflictq zmm24{k5}, [rbx+r11*8-256]
vpconflictq zmm24{k5}{z}, zmm31
vpconflictq zmm24{k5}{z}, [rbx]
vpconflictq zmm24{k5}{z}, [rbx]{1to8}
vpconflictq zmm24{k5}{z}, [rbx+r11*8+256]
vpconflictq zmm24{k5}{z}, [rbx+r11*8-256]

; vplzcntd — dword leading-zero count (broadcast granularity: 1to4/1to8/1to16)
vplzcntd xmm2, xmm0
vplzcntd xmm2, [rbx]
vplzcntd xmm2, [rbx]{1to4}
vplzcntd xmm2, [rbx+r11*8+256]
vplzcntd xmm2, [rbx+r11*8-256]
vplzcntd xmm2{k5}, xmm0
vplzcntd xmm2{k5}, [rbx]
vplzcntd xmm2{k5}, [rbx]{1to4}
vplzcntd xmm2{k5}, [rbx+r11*8+256]
vplzcntd xmm2{k5}, [rbx+r11*8-256]
vplzcntd xmm2{k5}{z}, xmm0
vplzcntd xmm2{k5}{z}, [rbx]
vplzcntd xmm2{k5}{z}, [rbx]{1to4}
vplzcntd xmm2{k5}{z}, [rbx+r11*8+256]
vplzcntd xmm2{k5}{z}, [rbx+r11*8-256]
vplzcntd ymm16, ymm15
vplzcntd ymm16, [rbx]
vplzcntd ymm16, [rbx]{1to8}
vplzcntd ymm16, [rbx+r11*8+256]
vplzcntd ymm16, [rbx+r11*8-256]
vplzcntd ymm16{k5}, ymm15
vplzcntd ymm16{k5}, [rbx]
vplzcntd ymm16{k5}, [rbx]{1to8}
vplzcntd ymm16{k5}, [rbx+r11*8+256]
vplzcntd ymm16{k5}, [rbx+r11*8-256]
vplzcntd ymm16{k5}{z}, ymm15
vplzcntd ymm16{k5}{z}, [rbx]
vplzcntd ymm16{k5}{z}, [rbx]{1to8}
vplzcntd ymm16{k5}{z}, [rbx+r11*8+256]
vplzcntd ymm16{k5}{z}, [rbx+r11*8-256]
vplzcntd zmm24, zmm31
vplzcntd zmm24, [rbx]
vplzcntd zmm24, [rbx]{1to16}
vplzcntd zmm24, [rbx+r11*8+256]
vplzcntd zmm24, [rbx+r11*8-256]
vplzcntd zmm24{k5}, zmm31
vplzcntd zmm24{k5}, [rbx]
vplzcntd zmm24{k5}, [rbx]{1to16}
vplzcntd zmm24{k5}, [rbx+r11*8+256]
vplzcntd zmm24{k5}, [rbx+r11*8-256]
vplzcntd zmm24{k5}{z}, zmm31
vplzcntd zmm24{k5}{z}, [rbx]
vplzcntd zmm24{k5}{z}, [rbx]{1to16}
vplzcntd zmm24{k5}{z}, [rbx+r11*8+256]
vplzcntd zmm24{k5}{z}, [rbx+r11*8-256]

; vplzcntq — qword leading-zero count (broadcast granularity: 1to2/1to4/1to8)
vplzcntq xmm2, xmm0
vplzcntq xmm2, [rbx]
vplzcntq xmm2, [rbx]{1to2}
vplzcntq xmm2, [rbx+r11*8+256]
vplzcntq xmm2, [rbx+r11*8-256]
vplzcntq xmm2{k5}, xmm0
vplzcntq xmm2{k5}, [rbx]
vplzcntq xmm2{k5}, [rbx]{1to2}
vplzcntq xmm2{k5}, [rbx+r11*8+256]
vplzcntq xmm2{k5}, [rbx+r11*8-256]
vplzcntq xmm2{k5}{z}, xmm0
vplzcntq xmm2{k5}{z}, [rbx]
vplzcntq xmm2{k5}{z}, [rbx]{1to2}
vplzcntq xmm2{k5}{z}, [rbx+r11*8+256]
vplzcntq xmm2{k5}{z}, [rbx+r11*8-256]
vplzcntq ymm16, ymm15
vplzcntq ymm16, [rbx]
vplzcntq ymm16, [rbx]{1to4}
vplzcntq ymm16, [rbx+r11*8+256]
vplzcntq ymm16, [rbx+r11*8-256]
vplzcntq ymm16{k5}, ymm15
vplzcntq ymm16{k5}, [rbx]
vplzcntq ymm16{k5}, [rbx]{1to4}
vplzcntq ymm16{k5}, [rbx+r11*8+256]
vplzcntq ymm16{k5}, [rbx+r11*8-256]
vplzcntq ymm16{k5}{z}, ymm15
vplzcntq ymm16{k5}{z}, [rbx]
vplzcntq ymm16{k5}{z}, [rbx]{1to4}
vplzcntq ymm16{k5}{z}, [rbx+r11*8+256]
vplzcntq ymm16{k5}{z}, [rbx+r11*8-256]
vplzcntq zmm24, zmm31
vplzcntq zmm24, [rbx]
vplzcntq zmm24, [rbx]{1to8}
vplzcntq zmm24, [rbx+r11*8+256]
vplzcntq zmm24, [rbx+r11*8-256]
vplzcntq zmm24{k5}, zmm31
vplzcntq zmm24{k5}, [rbx]
vplzcntq zmm24{k5}, [rbx]{1to8}
vplzcntq zmm24{k5}, [rbx+r11*8+256]
vplzcntq zmm24{k5}, [rbx+r11*8-256]
vplzcntq zmm24{k5}{z}, zmm31
vplzcntq zmm24{k5}{z}, [rbx]
vplzcntq zmm24{k5}{z}, [rbx]{1to8}
vplzcntq zmm24{k5}{z}, [rbx+r11*8+256]
vplzcntq zmm24{k5}{z}, [rbx+r11*8-256]