From 658ac53d0ff77b6b7d4c7d3e01dca4eac786e465 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ciro=20Santilli=20=E5=85=AD=E5=9B=9B=E4=BA=8B=E4=BB=B6=20?= =?UTF-8?q?=E6=B3=95=E8=BD=AE=E5=8A=9F?= Date: Sun, 16 Jun 2019 00:00:03 +0000 Subject: [PATCH] x86 asm: move rotation and bit instructoins in from x86-assembly-cheat --- README.adoc | 57 ++++++++++++++++++++++++++++++++++++ userland/arch/x86_64/bt.S | 42 ++++++++++++++++++++++++++ userland/arch/x86_64/btc.S | 16 ++++++++++ userland/arch/x86_64/btr.S | 16 ++++++++++ userland/arch/x86_64/rcl.S | 38 ++++++++++++++++++++++++ userland/arch/x86_64/rol.S | 27 +++++++++++++++++ userland/arch/x86_64/sal.S | 24 +++++++++++++++ userland/arch/x86_64/setcc.S | 26 ++++++++++++++++ userland/arch/x86_64/shl.S | 43 +++++++++++++++++++++++++++ userland/arch/x86_64/test.S | 24 +++++++++++++++ 10 files changed, 313 insertions(+) create mode 100644 userland/arch/x86_64/bt.S create mode 100644 userland/arch/x86_64/btc.S create mode 100644 userland/arch/x86_64/btr.S create mode 100644 userland/arch/x86_64/rcl.S create mode 100644 userland/arch/x86_64/rol.S create mode 100644 userland/arch/x86_64/sal.S create mode 100644 userland/arch/x86_64/setcc.S create mode 100644 userland/arch/x86_64/shl.S create mode 100644 userland/arch/x86_64/test.S diff --git a/README.adoc b/README.adoc index f3c12b5..48ec621 100644 --- a/README.adoc +++ b/README.adoc @@ -12378,6 +12378,63 @@ Bibliography: * link:userland/arch/x86_64/or.S[OR] * link:userland/arch/x86_64/xor.S[XOR] +=== x86 shift and rotate instructions + +<> 5.1.5 "Shift and Rotate Instructions" + +* link:userland/arch/x86_64/shl.S[SHL and SHR] ++ +SHift left or Right and insert 0. ++ +CF == the bit that got shifted out. ++ +Application: quick unsigned multiply and divide by powers of 2. +* link:userland/arch/x86_64/sal.S[SAL and SAR] ++ +Application: signed multiply and divide by powers of 2. ++ +Mnemonics: Shift Arithmetic Left and Right ++ +Keeps the same sign on right shift. 
Not directly exposed in C, where right shifting a negative signed value is implementation-defined behavior, but it does exist in Java via the `>>` operator (`>>>` is the logical shift). C compilers can emit it however.
+ === x86 control transfer instructions <> 5.1.7 "Control Transfer Instructions" diff --git a/userland/arch/x86_64/bt.S b/userland/arch/x86_64/bt.S new file mode 100644 index 0000000..b2a582f --- /dev/null +++ b/userland/arch/x86_64/bt.S @@ -0,0 +1,42 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-bit-and-byte-instructions */ + +#include + +LKMC_PROLOGUE + /* 0101 1010 */ + mov $0x5A, %r12 + + bt $0, %r12w + LKMC_ASSERT(jnc) + + bt $1, %r12w + LKMC_ASSERT(jc) + + bt $2, %r12w + LKMC_ASSERT(jnc) + + bt $3, %r12w + LKMC_ASSERT(jc) + + bt $4, %r12w + LKMC_ASSERT(jc) + + bt $5, %r12w + LKMC_ASSERT(jnc) + + bt $6, %r12w + LKMC_ASSERT(jc) + + bt $7, %r12w + LKMC_ASSERT(jnc) + + /* The register is unchanged. */ + LKMC_ASSERT_EQ(%r12, $0x5A) + +#if 0 + /* There is no Byte decoding for bt: + * Error: operand size mismatch for `bt' + */ + bt $0, %r12b +#endif +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/btc.S b/userland/arch/x86_64/btc.S new file mode 100644 index 0000000..3e7fa68 --- /dev/null +++ b/userland/arch/x86_64/btc.S @@ -0,0 +1,16 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-bit-and-byte-instructions */ + +#include + +LKMC_PROLOGUE + /* 0101 1010 */ + mov $0x5A, %r12 + btc $0, %r12 + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $0x5B) + + /* 0101 1010 */ + btc $0, %r12 + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $0x5A) +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/btr.S b/userland/arch/x86_64/btr.S new file mode 100644 index 0000000..48ebe56 --- /dev/null +++ b/userland/arch/x86_64/btr.S @@ -0,0 +1,16 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-bit-and-byte-instructions */ + +#include + +LKMC_PROLOGUE + /* 0101 1010 */ + mov $0x5A, %r12 + btr $1, %r12 + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $0x58) + + /* 0101 1000 */ + btr $1, %r12 + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $0x58) +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/rcl.S b/userland/arch/x86_64/rcl.S new file mode 100644 index 
0000000..21224d1 --- /dev/null +++ b/userland/arch/x86_64/rcl.S @@ -0,0 +1,38 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-shift-and-rotate-instructions */ + +#include + +LKMC_PROLOGUE + mov $0x81, %r12 + clc + + rcl $1, %r12b + /* We'll have to save and restore flags across our asserts! + * 2x PUSHF to maintain 16-bit stack alignment. + * https://github.com/cirosantilli/linux-kernel-module-cheat#x86_64-calling-convention + */ + pushf + pushf + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $2) + + popf + rcl $1, %r12b + pushf + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $5) + + popf + rcr $2, %r12b + pushf + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $0x81) + + popf + rcr $1, %r12b + pushf + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $0x40) + + add $16, %rsp +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/rol.S b/userland/arch/x86_64/rol.S new file mode 100644 index 0000000..cebe4ac --- /dev/null +++ b/userland/arch/x86_64/rol.S @@ -0,0 +1,27 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-shift-and-rotate-instructions */ + +#include + +LKMC_PROLOGUE + mov $0x81, %r12 + + /* axl = 03h, CF = 1 */ + rol $1, %r12b + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $3) + + /* axl = 04h, CF = 0 */ + rol $1, %r12b + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $6) + + /* axl = 03h, CF = 0 */ + ror $2, %r12b + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $0x81) + + /* axl = 81h, CF = 1 */ + ror $1, %r12b + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $0x0C0) +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/sal.S b/userland/arch/x86_64/sal.S new file mode 100644 index 0000000..5943a21 --- /dev/null +++ b/userland/arch/x86_64/sal.S @@ -0,0 +1,24 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-shift-and-rotate-instructions */ + +#include + +LKMC_PROLOGUE + /* 0xFF == -1 in 2's complement with 8-bits. */ + mov $0xFF, %r12 + sal %r12b + LKMC_ASSERT(jc) + /* 0xFE == -2 in 2's complement with 8-bits. 
*/ + LKMC_ASSERT_EQ(%r12, $0xFE) + + /* SAR*/ + sar %r12b + LKMC_ASSERT(jnc) + /* -1 */ + LKMC_ASSERT_EQ(%r12, $0xFF) + + /* SAR rounds to -infinity: -1 goes to -1 again. */ + sar %r12b + LKMC_ASSERT(jc) + /* -1 */ + LKMC_ASSERT_EQ(%r12, $0xFF) +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/setcc.S b/userland/arch/x86_64/setcc.S new file mode 100644 index 0000000..83bbc6c --- /dev/null +++ b/userland/arch/x86_64/setcc.S @@ -0,0 +1,26 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-bit-and-byte-instructions */ + +#include + +LKMC_PROLOGUE + mov $0xFF, %r12 + + /* Set Carry flag. */ + stc + /* Check for carry flag. */ + setc %r12b + /* Carry flag was set, so set the r12b to 1. */ + LKMC_ASSERT_EQ(%r12, $1) + + /* Clear carry flag. */ + clc + setc %r12b + LKMC_ASSERT_EQ(%r12, $0) + +#if 0 + /* The operand size can only be one byte: + * Error: operand size mismatch for `setc' + */ + setc %eax +#endif +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/shl.S b/userland/arch/x86_64/shl.S new file mode 100644 index 0000000..bcd0063 --- /dev/null +++ b/userland/arch/x86_64/shl.S @@ -0,0 +1,43 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-shift-and-rotate-instructions */ + +#include + +LKMC_PROLOGUE + mov $0x81, %r12 + + /* Shift left by one. */ + shl %r12b + LKMC_ASSERT(jc) + LKMC_ASSERT_EQ(%r12, $2) + + /* Shift left by one. */ + shl %r12b + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $4) + + /* Shift right by one. */ + shr %r12b + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $2) + + /* Shift left by 2 immediate. + * Differentent coding than shift by 1. + */ + shl $2, %r12b + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $8) + + /* Shift left by 2 in cl register. 
*/ + mov $2, %cl + shl %cl, %r12b + LKMC_ASSERT(jnc) + LKMC_ASSERT_EQ(%r12, $0x20) + +#if 0 + /* cl is the only possible register choice + * Error: operand type mismatch for `shr' + */ + shr %bl, %ax +#endif + +LKMC_EPILOGUE diff --git a/userland/arch/x86_64/test.S b/userland/arch/x86_64/test.S new file mode 100644 index 0000000..8a61592 --- /dev/null +++ b/userland/arch/x86_64/test.S @@ -0,0 +1,24 @@ +/* https://github.com/cirosantilli/linux-kernel-module-cheat#x86-bit-and-byte-instructions */ + +#include + +LKMC_PROLOGUE + /* 0xF0 & 0x00 == 0x00 */ + mov $0xF0, %r12 + test $0, %r12b + /* The comparison was equal 0. */ + LKMC_ASSERT(jz) + /* r12 is unchanged. */ + LKMC_ASSERT_EQ(%r12, $0x0F0) + + /* 0xF0 & 0x18 == 0x10 != 0x00 */ + mov $0xF0, %r12 + test $0x18, %r12b + LKMC_ASSERT(jnz) + LKMC_ASSERT_EQ(%r12, $0x0F0) + + /* test %rax, %rax vs cmp $0, %rax: test produces a shorter + * encoding to decide if a register equals zero or not. + * http://stackoverflow.com/questions/147173/x86-assembly-testl-eax-against-eax + */ +LKMC_EPILOGUE