author     deraadt <deraadt@openbsd.org>  2018-07-10 16:01:26 +0000
committer  deraadt <deraadt@openbsd.org>  2018-07-10 16:01:26 +0000
commit     a5910b4fbcaa3dd48faff38aeadcb22c953af71e (patch)
tree       2d8ece63ecd0e2f36fd526bf03b2ebc02be93ea0 /sys/lib/libkern
parent     rde_update_get_prefix() and friends should also verify the prefixlen. (diff)
In asm.h ensure NENTRY uses the old-school nop-sled alignment, but change the
standard ENTRY to be a trapsled. Fix a few functions which fall through into an
ENTRY macro. amd64 binaries are now free of sequences of two or more nops
(except for one assembler nit in aes-586.pl). Previous changes by guenther got
us here.
ok mortimer kettenis
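For context, a minimal sketch of the two entry macros this commit distinguishes
in the amd64 <machine/asm.h>. The bodies below illustrate the idea and are not
copied from the tree: NENTRY aligns with 0x90 (nop) fill so execution can slide
through the padding into the label, while ENTRY aligns with 0xcc (int3) fill so
any stray execution of the padding traps.

	/* Sketch only; the exact asm.h wording may differ. */
	#define _ALIGN_TEXT	.align 16, 0x90		/* pad with nops */
	#define _ALIGN_TRAPS	.align 16, 0xcc		/* pad with int3 traps */

	#define NENTRY(x)	.text; _ALIGN_TEXT; .globl x; .type x,@function; x:
	#define ENTRY(x)	.text; _ALIGN_TRAPS; .globl x; .type x,@function; x:

A label reached only by an explicit call or jump can take the trapsled; a label
that code falls into from above must keep the nop fill, which is what the hunks
below fix. (The _ENTRY/_NENTRY forms in htonl.S and htons.S appear to be the
inner variants that take a label already expanded via _C_LABEL.)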
Diffstat (limited to 'sys/lib/libkern')
 sys/lib/libkern/arch/amd64/htonl.S   | 4 ++--
 sys/lib/libkern/arch/amd64/htons.S   | 4 ++--
 sys/lib/libkern/arch/amd64/memmove.S | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/sys/lib/libkern/arch/amd64/htonl.S b/sys/lib/libkern/arch/amd64/htonl.S
index f7d640521d1..07965cd1fd9 100644
--- a/sys/lib/libkern/arch/amd64/htonl.S
+++ b/sys/lib/libkern/arch/amd64/htonl.S
@@ -41,8 +41,8 @@
 #include <machine/asm.h>
 
 _ENTRY(_C_LABEL(htonl))
-_ENTRY(_C_LABEL(ntohl))
-_ENTRY(_C_LABEL(bswap32))
+_NENTRY(_C_LABEL(ntohl))
+_NENTRY(_C_LABEL(bswap32))
 	_PROF_PROLOGUE
 	RETGUARD_SETUP(htonl, r11)
 	movl	%edi,%eax
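The three labels above name one function body: ntohl and bswap32 sit at the
same address as htonl, i.e. they are entered by falling through from the first
label, so they are demoted to _NENTRY to keep any alignment padding in front of
them executable. A sketch of the expansion, assuming the hypothetical macro
bodies above (note that .align emits nothing when the location is already
aligned):

	.text
	.align	16, 0xcc	/* _ENTRY(htonl): trap fill before the first entry */
	.globl	htonl; .type	htonl,@function
htonl:
	.align	16, 0x90	/* _NENTRY(ntohl): nop fill, harmless on the fall-through path */
	.globl	ntohl; .type	ntohl,@function
ntohl:

The htons.S change below is the same pattern for the 16-bit swaps.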
diff --git a/sys/lib/libkern/arch/amd64/htons.S b/sys/lib/libkern/arch/amd64/htons.S
index 15016f5736e..3a702594733 100644
--- a/sys/lib/libkern/arch/amd64/htons.S
+++ b/sys/lib/libkern/arch/amd64/htons.S
@@ -41,8 +41,8 @@
 #include <machine/asm.h>
 
 _ENTRY(_C_LABEL(htons))
-_ENTRY(_C_LABEL(ntohs))
-_ENTRY(_C_LABEL(bswap16))
+_NENTRY(_C_LABEL(ntohs))
+_NENTRY(_C_LABEL(bswap16))
 	_PROF_PROLOGUE
 	RETGUARD_SETUP(htons, r11)
 	movl	%edi,%eax
diff --git a/sys/lib/libkern/arch/amd64/memmove.S b/sys/lib/libkern/arch/amd64/memmove.S
index 3a0bed88790..71d5b007f41 100644
--- a/sys/lib/libkern/arch/amd64/memmove.S
+++ b/sys/lib/libkern/arch/amd64/memmove.S
@@ -44,7 +44,7 @@ ENTRY(bcopy)
 	xchgq	%rdi,%rsi
 	/* fall into memmove */
 
-ENTRY(memmove)
+NENTRY(memmove)
 	RETGUARD_SETUP(memmove, r10)
 	movq	%rdi,%r11		/* save dest */
 	movq	%rdx,%rcx
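Here the fall-through is real rather than an alias: bcopy swaps its source and
destination registers and runs straight into the memmove body. With ENTRY's
0xcc fill, any alignment padding between bcopy's xchgq and the memmove label
would be int3 instructions on bcopy's execution path; NENTRY's 0x90 fill turns
that padding into a nop sled. A sketch, again assuming the macro bodies above:

ENTRY(bcopy)				/* direct callers land after the trap-fill padding */
	xchgq	%rdi,%rsi		/* bcopy(src,dst,len) becomes memmove(dst,src,len) */
	/* fall into memmove */
NENTRY(memmove)				/* nop-fill alignment: safe to enter from above */
	RETGUARD_SETUP(memmove, r10)
	...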