Diffstat (limited to 'tools/arch')
-rw-r--r--  tools/arch/x86/include/asm/disabled-features.h  1
-rw-r--r--  tools/arch/x86/include/asm/required-features.h  1
-rw-r--r--  tools/arch/x86/include/uapi/asm/unistd.h         1
-rw-r--r--  tools/arch/x86/lib/memcpy_64.S                   5
4 files changed, 5 insertions, 3 deletions
diff --git a/tools/arch/x86/include/asm/disabled-features.h b/tools/arch/x86/include/asm/disabled-features.h
index c1a6d5d0da0d..c10c9128f54e 100644
--- a/tools/arch/x86/include/asm/disabled-features.h
+++ b/tools/arch/x86/include/asm/disabled-features.h
@@ -1,4 +1,3 @@
-/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_DISABLED_FEATURES_H
#define _ASM_X86_DISABLED_FEATURES_H
diff --git a/tools/arch/x86/include/asm/required-features.h b/tools/arch/x86/include/asm/required-features.h
index 59ac6baafb6a..d91ba04dd007 100644
--- a/tools/arch/x86/include/asm/required-features.h
+++ b/tools/arch/x86/include/asm/required-features.h
@@ -1,4 +1,3 @@
-/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_REQUIRED_FEATURES_H
#define _ASM_X86_REQUIRED_FEATURES_H
diff --git a/tools/arch/x86/include/uapi/asm/unistd.h b/tools/arch/x86/include/uapi/asm/unistd.h
index a26df0d75cd0..30d7d04d72d6 100644
--- a/tools/arch/x86/include/uapi/asm/unistd.h
+++ b/tools/arch/x86/include/uapi/asm/unistd.h
@@ -1,3 +1,4 @@
+/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _UAPI_ASM_X86_UNISTD_H
#define _UAPI_ASM_X86_UNISTD_H
diff --git a/tools/arch/x86/lib/memcpy_64.S b/tools/arch/x86/lib/memcpy_64.S
index ecf2c2067281..9a53a06e5a3e 100644
--- a/tools/arch/x86/lib/memcpy_64.S
+++ b/tools/arch/x86/lib/memcpy_64.S
@@ -1,10 +1,10 @@
-/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright 2002 Andi Kleen */
#include <linux/linkage.h>
#include <asm/errno.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
+#include <asm/export.h>
/*
* We build a jump to memcpy_orig by default which gets NOPped out on
@@ -41,6 +41,8 @@ ENTRY(memcpy)
ret
ENDPROC(memcpy)
ENDPROC(__memcpy)
+EXPORT_SYMBOL(memcpy)
+EXPORT_SYMBOL(__memcpy)
/*
* memcpy_erms() - enhanced fast string memcpy. This is faster and
@@ -275,6 +277,7 @@ ENTRY(memcpy_mcsafe_unrolled)
xorq %rax, %rax
ret
ENDPROC(memcpy_mcsafe_unrolled)
+EXPORT_SYMBOL_GPL(memcpy_mcsafe_unrolled)
.section .fixup, "ax"
/* Return -EFAULT for any failure */