/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>

/*
 * Save the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_save_state)
	fpsimd_save x0, 8		// x8 is scratch for FPSR/FPCR
	ret
SYM_FUNC_END(fpsimd_save_state)

/*
 * Load the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_load_state)
	fpsimd_restore x0, 8		// x8 is scratch for FPSR/FPCR
	ret
SYM_FUNC_END(fpsimd_load_state)
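
/*
 * Caller-side view of the two entry points above, as a sketch mirroring
 * the declarations in <asm/fpsimd.h> (exact spelling may vary between
 * kernel versions):
 *
 *	extern void fpsimd_save_state(struct user_fpsimd_state *state);
 *	extern void fpsimd_load_state(struct user_fpsimd_state *state);
 */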

#ifdef CONFIG_ARM64_SVE

/*
 * Save the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Save FFR if non-zero
 */
SYM_FUNC_START(sve_save_state)
	sve_save 0, x1, x2, 3		// base register x0, x3 as scratch
	ret
SYM_FUNC_END(sve_save_state)

/*
 * Load the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Restore FFR if non-zero
 */
SYM_FUNC_START(sve_load_state)
	sve_load 0, x1, x2, 4		// base register x0, x4 as scratch
	ret
SYM_FUNC_END(sve_load_state)
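
/*
 * Matching C declarations, sketched on the model of <asm/fpsimd.h>
 * (assumption: types as in recent kernels):
 *
 *	extern void sve_save_state(void *state, u32 *pfpsr, int save_ffr);
 *	extern void sve_load_state(void const *state, u32 const *pfpsr,
 *				   int restore_ffr);
 */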

SYM_FUNC_START(sve_get_vl)
	_sve_rdvl	0, 1		// x0 = VL in bytes
	ret
SYM_FUNC_END(sve_get_vl)
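
/*
 * _sve_rdvl 0, 1 assembles to RDVL x0, #1, so sve_get_vl() returns the
 * current SVE vector length in bytes: 32 for 256-bit vectors, 64 for
 * 512-bit vectors, and so on.
 */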

SYM_FUNC_START(sve_set_vq)
	sve_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sve_set_vq)
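
/*
 * x0 holds VQ - 1, where VQ is the vector length in 128-bit quadwords;
 * sve_load_vq programs the LEN field of ZCR_EL1 from it. For example,
 * to select a 512-bit vector length (VQ = 4) the caller passes 3.
 */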

/*
 * Zero all SVE registers but the first 128 bits of each vector.
 *
 * VQ must already be configured by the caller; any further updates of
 * VQ will need to ensure that the register state remains valid.
 *
 * x0 = include FFR?
 * x1 = VQ - 1
 */
SYM_FUNC_START(sve_flush_live)
	cbz		x1, 1f	// VQ - 1 of 0 means 128-bit vectors, so no extra Z state
	sve_flush_z
1:	sve_flush_p
	tbz		x0, #0, 2f
	sve_flush_ffr
2:	ret
SYM_FUNC_END(sve_flush_live)
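
/*
 * Caller-side sketch (modelled on the declaration in <asm/fpsimd.h>):
 *
 *	extern void sve_flush_live(bool flush_ffr, unsigned long vq_minus_1);
 */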

#endif /* CONFIG_ARM64_SVE */

#ifdef CONFIG_ARM64_SME

SYM_FUNC_START(sme_get_vl)
	_sme_rdsvl	0, 1		// x0 = streaming VL in bytes
	ret
SYM_FUNC_END(sme_get_vl)

SYM_FUNC_START(sme_set_vq)
	sme_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sme_set_vq)
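
/*
 * As with their SVE counterparts, sme_get_vl() returns the streaming
 * vector length in bytes (RDSVL x0, #1) and sme_set_vq() takes VQ - 1,
 * which sme_load_vq uses to program SMCR_EL1. C-side sketch, modelled
 * on <asm/fpsimd.h>:
 *
 *	extern unsigned int sme_get_vl(void);
 *	extern void sme_set_vq(unsigned long vq_minus_1);
 */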

/*
 * Save the ZA and ZT state
 *
 * x0 - pointer to buffer for state
 * x1 - number of ZT registers to save
 */
SYM_FUNC_START(sme_save_state)
	_sme_rdsvl	2, 1		// x2 = streaming VL in bytes
	sme_save_za 0, x2, 12		// Leaves x0 pointing to the end of ZA

	cbz	x1, 1f
	_str_zt 0
1:
	ret
SYM_FUNC_END(sme_save_state)

/*
 * Load the ZA and ZT state
 *
 * x0 - pointer to buffer for state
 * x1 - number of ZT registers to restore
 */
SYM_FUNC_START(sme_load_state)
	_sme_rdsvl	2, 1		// x2 = streaming VL in bytes
	sme_load_za 0, x2, 12		// Leaves x0 pointing to the end of ZA

	cbz	x1, 1f
	_ldr_zt 0
1:
	ret
SYM_FUNC_END(sme_load_state)
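
/*
 * C-side sketch for the ZA/ZT entry points, modelled on <asm/fpsimd.h>.
 * The buffer holds ZA as an n x n byte array (n being the streaming
 * vector length in bytes), followed by the 64 bytes of ZT0 when the zt
 * argument is non-zero:
 *
 *	extern void sme_save_state(void *state, int zt);
 *	extern void sme_load_state(void const *state, int zt);
 */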

#endif /* CONFIG_ARM64_SME */