/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the EL1 system register context. It assumes
 * that 'x0' points to an 'el1_sys_regs' structure where
 * the register context will be saved.
 * -----------------------------------------------------
 */
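/* -----------------------------------------------------
 * Illustrative call sequence (a hypothetical caller,
 * shown for documentation only): 'x0' must hold the base
 * of the sysreg area inside the cpu context before the
 * call, e.g.
 *
 *	add	x0, <ctx_base>, #CTX_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 *
 * '<ctx_base>' is a placeholder and CTX_SYSREGS_OFFSET
 * is assumed to be provided by context.h.
 * -----------------------------------------------------
 */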
	.global el1_sysregs_context_save
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

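	/*
	 * DACR32_EL2 and IFSR32_EL2 hold AArch32 state (the
	 * domain access control and instruction fault status
	 * registers of a lower EL using AArch32); they are
	 * saved here alongside the EL1 context.
	 */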
	mrs	x11, dacr32_el2
	mrs	x12, ifsr32_el2
	stp	x11, x12, [x0, #CTX_DACR32_EL2]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

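	/*
	 * FPEXC32_EL2 holds the AArch32 FPEXC register of a
	 * lower EL that is using AArch32.
	 */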
	mrs	x15, fpexc32_el2
	str	x15, [x0, #CTX_FP_FPEXC32_EL2]

	ret

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the EL1 system register context. It assumes
 * that 'x0' points to an 'el1_sys_regs' structure from
 * where the register context will be restored.
 * -----------------------------------------------------
 */
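/* -----------------------------------------------------
 * Illustrative call sequence (hypothetical caller, for
 * documentation only), mirroring the save path above:
 *
 *	add	x0, <ctx_base>, #CTX_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_restore
 * -----------------------------------------------------
 */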
	.global el1_sysregs_context_restore
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

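	/*
	 * As on the save path, DACR32_EL2 and IFSR32_EL2 carry
	 * AArch32 state of a lower EL using AArch32.
	 */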
	ldp	x11, x12, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x11
	msr	ifsr32_el2, x12

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	ldr	x15, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x15

	/* No explicit ISB required here as ERET covers it */

	ret

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS (AAPCS64), using x9-x17 (temporary caller-saved
 * registers) to save the floating point register
 * context. It assumes that 'x0' points to an 'fp_regs'
 * structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is
 * set. However, the Trusted Firmware currently neither
 * uses VFP registers nor sets this trap, so the bit is
 * assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
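/* -----------------------------------------------------
 * If the trap were ever enabled, a caller would need to
 * clear CPTR_EL3.TFP before using these functions. A
 * minimal sketch (hypothetical, assuming TFP_BIT is the
 * CPTR_EL3.TFP mask provided by arch.h):
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 * -----------------------------------------------------
 */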
#if CTX_INCLUDE_FPREGS
	.global fpregs_context_save
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

	ret

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS (AAPCS64), using x9-x17 (temporary caller-saved
 * registers) to restore the floating point register
 * context. It assumes that 'x0' points to an 'fp_regs'
 * structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is
 * set. However, the Trusted Firmware currently neither
 * uses VFP registers nor sets this trap, so the bit is
 * assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
	.global fpregs_context_restore
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
#endif /* CTX_INCLUDE_FPREGS */