VirtualBox

source: vbox/trunk/include/iprt/armv8.h@ 104212

Last change on this file since 104212 was 104147, checked in by vboxsync, 8 months ago

VMM/IEM: Optimize (?) the TLB code on ARM64 by using LDP and (for code) STP. Current disabled. Also a disabled native recompiler profiling tweak. bugref:10374

1/** @file
2 * IPRT - ARMv8 (AArch64 and AArch32) Structures and Definitions.
3 */
4
5/*
6 * Copyright (C) 2023 Oracle and/or its affiliates.
7 *
8 * This file is part of VirtualBox base platform packages, as
9 * available from https://www.virtualbox.org.
10 *
11 * This program is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU General Public License
13 * as published by the Free Software Foundation, in version 3 of the
14 * License.
15 *
16 * This program is distributed in the hope that it will be useful, but
17 * WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * General Public License for more details.
20 *
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, see <https://www.gnu.org/licenses>.
23 *
24 * The contents of this file may alternatively be used under the terms
25 * of the Common Development and Distribution License Version 1.0
26 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
27 * in the VirtualBox distribution, in which case the provisions of the
28 * CDDL are applicable instead of those of the GPL.
29 *
30 * You may elect to license modified versions of this file under the
31 * terms and conditions of either the GPL or the CDDL or both.
32 *
33 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
34 */
35
36#ifndef IPRT_INCLUDED_armv8_h
37#define IPRT_INCLUDED_armv8_h
38#ifndef RT_WITHOUT_PRAGMA_ONCE
39# pragma once
40#endif
41
42#ifndef VBOX_FOR_DTRACE_LIB
43# include <iprt/types.h>
44# include <iprt/assert.h>
45#else
46# pragma D depends_on library vbox-types.d
47#endif
48
49/** @defgroup grp_rt_armv8 ARMv8 Types and Definitions
50 * @ingroup grp_rt
51 * @{
52 */
53
54/** @name The AArch64 register encoding - deprecated.
55 * @deprecated Use ARMV8_A64_REG_XXX instead.
56 * @todo correct code and drop these remaining ones.
57 * @{ */
58#define ARMV8_AARCH64_REG_X0 0
59#define ARMV8_AARCH64_REG_X1 1
60#define ARMV8_AARCH64_REG_X2 2
61#define ARMV8_AARCH64_REG_X3 3
62#define ARMV8_AARCH64_REG_ZR 31
63/** @} */
64
65/** @name The AArch64 general purpose register encoding.
66 * @{ */
67#define ARMV8_A64_REG_X0 0
68#define ARMV8_A64_REG_X1 1
69#define ARMV8_A64_REG_X2 2
70#define ARMV8_A64_REG_X3 3
71#define ARMV8_A64_REG_X4 4
72#define ARMV8_A64_REG_X5 5
73#define ARMV8_A64_REG_X6 6
74#define ARMV8_A64_REG_X7 7
75#define ARMV8_A64_REG_X8 8
76#define ARMV8_A64_REG_X9 9
77#define ARMV8_A64_REG_X10 10
78#define ARMV8_A64_REG_X11 11
79#define ARMV8_A64_REG_X12 12
80#define ARMV8_A64_REG_X13 13
81#define ARMV8_A64_REG_X14 14
82#define ARMV8_A64_REG_X15 15
83#define ARMV8_A64_REG_X16 16
84#define ARMV8_A64_REG_X17 17
85#define ARMV8_A64_REG_X18 18
86#define ARMV8_A64_REG_X19 19
87#define ARMV8_A64_REG_X20 20
88#define ARMV8_A64_REG_X21 21
89#define ARMV8_A64_REG_X22 22
90#define ARMV8_A64_REG_X23 23
91#define ARMV8_A64_REG_X24 24
92#define ARMV8_A64_REG_X25 25
93#define ARMV8_A64_REG_X26 26
94#define ARMV8_A64_REG_X27 27
95#define ARMV8_A64_REG_X28 28
96#define ARMV8_A64_REG_X29 29
97#define ARMV8_A64_REG_X30 30
98/** @} */
99
100/** @name The AArch64 32-bit general purpose register names.
101 * @{ */
102#define ARMV8_A64_REG_W0 ARMV8_A64_REG_X0
103#define ARMV8_A64_REG_W1 ARMV8_A64_REG_X1
104#define ARMV8_A64_REG_W2 ARMV8_A64_REG_X2
105#define ARMV8_A64_REG_W3 ARMV8_A64_REG_X3
106#define ARMV8_A64_REG_W4 ARMV8_A64_REG_X4
107#define ARMV8_A64_REG_W5 ARMV8_A64_REG_X5
108#define ARMV8_A64_REG_W6 ARMV8_A64_REG_X6
109#define ARMV8_A64_REG_W7 ARMV8_A64_REG_X7
110#define ARMV8_A64_REG_W8 ARMV8_A64_REG_X8
111#define ARMV8_A64_REG_W9 ARMV8_A64_REG_X9
112#define ARMV8_A64_REG_W10 ARMV8_A64_REG_X10
113#define ARMV8_A64_REG_W11 ARMV8_A64_REG_X11
114#define ARMV8_A64_REG_W12 ARMV8_A64_REG_X12
115#define ARMV8_A64_REG_W13 ARMV8_A64_REG_X13
116#define ARMV8_A64_REG_W14 ARMV8_A64_REG_X14
117#define ARMV8_A64_REG_W15 ARMV8_A64_REG_X15
118#define ARMV8_A64_REG_W16 ARMV8_A64_REG_X16
119#define ARMV8_A64_REG_W17 ARMV8_A64_REG_X17
120#define ARMV8_A64_REG_W18 ARMV8_A64_REG_X18
121#define ARMV8_A64_REG_W19 ARMV8_A64_REG_X19
122#define ARMV8_A64_REG_W20 ARMV8_A64_REG_X20
123#define ARMV8_A64_REG_W21 ARMV8_A64_REG_X21
124#define ARMV8_A64_REG_W22 ARMV8_A64_REG_X22
125#define ARMV8_A64_REG_W23 ARMV8_A64_REG_X23
126#define ARMV8_A64_REG_W24 ARMV8_A64_REG_X24
127#define ARMV8_A64_REG_W25 ARMV8_A64_REG_X25
128#define ARMV8_A64_REG_W26 ARMV8_A64_REG_X26
129#define ARMV8_A64_REG_W27 ARMV8_A64_REG_X27
130#define ARMV8_A64_REG_W28 ARMV8_A64_REG_X28
131#define ARMV8_A64_REG_W29 ARMV8_A64_REG_X29
132#define ARMV8_A64_REG_W30 ARMV8_A64_REG_X30
133/** @} */
134
135/** @name The AArch64 NEON scalar register encoding.
136 * @{ */
137#define ARMV8_A64_REG_Q0 0
138#define ARMV8_A64_REG_Q1 1
139#define ARMV8_A64_REG_Q2 2
140#define ARMV8_A64_REG_Q3 3
141#define ARMV8_A64_REG_Q4 4
142#define ARMV8_A64_REG_Q5 5
143#define ARMV8_A64_REG_Q6 6
144#define ARMV8_A64_REG_Q7 7
145#define ARMV8_A64_REG_Q8 8
146#define ARMV8_A64_REG_Q9 9
147#define ARMV8_A64_REG_Q10 10
148#define ARMV8_A64_REG_Q11 11
149#define ARMV8_A64_REG_Q12 12
150#define ARMV8_A64_REG_Q13 13
151#define ARMV8_A64_REG_Q14 14
152#define ARMV8_A64_REG_Q15 15
153#define ARMV8_A64_REG_Q16 16
154#define ARMV8_A64_REG_Q17 17
155#define ARMV8_A64_REG_Q18 18
156#define ARMV8_A64_REG_Q19 19
157#define ARMV8_A64_REG_Q20 20
158#define ARMV8_A64_REG_Q21 21
159#define ARMV8_A64_REG_Q22 22
160#define ARMV8_A64_REG_Q23 23
161#define ARMV8_A64_REG_Q24 24
162#define ARMV8_A64_REG_Q25 25
163#define ARMV8_A64_REG_Q26 26
164#define ARMV8_A64_REG_Q27 27
165#define ARMV8_A64_REG_Q28 28
166#define ARMV8_A64_REG_Q29 29
167#define ARMV8_A64_REG_Q30 30
168#define ARMV8_A64_REG_Q31 31
169/** @} */
170
171/** @name The AArch64 NEON vector register encoding.
172 * @{ */
173#define ARMV8_A64_REG_V0 ARMV8_A64_REG_Q0
174#define ARMV8_A64_REG_V1 ARMV8_A64_REG_Q1
175#define ARMV8_A64_REG_V2 ARMV8_A64_REG_Q2
176#define ARMV8_A64_REG_V3 ARMV8_A64_REG_Q3
177#define ARMV8_A64_REG_V4 ARMV8_A64_REG_Q4
178#define ARMV8_A64_REG_V5 ARMV8_A64_REG_Q5
179#define ARMV8_A64_REG_V6 ARMV8_A64_REG_Q6
180#define ARMV8_A64_REG_V7 ARMV8_A64_REG_Q7
181#define ARMV8_A64_REG_V8 ARMV8_A64_REG_Q8
182#define ARMV8_A64_REG_V9 ARMV8_A64_REG_Q9
183#define ARMV8_A64_REG_V10 ARMV8_A64_REG_Q10
184#define ARMV8_A64_REG_V11 ARMV8_A64_REG_Q11
185#define ARMV8_A64_REG_V12 ARMV8_A64_REG_Q12
186#define ARMV8_A64_REG_V13 ARMV8_A64_REG_Q13
187#define ARMV8_A64_REG_V14 ARMV8_A64_REG_Q14
188#define ARMV8_A64_REG_V15 ARMV8_A64_REG_Q15
189#define ARMV8_A64_REG_V16 ARMV8_A64_REG_Q16
190#define ARMV8_A64_REG_V17 ARMV8_A64_REG_Q17
191#define ARMV8_A64_REG_V18 ARMV8_A64_REG_Q18
192#define ARMV8_A64_REG_V19 ARMV8_A64_REG_Q19
193#define ARMV8_A64_REG_V20 ARMV8_A64_REG_Q20
194#define ARMV8_A64_REG_V21 ARMV8_A64_REG_Q21
195#define ARMV8_A64_REG_V22 ARMV8_A64_REG_Q22
196#define ARMV8_A64_REG_V23 ARMV8_A64_REG_Q23
197#define ARMV8_A64_REG_V24 ARMV8_A64_REG_Q24
198#define ARMV8_A64_REG_V25 ARMV8_A64_REG_Q25
199#define ARMV8_A64_REG_V26 ARMV8_A64_REG_Q26
200#define ARMV8_A64_REG_V27 ARMV8_A64_REG_Q27
201#define ARMV8_A64_REG_V28 ARMV8_A64_REG_Q28
202#define ARMV8_A64_REG_V29 ARMV8_A64_REG_Q29
203#define ARMV8_A64_REG_V30 ARMV8_A64_REG_Q30
204#define ARMV8_A64_REG_V31 ARMV8_A64_REG_Q31
205/** @} */
206
207/** @name The AArch64 register 31.
208 * @note Register 31 typically refers to the zero register, but can also in
209 * select cases (depending on the instruction and opcode field) refer to the
210 * stack pointer of the current exception level. ARM typically uses \<Xn|SP\>
211 * to indicate that register 31 is taken as SP; if just \<Xn\> is used,
212 * 31 will be the zero register.
213 * @{ */
214/** The stack pointer. */
215#define ARMV8_A64_REG_SP 31
216/** The zero register. Reads as zero, writes ignored. */
217#define ARMV8_A64_REG_XZR 31
218/** The zero register, the 32-bit register name. */
219#define ARMV8_A64_REG_WZR ARMV8_A64_REG_XZR
220/** @} */
221
222/** @name AArch64 register aliases
223 * @{ */
224/** The link register is typically mapped to x30 as that's the default pick of
225 * the RET instruction. */
226#define ARMV8_A64_REG_LR ARMV8_A64_REG_X30
227/** Frame base pointer is typically mapped to x29. */
228#define ARMV8_A64_REG_BP ARMV8_A64_REG_X29
229/** @} */
230
231
232/** @name System register encoding.
233 * @{
234 */
235/** Mask for the op0 part of an MSR/MRS instruction */
236#define ARMV8_AARCH64_SYSREG_OP0_MASK (RT_BIT_32(19) | RT_BIT_32(20))
237/** Shift for the op0 part of an MSR/MRS instruction */
238#define ARMV8_AARCH64_SYSREG_OP0_SHIFT 19
239/** Returns the op0 part of the given MRS/MSR instruction. */
240#define ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP0_MASK) >> ARMV8_AARCH64_SYSREG_OP0_SHIFT)
241/** Mask for the op1 part of an MSR/MRS instruction */
242#define ARMV8_AARCH64_SYSREG_OP1_MASK (RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18))
243/** Shift for the op1 part of an MSR/MRS instruction */
244#define ARMV8_AARCH64_SYSREG_OP1_SHIFT 16
245/** Returns the op1 part of the given MRS/MSR instruction. */
246#define ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP1_MASK) >> ARMV8_AARCH64_SYSREG_OP1_SHIFT)
247/** Mask for the CRn part of an MSR/MRS instruction */
248#define ARMV8_AARCH64_SYSREG_CRN_MASK ( RT_BIT_32(12) | RT_BIT_32(13) | RT_BIT_32(14) \
249 | RT_BIT_32(15) )
250/** Shift for the CRn part of an MSR/MRS instruction */
251#define ARMV8_AARCH64_SYSREG_CRN_SHIFT 12
252/** Returns the CRn part of the given MRS/MSR instruction. */
253#define ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRN_MASK) >> ARMV8_AARCH64_SYSREG_CRN_SHIFT)
254/** Mask for the CRm part of an MSR/MRS instruction */
255#define ARMV8_AARCH64_SYSREG_CRM_MASK ( RT_BIT_32(8) | RT_BIT_32(9) | RT_BIT_32(10) \
256 | RT_BIT_32(11) )
257/** Shift for the CRm part of an MSR/MRS instruction */
258#define ARMV8_AARCH64_SYSREG_CRM_SHIFT 8
260/** Returns the CRm part of the given MRS/MSR instruction. */
260#define ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRM_MASK) >> ARMV8_AARCH64_SYSREG_CRM_SHIFT)
261/** Mask for the op2 part of an MSR/MRS instruction */
262#define ARMV8_AARCH64_SYSREG_OP2_MASK (RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7))
263/** Shift for the op2 part of an MSR/MRS instruction */
264#define ARMV8_AARCH64_SYSREG_OP2_SHIFT 5
265/** Returns the op2 part of the given MRS/MSR instruction. */
266#define ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP2_MASK) >> ARMV8_AARCH64_SYSREG_OP2_SHIFT)
267/** Mask for all system register encoding relevant fields in an MRS/MSR instruction. */
268#define ARMV8_AARCH64_SYSREG_MASK ( ARMV8_AARCH64_SYSREG_OP0_MASK | ARMV8_AARCH64_SYSREG_OP1_MASK \
269 | ARMV8_AARCH64_SYSREG_CRN_MASK | ARMV8_AARCH64_SYSREG_CRM_MASK \
270 | ARMV8_AARCH64_SYSREG_OP2_MASK)
271/** @} */
272
273/** @name Mapping of op0:op1:CRn:CRm:op2 to a system register ID. This is
274 * IPRT specific and not part of the ARMv8 specification.
275 * @{ */
276#define ARMV8_AARCH64_SYSREG_ID_CREATE(a_Op0, a_Op1, a_CRn, a_CRm, a_Op2) \
277 UINT16_C( (((a_Op0) & 0x3) << 14) \
278 | (((a_Op1) & 0x7) << 11) \
279 | (((a_CRn) & 0xf) << 7) \
280 | (((a_CRm) & 0xf) << 3) \
281 | ((a_Op2) & 0x7))
282/** Returns the internal system register ID from the given MRS/MSR instruction. */
283#define ARMV8_AARCH64_SYSREG_ID_FROM_MRS_MSR(a_MsrMrsInsn) \
284 ARMV8_AARCH64_SYSREG_ID_CREATE(ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn), \
285 ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn), \
286 ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn), \
287 ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn), \
288 ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn))
289/** Encodes the given system register ID in the given MSR/MRS instruction. */
290#define ARMV8_AARCH64_SYSREG_ID_ENCODE_IN_MRS_MSR(a_MsrMrsInsn, a_SysregId) \
291 ((a_MsrMrsInsn) = ((a_MsrMrsInsn) & ~ARMV8_AARCH64_SYSREG_MASK) | ((a_SysregId) << ARMV8_AARCH64_SYSREG_OP2_SHIFT))
292/** @} */
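
/*
 * Illustrative usage sketch (added commentary, not part of the ARMv8 spec or
 * the original header): how a trapped MRS/MSR instruction word maps onto the
 * IPRT system register ID space defined above.  The function and variable
 * names are made up for the example.
 *
 *     static uint16_t armv8ExampleSysRegIdFromInsn(uint32_t uMsrMrsInsn)
 *     {
 *         // The accessors mask and shift the op0:op1:CRn:CRm:op2 fields out of
 *         // the instruction word (bits 20:19, 18:16, 15:12, 11:8 and 7:5).
 *         uint32_t const uOp0 = ARMV8_AARCH64_SYSREG_OP0_GET(uMsrMrsInsn);
 *         uint32_t const uOp1 = ARMV8_AARCH64_SYSREG_OP1_GET(uMsrMrsInsn);
 *         uint32_t const uCRn = ARMV8_AARCH64_SYSREG_CRN_GET(uMsrMrsInsn);
 *         uint32_t const uCRm = ARMV8_AARCH64_SYSREG_CRM_GET(uMsrMrsInsn);
 *         uint32_t const uOp2 = ARMV8_AARCH64_SYSREG_OP2_GET(uMsrMrsInsn);
 *
 *         // Packing the fields again yields the same value that
 *         // ARMV8_AARCH64_SYSREG_ID_FROM_MRS_MSR produces in one step.
 *         return ARMV8_AARCH64_SYSREG_ID_CREATE(uOp0, uOp1, uCRn, uCRm, uOp2);
 *     }
 *
 * For instance, an MRS reading MPIDR_EL1 (op0=3, op1=0, CRn=0, CRm=0, op2=5)
 * yields (3 << 14) | 5 = 0xc005, i.e. ARMV8_AARCH64_SYSREG_MPIDR_EL1.
 */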
293
294
295/** @name System register IDs.
296 * @{ */
297/** OSLAR_EL1 register - WO. */
298#define ARMV8_AARCH64_SYSREG_OSLAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 0, 4)
299/** OSLSR_EL1 register - RO. */
300#define ARMV8_AARCH64_SYSREG_OSLSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 1, 4)
301/** OSDLR_EL1 register - RW. */
302#define ARMV8_AARCH64_SYSREG_OSDLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 3, 4)
303
304/** MIDR_EL1 register - RO. */
305#define ARMV8_AARCH64_SYSREG_MIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 0)
306/** MPIDR_EL1 register - RO. */
307#define ARMV8_AARCH64_SYSREG_MPIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 5)
308/** REVIDR_EL1 register - RO. */
309#define ARMV8_AARCH64_SYSREG_REVIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 6)
310/** ID_PFR0_EL1 register - RO. */
311#define ARMV8_AARCH64_SYSREG_ID_PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 0)
312/** ID_PFR1_EL1 register - RO. */
313#define ARMV8_AARCH64_SYSREG_ID_PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 1)
314/** ID_DFR0_EL1 register - RO. */
315#define ARMV8_AARCH64_SYSREG_ID_DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 2)
316/** ID_AFR0_EL1 register - RO. */
317#define ARMV8_AARCH64_SYSREG_ID_AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 3)
318/** ID_MMFR0_EL1 register - RO. */
319#define ARMV8_AARCH64_SYSREG_ID_MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 4)
320/** ID_MMFR1_EL1 register - RO. */
321#define ARMV8_AARCH64_SYSREG_ID_MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 5)
322/** ID_MMFR2_EL1 register - RO. */
323#define ARMV8_AARCH64_SYSREG_ID_MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 6)
324/** ID_MMFR3_EL1 register - RO. */
325#define ARMV8_AARCH64_SYSREG_ID_MMFR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 7)
326
327/** ID_ISAR0_EL1 register - RO. */
328#define ARMV8_AARCH64_SYSREG_ID_ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 0)
329/** ID_ISAR1_EL1 register - RO. */
330#define ARMV8_AARCH64_SYSREG_ID_ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 1)
331/** ID_ISAR2_EL1 register - RO. */
332#define ARMV8_AARCH64_SYSREG_ID_ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 2)
333/** ID_ISAR3_EL1 register - RO. */
334#define ARMV8_AARCH64_SYSREG_ID_ISAR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 3)
335/** ID_ISAR4_EL1 register - RO. */
336#define ARMV8_AARCH64_SYSREG_ID_ISAR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 4)
337/** ID_ISAR5_EL1 register - RO. */
338#define ARMV8_AARCH64_SYSREG_ID_ISAR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 5)
339/** ID_MMFR4_EL1 register - RO. */
340#define ARMV8_AARCH64_SYSREG_ID_MMFR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 6)
341/** ID_ISAR6_EL1 register - RO. */
342#define ARMV8_AARCH64_SYSREG_ID_ISAR6_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 7)
343
344/** MVFR0_EL1 register - RO. */
345#define ARMV8_AARCH64_SYSREG_MVFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 0)
346/** MVFR1_EL1 register - RO. */
347#define ARMV8_AARCH64_SYSREG_MVFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 1)
348/** MVFR2_EL1 register - RO. */
349#define ARMV8_AARCH64_SYSREG_MVFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 2)
350/** ID_PFR2_EL1 register - RO. */
351#define ARMV8_AARCH64_SYSREG_ID_PFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 4)
352/** ID_DFR1_EL1 register - RO. */
353#define ARMV8_AARCH64_SYSREG_ID_DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 5)
354/** ID_MMFR5_EL1 register - RO. */
355#define ARMV8_AARCH64_SYSREG_ID_MMFR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 6)
356
357/** ID_AA64PFR0_EL1 register - RO. */
358#define ARMV8_AARCH64_SYSREG_ID_AA64PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 0)
359/** ID_AA64PFR1_EL1 register - RO. */
360#define ARMV8_AARCH64_SYSREG_ID_AA64PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 1)
361/** ID_AA64ZFR0_EL1 register - RO. */
362#define ARMV8_AARCH64_SYSREG_ID_AA64ZFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 4)
363/** ID_AA64SMFR0_EL1 register - RO. */
364#define ARMV8_AARCH64_SYSREG_ID_AA64SMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 5)
365
366/** ID_AA64DFR0_EL1 register - RO. */
367#define ARMV8_AARCH64_SYSREG_ID_AA64DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 0)
368/** ID_AA64DFR1_EL1 register - RO. */
369#define ARMV8_AARCH64_SYSREG_ID_AA64DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 1)
370/** ID_AA64AFR0_EL1 register - RO. */
371#define ARMV8_AARCH64_SYSREG_ID_AA64AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 4)
372/** ID_AA64AFR1_EL1 register - RO. */
373#define ARMV8_AARCH64_SYSREG_ID_AA64AFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 5)
374
375/** ID_AA64ISAR0_EL1 register - RO. */
376#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 0)
377/** ID_AA64ISAR1_EL1 register - RO. */
378#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 1)
379/** ID_AA64ISAR2_EL1 register - RO. */
380#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 2)
381
382/** ID_AA64MMFR0_EL1 register - RO. */
383#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 0)
384/** ID_AA64MMFR1_EL1 register - RO. */
385#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 1)
386/** ID_AA64MMFR2_EL1 register - RO. */
387#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 2)
388
389/** SCTLR_EL1 register - RW. */
390#define ARMV8_AARCH64_SYSREG_SCTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 0)
391/** ACTLR_EL1 register - RW. */
392#define ARMV8_AARCH64_SYSREG_ACTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 1)
393/** CPACR_EL1 register - RW. */
394#define ARMV8_AARCH64_SYSREG_CPACR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 2)
395/** RGSR_EL1 register - RW. */
396#define ARMV8_AARCH64_SYSREG_RGSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 5)
397/** GCR_EL1 register - RW. */
398#define ARMV8_AARCH64_SYSREG_GCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 6)
399
400/** ZCR_EL1 register - RW. */
401#define ARMV8_AARCH64_SYSREG_ZCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 0)
402/** TRFCR_EL1 register - RW. */
403#define ARMV8_AARCH64_SYSREG_TRFCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 1)
404/** SMPRI_EL1 register - RW. */
405#define ARMV8_AARCH64_SYSREG_SMPRI_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 4)
406/** SMCR_EL1 register - RW. */
407#define ARMV8_AARCH64_SYSREG_SMCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 6)
408
409/** TTBR0_EL1 register - RW. */
410#define ARMV8_AARCH64_SYSREG_TTBR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 0)
411/** TTBR1_EL1 register - RW. */
412#define ARMV8_AARCH64_SYSREG_TTBR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 1)
413/** TCR_EL1 register - RW. */
414#define ARMV8_AARCH64_SYSREG_TCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 2)
415
416/** @todo APIA,APIB,APDA,APDB,APGA registers. */
417
418/** SPSR_EL1 register - RW. */
419#define ARMV8_AARCH64_SYSREG_SPSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 0)
420/** ELR_EL1 register - RW. */
421#define ARMV8_AARCH64_SYSREG_ELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 1)
422
423/** SP_EL0 register - RW. */
424#define ARMV8_AARCH64_SYSREG_SP_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 1, 0)
425
426/** PSTATE.SPSel value. */
427#define ARMV8_AARCH64_SYSREG_SPSEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 0)
428/** PSTATE.CurrentEL value. */
429#define ARMV8_AARCH64_SYSREG_CURRENTEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 2)
430/** PSTATE.PAN value. */
431#define ARMV8_AARCH64_SYSREG_PAN ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 3)
432/** PSTATE.UAO value. */
433#define ARMV8_AARCH64_SYSREG_UAO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 4)
434
435/** PSTATE.ALLINT value. */
436#define ARMV8_AARCH64_SYSREG_ALLINT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 3, 0)
437
438/** ICC_PMR_EL1 register - RW. */
439#define ARMV8_AARCH64_SYSREG_ICC_PMR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 6, 0)
440
441/** AFSR0_EL1 register - RW. */
442#define ARMV8_AARCH64_SYSREG_AFSR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 0)
443/** AFSR1_EL1 register - RW. */
444#define ARMV8_AARCH64_SYSREG_AFSR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 1)
445
446/** ESR_EL1 register - RW. */
447#define ARMV8_AARCH64_SYSREG_ESR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 2, 0)
448
449/** ERRIDR_EL1 register - RO. */
450#define ARMV8_AARCH64_SYSREG_ERRIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 0)
451/** ERRSELR_EL1 register - RW. */
452#define ARMV8_AARCH64_SYSREG_ERRSELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 1)
453
454/** ICC_IAR0_EL1 register - RO. */
455#define ARMV8_AARCH64_SYSREG_ICC_IAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 0)
456/** ICC_EOIR0_EL1 register - WO. */
457#define ARMV8_AARCH64_SYSREG_ICC_EOIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 1)
459/** ICC_HPPIR0_EL1 register - RO. */
459#define ARMV8_AARCH64_SYSREG_ICC_HPPIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 2)
460/** ICC_BPR0_EL1 register - RW. */
461#define ARMV8_AARCH64_SYSREG_ICC_BPR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 3)
462/** ICC_AP0R0_EL1 register - RW. */
463#define ARMV8_AARCH64_SYSREG_ICC_AP0R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 4)
464/** ICC_AP0R1_EL1 register - RW. */
465#define ARMV8_AARCH64_SYSREG_ICC_AP0R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 5)
466/** ICC_AP0R2_EL1 register - RW. */
467#define ARMV8_AARCH64_SYSREG_ICC_AP0R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 6)
468/** ICC_AP0R3_EL1 register - RW. */
469#define ARMV8_AARCH64_SYSREG_ICC_AP0R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 7)
470
471/** ICC_AP1R0_EL1 register - RW. */
472#define ARMV8_AARCH64_SYSREG_ICC_AP1R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 0)
473/** ICC_AP1R1_EL1 register - RW. */
474#define ARMV8_AARCH64_SYSREG_ICC_AP1R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 1)
475/** ICC_AP1R2_EL1 register - RW. */
476#define ARMV8_AARCH64_SYSREG_ICC_AP1R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 2)
477/** ICC_AP1R3_EL1 register - RW. */
478#define ARMV8_AARCH64_SYSREG_ICC_AP1R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 3)
479/** ICC_NMIAR1_EL1 register - RO. */
480#define ARMV8_AARCH64_SYSREG_ICC_NMIAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 5)
481
482/** ICC_DIR_EL1 register - WO. */
483#define ARMV8_AARCH64_SYSREG_ICC_DIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 1)
484/** ICC_RPR_EL1 register - RO. */
485#define ARMV8_AARCH64_SYSREG_ICC_RPR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 3)
486/** ICC_SGI1R_EL1 register - WO. */
487#define ARMV8_AARCH64_SYSREG_ICC_SGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 5)
488/** ICC_ASGI1R_EL1 register - WO. */
489#define ARMV8_AARCH64_SYSREG_ICC_ASGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 6)
490/** ICC_SGI0R_EL1 register - WO. */
491#define ARMV8_AARCH64_SYSREG_ICC_SGI0R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 7)
492
493/** ICC_IAR1_EL1 register - RO. */
494#define ARMV8_AARCH64_SYSREG_ICC_IAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 0)
495/** ICC_EOIR1_EL1 register - WO. */
496#define ARMV8_AARCH64_SYSREG_ICC_EOIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 1)
497/** ICC_HPPIR1_EL1 register - RO. */
498#define ARMV8_AARCH64_SYSREG_ICC_HPPIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 2)
499/** ICC_BPR1_EL1 register - RW. */
500#define ARMV8_AARCH64_SYSREG_ICC_BPR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 3)
501/** ICC_CTLR_EL1 register - RW. */
502#define ARMV8_AARCH64_SYSREG_ICC_CTLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 4)
503/** ICC_SRE_EL1 register - RW. */
504#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 5)
505/** ICC_IGRPEN0_EL1 register - RW. */
506#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 6)
507/** ICC_IGRPEN1_EL1 register - RW. */
508#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 7)
509
510/** NZCV - Status Flags - ??. */
511#define ARMV8_AARCH64_SYSREG_NZCV ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 0)
512/** DAIF - Interrupt Mask Bits - ??. */
513#define ARMV8_AARCH64_SYSREG_DAIF ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 1)
514/** SVCR - Streaming Vector Control Register - ??. */
515#define ARMV8_AARCH64_SYSREG_SVCR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 2)
516/** DIT - Data Independent Timing - ??. */
517#define ARMV8_AARCH64_SYSREG_DIT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 5)
518/** SSBS - Speculative Store Bypass Safe - ??. */
519#define ARMV8_AARCH64_SYSREG_SSBS ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 6)
520/** TCO - Tag Check Override - ??. */
521#define ARMV8_AARCH64_SYSREG_TCO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 7)
522
523/** CNTV_CTL_EL0 register - RW. */
524#define ARMV8_AARCH64_SYSREG_CNTV_CTL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 3, 1)
525/** @} */
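
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): because ARMV8_AARCH64_SYSREG_ID_CREATE packs op0:op1:CRn:CRm:op2
 * into consecutive bit fields, the IDs above can be compared and even range
 * checked directly.  The function name is made up for the example.
 *
 *     static bool armv8ExampleIsAa64IdRegister(uint16_t idSysReg)
 *     {
 *         // The ID_AA64* registers listed above all use op0=3, op1=0, CRn=0
 *         // and CRm=4..7, so they form one contiguous ID range.
 *         return    idSysReg >= ARMV8_AARCH64_SYSREG_ID_AA64PFR0_EL1
 *                && idSysReg <= ARMV8_AARCH64_SYSREG_ID_AA64MMFR2_EL1;
 *     }
 */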
526
527
528/**
529 * SPSR_EL2 (according to chapter C5.2.19)
530 */
531typedef union ARMV8SPSREL2
532{
533 /** The plain unsigned view. */
534 uint64_t u;
535 /** The 8-bit view. */
536 uint8_t au8[8];
537 /** The 16-bit view. */
538 uint16_t au16[4];
539 /** The 32-bit view. */
540 uint32_t au32[2];
541 /** The 64-bit view. */
542 uint64_t u64;
543} ARMV8SPSREL2;
544/** Pointer to SPSR_EL2. */
545typedef ARMV8SPSREL2 *PARMV8SPSREL2;
546/** Pointer to const SPSR_EL2. */
547typedef const ARMV8SPSREL2 *PCXARMV8SPSREL2;
548
549
550/** @name SPSR_EL2 (When exception is taken from AArch64 state)
551 * @{
552 */
553/** Bit 0 - 3 - M - AArch64 Exception level and selected stack pointer. */
554#define ARMV8_SPSR_EL2_AARCH64_M (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
555#define ARMV8_SPSR_EL2_AARCH64_GET_M(a_Spsr) ((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M)
556/** Bit 0 - SP - Selected stack pointer. */
557#define ARMV8_SPSR_EL2_AARCH64_SP RT_BIT_64(0)
558#define ARMV8_SPSR_EL2_AARCH64_SP_BIT 0
559/** Bit 1 - Reserved (read as zero). */
560#define ARMV8_SPSR_EL2_AARCH64_RSVD_1 RT_BIT_64(1)
561/** Bit 2 - 3 - EL - Exception level. */
562#define ARMV8_SPSR_EL2_AARCH64_EL (RT_BIT_64(2) | RT_BIT_64(3))
563#define ARMV8_SPSR_EL2_AARCH64_EL_SHIFT 2
564#define ARMV8_SPSR_EL2_AARCH64_GET_EL(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_EL_SHIFT) & 3)
565#define ARMV8_SPSR_EL2_AARCH64_SET_EL(a_El) ((a_El) << ARMV8_SPSR_EL2_AARCH64_EL_SHIFT)
566/** Bit 4 - M[4] - Execution state (0 means AArch64, 1 means AArch32). */
567#define ARMV8_SPSR_EL2_AARCH64_M4 RT_BIT_64(4)
568#define ARMV8_SPSR_EL2_AARCH64_M4_BIT 4
569/** Bit 5 - T - T32 instruction set state (only valid when ARMV8_SPSR_EL2_AARCH64_M4 is set). */
570#define ARMV8_SPSR_EL2_AARCH64_T RT_BIT_64(5)
571#define ARMV8_SPSR_EL2_AARCH64_T_BIT 5
572/** Bit 6 - F - FIQ interrupt mask. */
573#define ARMV8_SPSR_EL2_AARCH64_F RT_BIT_64(6)
574#define ARMV8_SPSR_EL2_AARCH64_F_BIT 6
575/** Bit 7 - I - IRQ interrupt mask. */
576#define ARMV8_SPSR_EL2_AARCH64_I RT_BIT_64(7)
577#define ARMV8_SPSR_EL2_AARCH64_I_BIT 7
578/** Bit 8 - A - SError interrupt mask. */
579#define ARMV8_SPSR_EL2_AARCH64_A RT_BIT_64(8)
580#define ARMV8_SPSR_EL2_AARCH64_A_BIT 8
581/** Bit 9 - D - Debug Exception mask. */
582#define ARMV8_SPSR_EL2_AARCH64_D RT_BIT_64(9)
583#define ARMV8_SPSR_EL2_AARCH64_D_BIT 9
584/** Bit 10 - 11 - BTYPE - Branch Type indicator. */
585#define ARMV8_SPSR_EL2_AARCH64_BYTPE (RT_BIT_64(10) | RT_BIT_64(11))
586#define ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT 10
587#define ARMV8_SPSR_EL2_AARCH64_GET_BYTPE(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT) & 3)
588/** Bit 12 - SSBS - Speculative Store Bypass. */
589#define ARMV8_SPSR_EL2_AARCH64_SSBS RT_BIT_64(12)
590#define ARMV8_SPSR_EL2_AARCH64_SSBS_BIT 12
591/** Bit 13 - ALLINT - All IRQ or FIQ interrupts mask. */
592#define ARMV8_SPSR_EL2_AARCH64_ALLINT RT_BIT_64(13)
593#define ARMV8_SPSR_EL2_AARCH64_ALLINT_BIT 13
594/** Bit 14 - 19 - Reserved (read as zero). */
595#define ARMV8_SPSR_EL2_AARCH64_RSVD_14_19 ( RT_BIT_64(14) | RT_BIT_64(15) | RT_BIT_64(16) \
596 | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
597/** Bit 20 - IL - Illegal Execution State flag. */
598#define ARMV8_SPSR_EL2_AARCH64_IL RT_BIT_64(20)
599#define ARMV8_SPSR_EL2_AARCH64_IL_BIT 20
600/** Bit 21 - SS - Software Step flag. */
601#define ARMV8_SPSR_EL2_AARCH64_SS RT_BIT_64(21)
602#define ARMV8_SPSR_EL2_AARCH64_SS_BIT 21
603/** Bit 22 - PAN - Privileged Access Never flag. */
604#define ARMV8_SPSR_EL2_AARCH64_PAN RT_BIT_64(22)
605#define ARMV8_SPSR_EL2_AARCH64_PAN_BIT 22
606/** Bit 23 - UAO - User Access Override flag. */
607#define ARMV8_SPSR_EL2_AARCH64_UAO RT_BIT_64(23)
608#define ARMV8_SPSR_EL2_AARCH64_UAO_BIT 23
609/** Bit 24 - DIT - Data Independent Timing flag. */
610#define ARMV8_SPSR_EL2_AARCH64_DIT RT_BIT_64(24)
611#define ARMV8_SPSR_EL2_AARCH64_DIT_BIT 24
612/** Bit 25 - TCO - Tag Check Override flag. */
613#define ARMV8_SPSR_EL2_AARCH64_TCO RT_BIT_64(25)
614#define ARMV8_SPSR_EL2_AARCH64_TCO_BIT 25
615/** Bit 26 - 27 - Reserved (read as zero). */
616#define ARMV8_SPSR_EL2_AARCH64_RSVD_26_27 (RT_BIT_64(26) | RT_BIT_64(27))
617/** Bit 28 - V - Overflow condition flag. */
618#define ARMV8_SPSR_EL2_AARCH64_V RT_BIT_64(28)
619#define ARMV8_SPSR_EL2_AARCH64_V_BIT 28
620/** Bit 29 - C - Carry condition flag. */
621#define ARMV8_SPSR_EL2_AARCH64_C RT_BIT_64(29)
622#define ARMV8_SPSR_EL2_AARCH64_C_BIT 29
623/** Bit 30 - Z - Zero condition flag. */
624#define ARMV8_SPSR_EL2_AARCH64_Z RT_BIT_64(30)
625#define ARMV8_SPSR_EL2_AARCH64_Z_BIT 30
626/** Bit 31 - N - Negative condition flag. */
627#define ARMV8_SPSR_EL2_AARCH64_N RT_BIT_64(31)
628#define ARMV8_SPSR_EL2_AARCH64_N_BIT 31
629/** Bit 32 - 63 - Reserved (read as zero). */
630#define ARMV8_SPSR_EL2_AARCH64_RSVD_32_63 (UINT64_C(0xffffffff00000000))
631/** Checks whether the given SPSR value indicates an AArch64 execution state. */
632#define ARMV8_SPSR_EL2_IS_AARCH64_STATE(a_Spsr) (!((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M4))
633/** @} */
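
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): determining which exception level an exception was taken from by
 * looking at the saved SPSR.  The function name is made up for the example.
 *
 *     static uint8_t armv8ExampleSpsrGetSourceEl(uint64_t uSpsr)
 *     {
 *         // M[4] distinguishes AArch64 (0) from AArch32 (1) state; the M[3:0]
 *         // encoding below only applies to the AArch64 case.
 *         if (!ARMV8_SPSR_EL2_IS_AARCH64_STATE(uSpsr))
 *             return UINT8_MAX; // AArch32 state, M[3:0] holds an AArch32 mode instead.
 *
 *         // M[3:2] is the exception level, M[0] selects SP_EL0 (0) vs SP_ELx (1).
 *         return (uint8_t)ARMV8_SPSR_EL2_AARCH64_GET_EL(uSpsr);
 *     }
 */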
634
635/** @name AArch64 Exception levels
636 * @{ */
637/** Exception Level 0 - User mode. */
638#define ARMV8_AARCH64_EL_0 0
639/** Exception Level 1 - Supervisor mode. */
640#define ARMV8_AARCH64_EL_1 1
641/** Exception Level 2 - Hypervisor mode. */
642#define ARMV8_AARCH64_EL_2 2
643/** @} */
644
645
646/** @name ESR_EL2 (Exception Syndrome Register, EL2)
647 * @{
648 */
649/** Bit 0 - 24 - ISS - Instruction Specific Syndrome, encoding depends on the exception class. */
650#define ARMV8_ESR_EL2_ISS UINT64_C(0x1ffffff)
651#define ARMV8_ESR_EL2_ISS_GET(a_Esr) ((a_Esr) & ARMV8_ESR_EL2_ISS)
652/** Bit 25 - IL - Instruction length for synchronous exceptions (0 means 16-bit instruction, 1 means 32-bit instruction). */
653#define ARMV8_ESR_EL2_IL RT_BIT_64(25)
654#define ARMV8_ESR_EL2_IL_BIT 25
655#define ARMV8_ESR_EL2_IL_IS_32BIT(a_Esr) RT_BOOL((a_Esr) & ARMV8_ESR_EL2_IL)
656#define ARMV8_ESR_EL2_IL_IS_16BIT(a_Esr) (!((a_Esr) & ARMV8_ESR_EL2_IL))
657/** Bit 26 - 31 - EC - Exception class, indicates reason for the exception that this register holds information about. */
658#define ARMV8_ESR_EL2_EC ( RT_BIT_64(26) | RT_BIT_64(27) | RT_BIT_64(28) \
659 | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
660#define ARMV8_ESR_EL2_EC_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_EC) >> 26)
661/** Bit 32 - 36 - ISS2 - Only valid when FEAT_LS64_V and/or FEAT_LS64_ACCDATA is present. */
662#define ARMV8_ESR_EL2_ISS2 ( RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) \
663 | RT_BIT_64(35) | RT_BIT_64(36))
664#define ARMV8_ESR_EL2_ISS2_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_ISS2) >> 32)
665/** @} */
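
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): splitting an ESR_EL2 value into its three parts using the
 * accessors above.  The function name is made up for the example.
 *
 *     static void armv8ExampleSplitEsrEl2(uint64_t uEsr)
 *     {
 *         uint32_t const uEc        = (uint32_t)ARMV8_ESR_EL2_EC_GET(uEsr);  // bits 31:26 - what happened
 *         uint32_t const uIss       = (uint32_t)ARMV8_ESR_EL2_ISS_GET(uEsr); // bits 24:0  - details, layout depends on uEc
 *         bool     const f32BitInsn = ARMV8_ESR_EL2_IL_IS_32BIT(uEsr);
 *         RT_NOREF(uEc, uIss, f32BitInsn);
 *     }
 */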
666
667
668/** @name ESR_EL2 Exception Classes (EC)
669 * @{ */
670/** Unknown exception reason. */
671#define ARMV8_ESR_EL2_EC_UNKNOWN UINT32_C(0)
672/** Trapped WF* instruction. */
673#define ARMV8_ESR_EL2_EC_TRAPPED_WFX UINT32_C(1)
674/** AArch32 - Trapped MCR or MRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
675#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_15 UINT32_C(3)
676/** AArch32 - Trapped MCRR or MRRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
677#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCRR_MRRC_COPROC15 UINT32_C(4)
678/** AArch32 - Trapped MCR or MRC access (coproc == 0b1110). */
679#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_14 UINT32_C(5)
680/** AArch32 - Trapped LDC or STC access. */
681#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_LDC_STC UINT32_C(6)
682/** AArch32 - Trapped access to SME, SVE, Advanced SIMD or floating point functionality. */
683#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON UINT32_C(7)
684/** AArch32 - Trapped VMRS access not reported using ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON. */
685#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_VMRS UINT32_C(8)
686/** AArch32 - Trapped pointer authentication instruction. */
687#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_PA_INSN UINT32_C(9)
688/** FEAT_LS64 - Exception from LD64B or ST64B instruction. */
689#define ARMV8_ESR_EL2_EC_LS64_EXCEPTION UINT32_C(10)
690/** AArch32 - Trapped MRRC access (coproc == 0b1110). */
691#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MRRC_COPROC14 UINT32_C(12)
692/** FEAT_BTI - Branch Target Exception. */
693#define ARMV8_ESR_EL2_EC_BTI_BRANCH_TARGET_EXCEPTION UINT32_C(13)
694/** Illegal Execution State. */
695#define ARMV8_ESR_EL2_ILLEGAL_EXECUTION_STATE UINT32_C(14)
696/** AArch32 - SVC instruction execution. */
697#define ARMV8_ESR_EL2_EC_AARCH32_SVC_INSN UINT32_C(17)
698/** AArch32 - HVC instruction execution. */
699#define ARMV8_ESR_EL2_EC_AARCH32_HVC_INSN UINT32_C(18)
700/** AArch32 - SMC instruction execution. */
701#define ARMV8_ESR_EL2_EC_AARCH32_SMC_INSN UINT32_C(19)
702/** AArch64 - SVC instruction execution. */
703#define ARMV8_ESR_EL2_EC_AARCH64_SVC_INSN UINT32_C(21)
704/** AArch64 - HVC instruction execution. */
705#define ARMV8_ESR_EL2_EC_AARCH64_HVC_INSN UINT32_C(22)
706/** AArch64 - SMC instruction execution. */
707#define ARMV8_ESR_EL2_EC_AARCH64_SMC_INSN UINT32_C(23)
708/** AArch64 - Trapped MSR, MRS or System instruction execution in AArch64 state. */
709#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_SYS_INSN UINT32_C(24)
710/** FEAT_SVE - Access to SVE functionality not reported using ARMV8_ESR_EL2_EC_UNKNOWN. */
711#define ARMV8_ESR_EL2_EC_SVE_TRAPPED UINT32_C(25)
712/** FEAT_PAuth and FEAT_NV - Trapped ERET, ERETAA or ERETAB instruction. */
713#define ARMV8_ESR_EL2_EC_PAUTH_NV_TRAPPED_ERET_ERETAA_ERETAB UINT32_C(26)
714/** FEAT_TME - Exception from TSTART instruction. */
715#define ARMV8_ESR_EL2_EC_TME_TSTART_INSN_EXCEPTION UINT32_C(27)
716/** FEAT_FPAC - Exception from a Pointer Authentication instruction failure. */
717#define ARMV8_ESR_EL2_EC_FPAC_PA_INSN_FAILURE_EXCEPTION UINT32_C(28)
718/** FEAT_SME - Access to SME functionality trapped. */
719#define ARMV8_ESR_EL2_EC_SME_TRAPPED_SME_ACCESS UINT32_C(29)
720/** FEAT_RME - Exception from Granule Protection Check. */
721#define ARMV8_ESR_EL2_EC_RME_GRANULE_PROT_CHECK_EXCEPTION UINT32_C(30)
722/** Instruction Abort from a lower Exception level. */
723#define ARMV8_ESR_EL2_INSN_ABORT_FROM_LOWER_EL UINT32_C(32)
724/** Instruction Abort from the same Exception level. */
725#define ARMV8_ESR_EL2_INSN_ABORT_FROM_EL2 UINT32_C(33)
726/** PC alignment fault exception. */
727#define ARMV8_ESR_EL2_PC_ALIGNMENT_EXCEPTION UINT32_C(34)
728/** Data Abort from a lower Exception level. */
729#define ARMV8_ESR_EL2_DATA_ABORT_FROM_LOWER_EL UINT32_C(36)
730/** Data Abort from the same Exception level (or access associated with VNCR_EL2). */
731#define ARMV8_ESR_EL2_DATA_ABORT_FROM_EL2 UINT32_C(37)
732/** SP alignment fault exception. */
733#define ARMV8_ESR_EL2_SP_ALIGNMENT_EXCEPTION UINT32_C(38)
734/** FEAT_MOPS - Memory Operation Exception. */
735#define ARMV8_ESR_EL2_EC_MOPS_EXCEPTION UINT32_C(39)
736/** AArch32 - Trapped floating point exception. */
737#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_FP_EXCEPTION UINT32_C(40)
738/** AArch64 - Trapped floating point exception. */
739#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_FP_EXCEPTION UINT32_C(44)
740/** SError interrupt. */
741#define ARMV8_ESR_EL2_SERROR_INTERRUPT UINT32_C(47)
742/** Breakpoint Exception from a lower Exception level. */
743#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_LOWER_EL UINT32_C(48)
744/** Breakpoint Exception from the same Exception level. */
745#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_EL2 UINT32_C(49)
746/** Software Step Exception from a lower Exception level. */
747#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_LOWER_EL UINT32_C(50)
748/** Software Step Exception from the same Exception level. */
749#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_EL2 UINT32_C(51)
750/** Watchpoint Exception from a lower Exception level. */
751#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_LOWER_EL UINT32_C(52)
752/** Watchpoint Exception from the same Exception level. */
753#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_EL2 UINT32_C(53)
754/** AArch32 - BKPT instruction execution. */
755#define ARMV8_ESR_EL2_EC_AARCH32_BKPT_INSN UINT32_C(56)
756/** AArch32 - Vector Catch exception. */
757#define ARMV8_ESR_EL2_EC_AARCH32_VEC_CATCH_EXCEPTION UINT32_C(58)
758/** AArch64 - BRK instruction execution. */
759#define ARMV8_ESR_EL2_EC_AARCH64_BRK_INSN UINT32_C(60)
760/** @} */
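
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): routing an exception on the exception class values above.  The
 * function name and return values are made up for the example.
 *
 *     static int armv8ExampleRouteException(uint64_t uEsr)
 *     {
 *         switch (ARMV8_ESR_EL2_EC_GET(uEsr))
 *         {
 *             case ARMV8_ESR_EL2_DATA_ABORT_FROM_LOWER_EL:    // e.g. guest MMIO access
 *             case ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_SYS_INSN: // trapped MSR/MRS/system instruction
 *             case ARMV8_ESR_EL2_EC_AARCH64_HVC_INSN:         // hypercall
 *                 return 0;  // hand the ISS to the matching decoder (not shown)
 *             default:
 *                 return -1; // unhandled exception class
 *         }
 *     }
 */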
761
762
763/** @name ISS encoding for Data Abort exceptions.
764 * @{ */
765/** Bit 0 - 5 - DFSC - Data Fault Status Code. */
766#define ARMV8_EC_ISS_DATA_ABRT_DFSC ( RT_BIT_32(0) | RT_BIT_32(1) | RT_BIT_32(2) \
767 | RT_BIT_32(3) | RT_BIT_32(4) | RT_BIT_32(5))
768#define ARMV8_EC_ISS_DATA_ABRT_DFSC_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_DFSC)
769/** Bit 6 - WnR - Write not Read. */
770#define ARMV8_EC_ISS_DATA_ABRT_WNR RT_BIT_32(6)
771#define ARMV8_EC_ISS_DATA_ABRT_WNR_BIT 6
772/** Bit 7 - S1PTW - Stage 2 translation fault for an access made for a stage 1 translation table walk. */
773#define ARMV8_EC_ISS_DATA_ABRT_S1PTW RT_BIT_32(7)
774#define ARMV8_EC_ISS_DATA_ABRT_S1PTW_BIT 7
775/** Bit 8 - CM - Cache maintenance instruction. */
776#define ARMV8_EC_ISS_DATA_ABRT_CM RT_BIT_32(8)
777#define ARMV8_EC_ISS_DATA_ABRT_CM_BIT 8
778/** Bit 9 - EA - External abort type. */
779#define ARMV8_EC_ISS_DATA_ABRT_EA RT_BIT_32(9)
780#define ARMV8_EC_ISS_DATA_ABRT_EA_BIT 9
781/** Bit 10 - FnV - FAR not Valid. */
782#define ARMV8_EC_ISS_DATA_ABRT_FNV RT_BIT_32(10)
783#define ARMV8_EC_ISS_DATA_ABRT_FNV_BIT 10
784/** Bit 11 - 12 - LST - Load/Store Type. */
785#define ARMV8_EC_ISS_DATA_ABRT_LST (RT_BIT_32(11) | RT_BIT_32(12))
786#define ARMV8_EC_ISS_DATA_ABRT_LST_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_LST) >> 11)
787/** Bit 13 - VNCR - Fault came from use of VNCR_EL2 register by EL1 code. */
788#define ARMV8_EC_ISS_DATA_ABRT_VNCR RT_BIT_32(13)
789#define ARMV8_EC_ISS_DATA_ABRT_VNCR_BIT 13
790/** Bit 14 - AR - Acquire/Release semantics. */
791#define ARMV8_EC_ISS_DATA_ABRT_AR RT_BIT_32(14)
792#define ARMV8_EC_ISS_DATA_ABRT_AR_BIT 14
793/** Bit 15 - SF - Sixty Four bit general-purpose register transfer (only when ISV is 1). */
794#define ARMV8_EC_ISS_DATA_ABRT_SF RT_BIT_32(15)
795#define ARMV8_EC_ISS_DATA_ABRT_SF_BIT 15
796/** Bit 16 - 20 - SRT - Syndrome Register Transfer. */
797#define ARMV8_EC_ISS_DATA_ABRT_SRT ( RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18) \
798 | RT_BIT_32(19) | RT_BIT_32(20))
799#define ARMV8_EC_ISS_DATA_ABRT_SRT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SRT) >> 16)
800/** Bit 21 - SSE - Syndrome Sign Extend. */
801#define ARMV8_EC_ISS_DATA_ABRT_SSE RT_BIT_32(21)
802#define ARMV8_EC_ISS_DATA_ABRT_SSE_BIT 21
803/** Bit 22 - 23 - SAS - Syndrome Access Size. */
804#define ARMV8_EC_ISS_DATA_ABRT_SAS (RT_BIT_32(22) | RT_BIT_32(23))
805#define ARMV8_EC_ISS_DATA_ABRT_SAS_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SAS) >> 22)
806/** Bit 24 - ISV - Instruction Syndrome Valid. */
807#define ARMV8_EC_ISS_DATA_ABRT_ISV RT_BIT_32(24)
808#define ARMV8_EC_ISS_DATA_ABRT_ISV_BIT 24
809/** @} */
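
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): decoding the data abort syndrome for an emulated device access.
 * The function name is made up for the example.
 *
 *     static int armv8ExampleDecodeDataAbort(uint32_t uIss)
 *     {
 *         // Without a valid instruction syndrome the faulting instruction has
 *         // to be fetched and disassembled instead.
 *         if (!(uIss & ARMV8_EC_ISS_DATA_ABRT_ISV))
 *             return -1;
 *
 *         bool     const fWrite = RT_BOOL(uIss & ARMV8_EC_ISS_DATA_ABRT_WNR);          // write (1) or read (0)
 *         uint32_t const iReg   = ARMV8_EC_ISS_DATA_ABRT_SRT_GET(uIss);                // GPR used for the transfer
 *         uint32_t const cbAcc  = UINT32_C(1) << ARMV8_EC_ISS_DATA_ABRT_SAS_GET(uIss); // 1, 2, 4 or 8 bytes (see SAS below)
 *         RT_NOREF(fWrite, iReg, cbAcc);
 *         return 0;
 *     }
 */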
810
811
812/** @name Data Fault Status Code (DFSC).
813 * @{ */
814/** Address size fault, level 0 of translation or translation table base register. */
815#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL0 0
816/** Address size fault, level 1. */
817#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL1 1
818/** Address size fault, level 2. */
819#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL2 2
820/** Address size fault, level 3. */
821#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL3 3
822/** Translation fault, level 0. */
823#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL0 4
824/** Translation fault, level 1. */
825#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL1 5
826/** Translation fault, level 2. */
827#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL2 6
828/** Translation fault, level 3. */
829#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL3 7
830/** FEAT_LPA2 - Access flag fault, level 0. */
831#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL0 8
832/** Access flag fault, level 1. */
833#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL1 9
834/** Access flag fault, level 2. */
835#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL2 10
836/** Access flag fault, level 3. */
837#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL3 11
838/** FEAT_LPA2 - Permission fault, level 0. */
839#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL0 12
840/** Permission fault, level 1. */
841#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL1 13
842/** Permission fault, level 2. */
843#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL2 14
844/** Permission fault, level 3. */
845#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL3 15
846/** Synchronous External abort, not a translation table walk or hardware update of translation table. */
847#define ARMV8_EC_ISS_DATA_ABRT_DFSC_SYNC_EXTERNAL 16
848/** FEAT_MTE2 - Synchronous Tag Check Fault. */
849#define ARMV8_EC_ISS_DATA_ABRT_DFSC_MTE2_SYNC_TAG_CHK_FAULT 17
850/** @todo Do the rest (lazy developer). */
851/** @} */
852
853
854/** @name SAS encoding.
855 * @{ */
856/** Byte access. */
857#define ARMV8_EC_ISS_DATA_ABRT_SAS_BYTE 0
858/** Halfword access (uint16_t). */
859#define ARMV8_EC_ISS_DATA_ABRT_SAS_HALFWORD 1
860/** Word access (uint32_t). */
861#define ARMV8_EC_ISS_DATA_ABRT_SAS_WORD 2
862/** Doubleword access (uint64_t). */
863#define ARMV8_EC_ISS_DATA_ABRT_SAS_DWORD 3
864/** @} */
865
866
867/** @name ISS encoding for trapped MSR, MRS or System instruction exceptions.
868 * @{ */
869/** Bit 0 - Direction flag. */
870#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION RT_BIT_32(0)
871#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION_IS_READ(a_Iss) RT_BOOL((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION)
872/** Bit 1 - 4 - CRm value from the instruction. */
873#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM ( RT_BIT_32(1) | RT_BIT_32(2) | RT_BIT_32(3) \
874 | RT_BIT_32(4))
875#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM) >> 1)
876/** Bit 5 - 9 - Rt value from the instruction. */
877#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT ( RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7) \
878 | RT_BIT_32(8) | RT_BIT_32(9))
879#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT) >> 5)
880/** Bit 10 - 13 - CRn value from the instruction. */
881#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN ( RT_BIT_32(10) | RT_BIT_32(11) | RT_BIT_32(12) \
882 | RT_BIT_32(13))
883#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN) >> 10)
884/** Bit 14 - 16 - Op1 value from the instruction. */
885#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1 (RT_BIT_32(14) | RT_BIT_32(15) | RT_BIT_32(16))
886#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1) >> 14)
887/** Bit 17 - 19 - Op2 value from the instruction. */
888#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2 (RT_BIT_32(17) | RT_BIT_32(18) | RT_BIT_32(19))
889#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2) >> 17)
890/** Bit 20 - 21 - Op0 value from the instruction. */
891#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0 (RT_BIT_32(20) | RT_BIT_32(21))
892#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0) >> 20)
893/** Bit 22 - 24 - Reserved. */
894#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RSVD (RT_BIT_32(22) | RT_BIT_32(23) | RT_BIT_32(24))
895/** @} */
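
/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): rebuilding the IPRT system register ID from a trapped MSR/MRS
 * syndrome and telling reads from writes.  The function name is made up for
 * the example.
 *
 *     static uint16_t armv8ExampleSysRegIdFromIss(uint32_t uIss, bool *pfRead)
 *     {
 *         *pfRead = ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION_IS_READ(uIss);
 *         return ARMV8_AARCH64_SYSREG_ID_CREATE(ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0_GET(uIss),
 *                                               ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1_GET(uIss),
 *                                               ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN_GET(uIss),
 *                                               ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM_GET(uIss),
 *                                               ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2_GET(uIss));
 *     }
 *
 * ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT_GET then names the general purpose
 * register involved in the access.
 */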
896
897
898/** @name ISS encoding for trapped HVC instruction exceptions.
899 * @{ */
900/** Bit 0 - 15 - imm16 value of the instruction. */
901#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM (UINT16_C(0xffff))
902#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM)
903/** @} */
904
905
906/** @name TCR_EL1 - Translation Control Register (EL1)
907 * @{
908 */
909/** Bit 0 - 5 - Size offset of the memory region addressed by TTBR0_EL1 (2^(64-T0SZ)). */
910#define ARMV8_TCR_EL1_AARCH64_T0SZ ( RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) \
911 | RT_BIT_64(3) | RT_BIT_64(4) | RT_BIT_64(5))
912#define ARMV8_TCR_EL1_AARCH64_T0SZ_GET(a_Tcr) ((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T0SZ)
913/** Bit 7 - Translation table walk disable for translations using TTBR0_EL1. */
914#define ARMV8_TCR_EL1_AARCH64_EPD0 RT_BIT_64(7)
915#define ARMV8_TCR_EL1_AARCH64_EPD0_BIT 7
916/** Bit 8 - 9 - Inner cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
917#define ARMV8_TCR_EL1_AARCH64_IRGN0 (RT_BIT_64(8) | RT_BIT_64(9))
918#define ARMV8_TCR_EL1_AARCH64_IRGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN0) >> 8)
919/** Non cacheable. */
920# define ARMV8_TCR_EL1_AARCH64_IRGN0_NON_CACHEABLE 0
921/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
922# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_WA 1
923/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
924# define ARMV8_TCR_EL1_AARCH64_IRGN0_WT_RA_NWA 2
925/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
926# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_NWA 3
927/** Bit 10 - 11 - Outer cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
928#define ARMV8_TCR_EL1_AARCH64_ORGN0 (RT_BIT_64(10) | RT_BIT_64(11))
929#define ARMV8_TCR_EL1_AARCH64_ORGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN0) >> 10)
930/** Non cacheable. */
931# define ARMV8_TCR_EL1_AARCH64_ORGN0_NON_CACHEABLE 0
932/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
933# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_WA 1
934/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
935# define ARMV8_TCR_EL1_AARCH64_ORGN0_WT_RA_NWA 2
936/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
937# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_NWA 3
938/** Bit 12 - 13 - Shareability attribute for memory associated with translation table walks using TTBR0_EL1. */
939#define ARMV8_TCR_EL1_AARCH64_SH0 (RT_BIT_64(12) | RT_BIT_64(13))
940#define ARMV8_TCR_EL1_AARCH64_SH0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH0) >> 12)
941/** Non shareable. */
942# define ARMV8_TCR_EL1_AARCH64_SH0_NON_SHAREABLE 0
943/** Invalid value. */
944# define ARMV8_TCR_EL1_AARCH64_SH0_INVALID 1
945/** Outer Shareable. */
946# define ARMV8_TCR_EL1_AARCH64_SH0_OUTER_SHAREABLE 2
947/** Inner Shareable. */
948# define ARMV8_TCR_EL1_AARCH64_SH0_INNER_SHAREABLE 3
949/** Bit 14 - 15 - Translation Granule Size for TTBR0_EL1. */
950#define ARMV8_TCR_EL1_AARCH64_TG0 (RT_BIT_64(14) | RT_BIT_64(15))
951#define ARMV8_TCR_EL1_AARCH64_TG0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG0) >> 14)
952/** Invalid granule size. */
953# define ARMV8_TCR_EL1_AARCH64_TG0_INVALID 0
954/** 16KiB granule size. */
955# define ARMV8_TCR_EL1_AARCH64_TG0_16KB 1
956/** 4KiB granule size. */
957# define ARMV8_TCR_EL1_AARCH64_TG0_4KB 2
958/** 64KiB granule size. */
959# define ARMV8_TCR_EL1_AARCH64_TG0_64KB 3
960/** Bit 16 - 21 - Size offset of the memory region addressed by TTBR1_EL1 (2^(64-T1SZ)). */
961#define ARMV8_TCR_EL1_AARCH64_T1SZ ( RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) \
962 | RT_BIT_64(19) | RT_BIT_64(20) | RT_BIT_64(21))
963#define ARMV8_TCR_EL1_AARCH64_T1SZ_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T1SZ) >> 16)
964/** Bit 22 - Selects whether TTBR0_EL1 (0) or TTBR1_EL1 (1) defines the ASID. */
965#define ARMV8_TCR_EL1_AARCH64_A1 RT_BIT_64(22)
966#define ARMV8_TCR_EL1_AARCH64_A1_BIT 22
967/** Bit 23 - Translation table walk disable for translations using TTBR1_EL1. */
968#define ARMV8_TCR_EL1_AARCH64_EPD1 RT_BIT_64(23)
969#define ARMV8_TCR_EL1_AARCH64_EPD1_BIT 23
970/** Bit 24 - 25 - Inner cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
971#define ARMV8_TCR_EL1_AARCH64_IRGN1 (RT_BIT_64(24) | RT_BIT_64(25))
972#define ARMV8_TCR_EL1_AARCH64_IRGN1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN1) >> 24)
973/** Non cacheable. */
974# define ARMV8_TCR_EL1_AARCH64_IRGN1_NON_CACHEABLE 0
975/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
976# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_WA 1
977/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
978# define ARMV8_TCR_EL1_AARCH64_IRGN1_WT_RA_NWA 2
979/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
980# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_NWA 3
981/** Bit 26 - 27 - Outer cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
982#define ARMV8_TCR_EL1_AARCH64_ORGN1 (RT_BIT_64(26) | RT_BIT_64(27))
983#define ARMV8_TCR_EL1_AARCH64_ORGN1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN1) >> 26)
984/** Non cacheable. */
985# define ARMV8_TCR_EL1_AARCH64_ORGN1_NON_CACHEABLE 0
986/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
987# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_WA 1
988/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
989# define ARMV8_TCR_EL1_AARCH64_ORGN1_WT_RA_NWA 2
990/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
991# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_NWA 3
992/** Bit 28 - 29 - Shareability attribute for memory associated with translation table walks using TTBR1_EL1. */
993#define ARMV8_TCR_EL1_AARCH64_SH1 (RT_BIT_64(28) | RT_BIT_64(29))
994#define ARMV8_TCR_EL1_AARCH64_SH1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH1) >> 28)
995/** Non shareable. */
996# define ARMV8_TCR_EL1_AARCH64_SH1_NON_SHAREABLE 0
997/** Invalid value. */
998# define ARMV8_TCR_EL1_AARCH64_SH1_INVALID 1
999/** Outer Shareable. */
1000# define ARMV8_TCR_EL1_AARCH64_SH1_OUTER_SHAREABLE 2
1001/** Inner Shareable. */
1002# define ARMV8_TCR_EL1_AARCH64_SH1_INNER_SHAREABLE 3
1003/** Bit 30 - 31 - Translation Granule Size for TTBR1_EL1. */
1004#define ARMV8_TCR_EL1_AARCH64_TG1 (RT_BIT_64(30) | RT_BIT_64(31))
1005#define ARMV8_TCR_EL1_AARCH64_TG1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG1) >> 30)
1006/** Invalid granule size. */
1007# define ARMV8_TCR_EL1_AARCH64_TG1_INVALID 0
1008/** 16KiB granule size. */
1009# define ARMV8_TCR_EL1_AARCH64_TG1_16KB 1
1010/** 4KiB granule size. */
1011# define ARMV8_TCR_EL1_AARCH64_TG1_4KB 2
1012/** 64KiB granule size. */
1013# define ARMV8_TCR_EL1_AARCH64_TG1_64KB 3
1014/** Bit 32 - 34 - Intermediate Physical Address Size. */
1015#define ARMV8_TCR_EL1_AARCH64_IPS (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34))
1016#define ARMV8_TCR_EL1_AARCH64_IPS_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IPS) >> 32)
1017/** IPA - 32 bits, 4GiB. */
1018# define ARMV8_TCR_EL1_AARCH64_IPS_32BITS 0
1019/** IPA - 36 bits, 64GiB. */
1020# define ARMV8_TCR_EL1_AARCH64_IPS_36BITS 1
1021/** IPA - 40 bits, 1TiB. */
1022# define ARMV8_TCR_EL1_AARCH64_IPS_40BITS 2
1023/** IPA - 42 bits, 4TiB. */
1024# define ARMV8_TCR_EL1_AARCH64_IPS_42BITS 3
1025/** IPA - 44 bits, 16TiB. */
1026# define ARMV8_TCR_EL1_AARCH64_IPS_44BITS 4
1027/** IPA - 48 bits, 256TiB. */
1028# define ARMV8_TCR_EL1_AARCH64_IPS_48BITS 5
1029/** IPA - 52 bits, 4PiB. */
1030# define ARMV8_TCR_EL1_AARCH64_IPS_52BITS 6
1031/** Bit 36 - ASID Size (0 - 8 bit, 1 - 16 bit). */
1032#define ARMV8_TCR_EL1_AARCH64_AS RT_BIT_64(36)
1033#define ARMV8_TCR_EL1_AARCH64_AS_BIT 36
1034/** Bit 37 - Top Byte Ignore for translations from TTBR0_EL1. */
1035#define ARMV8_TCR_EL1_AARCH64_TBI0 RT_BIT_64(37)
1036#define ARMV8_TCR_EL1_AARCH64_TBI0_BIT 37
1037/** Bit 38 - Top Byte Ignore for translations from TTBR1_EL1. */
1038#define ARMV8_TCR_EL1_AARCH64_TBI1 RT_BIT_64(38)
1039#define ARMV8_TCR_EL1_AARCH64_TBI1_BIT 38
1040/** Bit 39 - Hardware Access flag update in stage 1 translations from EL0 and EL1. */
1041#define ARMV8_TCR_EL1_AARCH64_HA RT_BIT_64(39)
1042#define ARMV8_TCR_EL1_AARCH64_HA_BIT 39
1043/** Bit 40 - Hardware management of dirty state in stage 1 translations from EL0 and EL1. */
1044#define ARMV8_TCR_EL1_AARCH64_HD RT_BIT_64(40)
1045#define ARMV8_TCR_EL1_AARCH64_HD_BIT 40
1046/** Bit 41 - Hierarchical Permission Disables for TTBR0_EL1. */
1047#define ARMV8_TCR_EL1_AARCH64_HPD0 RT_BIT_64(41)
1048#define ARMV8_TCR_EL1_AARCH64_HPD0_BIT 41
1049/** Bit 42 - Hierarchical Permission Disables for TTBR1_EL1. */
1050#define ARMV8_TCR_EL1_AARCH64_HPD1 RT_BIT_64(42)
1051#define ARMV8_TCR_EL1_AARCH64_HPD1_BIT 42
1052/** Bit 43 - Bit[59] Hardware Use for translations using TTBR0_EL1. */
1053#define ARMV8_TCR_EL1_AARCH64_HWU059 RT_BIT_64(43)
1054#define ARMV8_TCR_EL1_AARCH64_HWU059_BIT 43
1055/** Bit 44 - Bit[60] Hardware Use for translations using TTBR0_EL1. */
1056#define ARMV8_TCR_EL1_AARCH64_HWU060 RT_BIT_64(44)
1057#define ARMV8_TCR_EL1_AARCH64_HWU060_BIT 44
1058/** Bit 45 - Bit[61] Hardware Use for translations using TTBR0_EL1. */
1059#define ARMV8_TCR_EL1_AARCH64_HWU061 RT_BIT_64(45)
1060#define ARMV8_TCR_EL1_AARCH64_HWU061_BIT 45
1061/** Bit 46 - Bit[62] Hardware Use for translations using TTBR0_EL1. */
1062#define ARMV8_TCR_EL1_AARCH64_HWU062 RT_BIT_64(46)
1063#define ARMV8_TCR_EL1_AARCH64_HWU062_BIT 46
1064/** Bit 47 - Bit[59] Hardware Use for translations using TTBR1_EL1. */
1065#define ARMV8_TCR_EL1_AARCH64_HWU159 RT_BIT_64(47)
1066#define ARMV8_TCR_EL1_AARCH64_HWU159_BIT 47
1067/** Bit 48 - Bit[60] Hardware Use for translations using TTBR1_EL1. */
1068#define ARMV8_TCR_EL1_AARCH64_HWU160 RT_BIT_64(48)
1069#define ARMV8_TCR_EL1_AARCH64_HWU160_BIT 48
1070/** Bit 49 - Bit[61] Hardware Use for translations using TTBR1_EL1. */
1071#define ARMV8_TCR_EL1_AARCH64_HWU161 RT_BIT_64(49)
1072#define ARMV8_TCR_EL1_AARCH64_HWU161_BIT 49
1073/** Bit 50 - Bit[62] Hardware Use for translations using TTBR1_EL1. */
1074#define ARMV8_TCR_EL1_AARCH64_HWU162 RT_BIT_64(50)
1075#define ARMV8_TCR_EL1_AARCH64_HWU162_BIT 50
1076/** Bit 51 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR0_EL1. */
1077#define ARMV8_TCR_EL1_AARCH64_TBID0 RT_BIT_64(51)
1078#define ARMV8_TCR_EL1_AARCH64_TBID0_BIT 51
1079/** Bit 52 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR1_EL1. */
1080#define ARMV8_TCR_EL1_AARCH64_TBID1 RT_BIT_64(52)
1081#define ARMV8_TCR_EL1_AARCH64_TBID1_BIT 52
1082/** Bit 53 - Non fault translation table walk disable for stage 1 translations using TTBR0_EL1. */
1083#define ARMV8_TCR_EL1_AARCH64_NFD0 RT_BIT_64(53)
1084#define ARMV8_TCR_EL1_AARCH64_NFD0_BIT 53
1085/** Bit 54 - Non fault translation table walk disable for stage 1 translations using TTBR1_EL1. */
1086#define ARMV8_TCR_EL1_AARCH64_NFD1 RT_BIT_64(54)
1087#define ARMV8_TCR_EL1_AARCH64_NFD1_BIT 54
1088/** Bit 55 - Faulting Control for Unprivileged access to any address translated by TTBR0_EL1. */
1089#define ARMV8_TCR_EL1_AARCH64_E0PD0 RT_BIT_64(55)
1090#define ARMV8_TCR_EL1_AARCH64_E0PD0_BIT 55
1091/** Bit 56 - Faulting Control for Unprivileged access to any address translated by TTBR1_EL1. */
1092#define ARMV8_TCR_EL1_AARCH64_E0PD1 RT_BIT_64(56)
1093#define ARMV8_TCR_EL1_AARCH64_E0PD1_BIT 56
1094/** Bit 57 - TCMA0 */
1095#define ARMV8_TCR_EL1_AARCH64_TCMA0 RT_BIT_64(57)
1096#define ARMV8_TCR_EL1_AARCH64_TCMA0_BIT 57
1097/** Bit 58 - TCMA1 - Controls the generation of Unchecked accesses at EL1 and EL0 when address bits [59:55] are 0b11111 (FEAT_MTE2). */
1098#define ARMV8_TCR_EL1_AARCH64_TCMA1 RT_BIT_64(58)
1099#define ARMV8_TCR_EL1_AARCH64_TCMA1_BIT 58
1100/** Bit 59 - DS - When set, enables 52-bit output and input addresses for the 4KiB and 16KiB translation granules (FEAT_LPA2). */
1101#define ARMV8_TCR_EL1_AARCH64_DS RT_BIT_64(59)
1102#define ARMV8_TCR_EL1_AARCH64_DS_BIT 59
1103/** @} */
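/* Editorial example (not from the original header): a minimal sketch of
 * testing individual TCR_EL1 bits with the masks above; uTcrEl1 is a
 * hypothetical raw register value.
 *
 *      bool const fHwAccessFlag = RT_BOOL(uTcrEl1 & ARMV8_TCR_EL1_AARCH64_HA);
 *      bool const fHwDirtyState = RT_BOOL(uTcrEl1 & ARMV8_TCR_EL1_AARCH64_HD);
 */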
1104
1105
1106/** @name TTBR<0,1>_EL1 - Translation Table Base Register <0,1> (EL1)
1107 * @{
1108 */
1109/** Bit 0 - Common not Private (FEAT_TTCNP). */
1110#define ARMV8_TTBR_EL1_AARCH64_CNP RT_BIT_64(0)
1111#define ARMV8_TTBR_EL1_AARCH64_CNP_BIT 0
1112/** Bit 1 - 47 - Translation table base address. */
1113#define ARMV8_TTBR_EL1_AARCH64_BADDR UINT64_C(0x0000fffffffffffe)
1114#define ARMV8_TTBR_EL1_AARCH64_BADDR_GET(a_Ttbr) (((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_BADDR) >> 1)
1115/** Bit 48 - 63 - ASID. */
1116#define ARMV8_TTBR_EL1_AARCH64_ASID UINT64_C(0xffff000000000000)
1117#define ARMV8_TTBR_EL1_AARCH64_ASID_GET(a_Ttbr) (((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_ASID) >> 48)
1118/** @} */
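/* Editorial example (not from the original header): decoding a hypothetical
 * raw TTBR0_EL1 value into the translation table base address and the ASID
 * using the definitions above.
 *
 *      uint64_t const uBaseAddr = uTtbr0 & ARMV8_TTBR_EL1_AARCH64_BADDR;             // bits [47:1] of the table base, bit 0 is zero
 *      uint16_t const uAsid     = (uint16_t)ARMV8_TTBR_EL1_AARCH64_ASID_GET(uTtbr0);
 */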
1119
1120
1121/** @name ICC_PMR_EL1 - Interrupt Controller Interrupt Priority Mask Register
1122 * @{ */
1123/** Bit 0 - 7 - Priority - The priority mask level for the CPU interface. */
1124#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY UINT64_C(0xff)
1125#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_GET(a_Pmr) ((a_Pmr) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1126#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_SET(a_Prio) ((a_Prio) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1127/** @} */
1128
1129
1130/** @name ICC_BPR0_EL1 - Interrupt Controller Binary Point Register 0 (EL1), determines the group priority split for Group 0 interrupts.
1131 * @{ */
1132/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1133#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1134#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_GET(a_Bpr0) ((a_Bpr0) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1135#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1136/** @} */
1137
1138
1139/** @name ICC_BPR1_EL1 - Interrupt Controller Binary Point Register 1 (EL1), determines the group priority split for Group 1 interrupts.
1140 * @{ */
1141/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1142#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1143#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_GET(a_Bpr1) ((a_Bpr1) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1144#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1145/** @} */
1146
1147
1148/** @name ICC_CTLR_EL1 - Interrupt Controller Control Register (EL1)
1149 * @{ */
1150/** Bit 0 - Common Binary Point Register - RW. */
1151#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR RT_BIT_64(0)
1152#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR_BIT 0
1153/** Bit 1 - EOI mode for current security state, when set ICC_DIR_EL1 provides interrupt deactivation functionality - RW. */
1154#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE RT_BIT_64(1)
1155#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE_BIT 1
1156/** Bit 7 - Priority Mask Hint Enable - RW (under circumstances). */
1157#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE RT_BIT_64(7)
1158#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE_BIT 7
1159/** Bit 8 - 10 - Priority bits - the number of priority bits implemented, minus one - RO. */
1160#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10))
1161#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS_SET(a_PriBits) (((a_PriBits) << 8) & ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS)
1162/** Bit 11 - 13 - Interrupt identifier bits - RO. */
1163#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS (RT_BIT_64(11) | RT_BIT_64(12) | RT_BIT_64(13))
1164#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_SET(a_IdBits) (((a_IdBits) << 11) & ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS)
1165/** INTIDS are 16-bit wide. */
1166# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_16BITS 0
1167/** INTIDS are 24-bit wide. */
1168# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_24BITS 1
1169/** Bit 14 - SEI Supported - RO. */
1170#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS RT_BIT_64(14)
1171#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS_BIT 14
1172/** Bit 15 - Affinity 3 Valid - RO. */
1173#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V RT_BIT_64(15)
1174#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V_BIT 15
1175/** Bit 18 - Range Selector Support - RO. */
1176#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS RT_BIT_64(18)
1177#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS_BIT 18
1178/** Bit 19 - Extended INTID range supported - RO. */
1179#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE RT_BIT_64(19)
1180#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE_BIT 19
1181/** All RW bits. */
1182#define ARMV8_ICC_CTLR_EL1_RW (ARMV8_ICC_CTLR_EL1_AARCH64_CBPR | ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE | ARMV8_ICC_CTLR_EL1_AARCH64_PMHE)
1183/** All RO bits (including Res0). */
1184#define ARMV8_ICC_CTLR_EL1_RO ~ARMV8_ICC_CTLR_EL1_RW
1185/** @} */
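/* Editorial example (not from the original header): composing a hypothetical
 * emulated ICC_CTLR_EL1 value that advertises five priority bits (PRIbits is
 * encoded as the number of bits minus one) and 16-bit INTIDs.
 *
 *      uint64_t const uIccCtlr = ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS_SET(4)
 *                              | ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_SET(ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_16BITS);
 */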
1186
1187
1188/** @name ICC_IGRPEN0_EL1 - Interrupt Controller Interrupt Group 0 Enable Register (EL1)
1189 * @{ */
1190/** Bit 0 - Enables Group 0 interrupts for the current Security state. */
1191#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE RT_BIT_64(0)
1192#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE_BIT 0
1193/** @} */
1194
1195
1196/** @name ICC_IGRPEN1_EL1 - Interrupt Controller Interrupt Group 1 Enable Register (EL1)
1197 * @{ */
1198/** Bit 0 - Enables Group 1 interrupts for the current Security state. */
1199#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE RT_BIT_64(0)
1200#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE_BIT 0
1201/** @} */
1202
1203
1204/** @name ICC_SGI1R_EL1 - Interrupt Controller Software Generated Interrupt Group 1 Register (EL1) - WO
1205 * @{ */
1206/** Bit 0 - 15 - Target List, the set of PEs for which SGI interrupts will be generated. */
1207#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST (UINT64_C(0x000000000000ffff))
1208#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST_GET(a_Sgi1R) ((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST)
1209/** Bit 16 - 23 - The affinity 1 of the affinity path of the cluster for which SGI interrupts will be generated. */
1210#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1 (UINT64_C(0x0000000000ff0000))
1211#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1) >> 16)
1212/** Bit 24 - 27 - The INTID of the SGI. */
1213#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1214#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_INTID) >> 24)
1215/* Bit 28 - 31 - Reserved. */
1216/** Bit 32 - 39 - The affinity 2 of the affinity path of the cluster for which SGI interrupts will be generated. */
1217#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2 (UINT64_C(0x000000ff00000000))
1218#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2) >> 32)
1219/** Bit 40 - Interrupt Routing Mode - 1 means interrupts to all PEs in the system excluding the generating PE. */
1220#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM RT_BIT_64(40)
1221#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM_BIT 40
1222/* Bit 41 - 43 - Reserved. */
1223/** Bit 44 - 47 - Range selector. */
1224#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1225#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_RS) >> 44)
1226/** Bit 48 - 55 - The affinity 3 of the affinity path of the cluster for which SGI interrupts will be generated. */
1227#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3 (UINT64_C(0x00ff000000000000))
1228#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3) >> 48)
1229/* Bit 56 - 63 - Reserved. */
1230/** @} */
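/* Editorial example (not from the original header): decoding a hypothetical
 * ICC_SGI1R_EL1 write into the SGI INTID and its routing information.
 *
 *      uint32_t const uIntId      = (uint32_t)ARMV8_ICC_SGI1R_EL1_AARCH64_INTID_GET(uSgi1R);
 *      uint16_t const bmTargets   = (uint16_t)ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST_GET(uSgi1R);
 *      uint8_t  const uAff1       = (uint8_t)ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1_GET(uSgi1R);
 *      bool     const fToAllOther = RT_BOOL(uSgi1R & ARMV8_ICC_SGI1R_EL1_AARCH64_IRM);   // route to all PEs but the sender
 */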
1231
1232
1233/** @name CNTV_CTL_EL0 - Counter-timer Virtual Timer Control register.
1234 * @{ */
1235/** Bit 0 - Enables the timer. */
1236#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE RT_BIT_64(0)
1237#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE_BIT 0
1238/** Bit 1 - Timer interrupt mask bit. */
1239#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK RT_BIT_64(1)
1240#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK_BIT 1
1241/** Bit 2 - Timer status bit. */
1242#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS RT_BIT_64(2)
1243#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS_BIT 2
1244/** @} */
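/* Editorial example (not from the original header): a value enabling the
 * virtual timer with its interrupt unmasked (ISTATUS is read-only and left
 * clear); it would be written to CNTV_CTL_EL0.
 *
 *      uint64_t const uCntvCtl = ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE;    // ENABLE=1, IMASK=0
 */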
1245
1246
1247/** @name OSLAR_EL1 - OS Lock Access Register.
1248 * @{ */
1249/** Bit 0 - The OS Lock status bit. */
1250#define ARMV8_OSLAR_EL1_AARCH64_OSLK RT_BIT_64(0)
1251#define ARMV8_OSLAR_EL1_AARCH64_OSLK_BIT 0
1252/** @} */
1253
1254
1255/** @name OSLSR_EL1 - OS Lock Status Register.
1256 * @{ */
1257/** Bit 0 - OSLM[0] Bit 0 of OS Lock model implemented. */
1258#define ARMV8_OSLSR_EL1_AARCH64_OSLM0 RT_BIT_64(0)
1259#define ARMV8_OSLSR_EL1_AARCH64_OSLM0_BIT 0
1260/** Bit 1 - The OS Lock status bit. */
1261#define ARMV8_OSLSR_EL1_AARCH64_OSLK RT_BIT_64(1)
1262#define ARMV8_OSLSR_EL1_AARCH64_OSLK_BIT 1
1263/** Bit 2 - Not 32-bit access. */
1264#define ARMV8_OSLSR_EL1_AARCH64_NTT RT_BIT_64(2)
1265#define ARMV8_OSLSR_EL1_AARCH64_NTT_BIT 2
1266/** Bit 3 - OSLM[1] Bit 1 of OS Lock model implemented. */
1267#define ARMV8_OSLSR_EL1_AARCH64_OSLM1 RT_BIT_64(3)
1268#define ARMV8_OSLSR_EL1_AARCH64_OSLM1_BIT 3
1269/** @} */
1270
1271
1272/** @name ID_AA64ISAR0_EL1 - AArch64 Instruction Set Attribute Register 0.
1273 * @{ */
1274/* Bit 0 - 3 - Reserved. */
1275/** Bit 4 - 7 - Indicates support for AES instructions in AArch64 state. */
1276#define ARMV8_ID_AA64ISAR0_EL1_AES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1277#define ARMV8_ID_AA64ISAR0_EL1_AES_SHIFT 4
1278/** No AES instructions implemented. */
1279# define ARMV8_ID_AA64ISAR0_EL1_AES_NOT_IMPL 0
1280/** AES, AESD, AESMC and AESIMC instructions implemented (FEAT_AES). */
1281# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED 1
1282/** AES, AESD, AESMC and AESIMC instructions implemented and PMULL and PMULL2 instructions operating on 64-bit source elements (FEAT_PMULL). */
1283# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED_PMULL 2
1284/** Bit 8 - 11 - Indicates support for SHA1 instructions in AArch64 state. */
1285#define ARMV8_ID_AA64ISAR0_EL1_SHA1_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1286#define ARMV8_ID_AA64ISAR0_EL1_SHA1_SHIFT 8
1287/** No SHA1 instructions implemented. */
1288# define ARMV8_ID_AA64ISAR0_EL1_SHA1_NOT_IMPL 0
1289/** SHA1C, SHA1P, SHA1M, SHA1H, SHA1SU0 and SHA1SU1 instructions implemented (FEAT_SHA1). */
1290# define ARMV8_ID_AA64ISAR0_EL1_SHA1_SUPPORTED 1
1291/** Bit 12 - 15 - Indicates support for SHA2 instructions in AArch64 state. */
1292#define ARMV8_ID_AA64ISAR0_EL1_SHA2_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1293#define ARMV8_ID_AA64ISAR0_EL1_SHA2_SHIFT 12
1294/** No SHA2 instructions implemented. */
1295# define ARMV8_ID_AA64ISAR0_EL1_SHA2_NOT_IMPL 0
1296/** SHA256 instructions implemented (FEAT_SHA256). */
1297# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256 1
1298/** SHA256 and SHA512 instructions implemented (FEAT_SHA512). */
1299# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256_SHA512 2
1300/** Bit 16 - 19 - Indicates support for CRC32 instructions in AArch64 state. */
1301#define ARMV8_ID_AA64ISAR0_EL1_CRC32_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1302#define ARMV8_ID_AA64ISAR0_EL1_CRC32_SHIFT 16
1303/** No CRC32 instructions implemented. */
1304# define ARMV8_ID_AA64ISAR0_EL1_CRC32_NOT_IMPL 0
1305/** CRC32 instructions implemented (FEAT_CRC32). */
1306# define ARMV8_ID_AA64ISAR0_EL1_CRC32_SUPPORTED 1
1307/** Bit 20 - 23 - Indicates support for Atomic instructions in AArch64 state. */
1308#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1309#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SHIFT 20
1310/** No Atomic instructions implemented. */
1311# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_NOT_IMPL 0
1312/** Atomic instructions implemented (FEAT_LSE). */
1313# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SUPPORTED 2
1314/** Bit 24 - 27 - Indicates support for TME instructions. */
1315#define ARMV8_ID_AA64ISAR0_EL1_TME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1316#define ARMV8_ID_AA64ISAR0_EL1_TME_SHIFT 24
1317/** TME instructions are not implemented. */
1318# define ARMV8_ID_AA64ISAR0_EL1_TME_NOT_IMPL 0
1319/** TME instructions are implemented. */
1320# define ARMV8_ID_AA64ISAR0_EL1_TME_SUPPORTED 1
1321/** Bit 28 - 31 - Indicates support for SQRDMLAH and SQRDMLSH instructions in AArch64 state. */
1322#define ARMV8_ID_AA64ISAR0_EL1_RDM_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1323#define ARMV8_ID_AA64ISAR0_EL1_RDM_SHIFT 28
1324/** No RDM (SQRDMLAH and SQRDMLSH) instructions implemented. */
1325# define ARMV8_ID_AA64ISAR0_EL1_RDM_NOT_IMPL 0
1326/** SQRDMLAH and SQRDMLSH instructions implemented (FEAT_RDM). */
1327# define ARMV8_ID_AA64ISAR0_EL1_RDM_SUPPORTED 1
1328/** Bit 32 - 35 - Indicates support for SHA3 instructions in AArch64 state. */
1329#define ARMV8_ID_AA64ISAR0_EL1_SHA3_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1330#define ARMV8_ID_AA64ISAR0_EL1_SHA3_SHIFT 32
1331/** No SHA3 instructions implemented. */
1332# define ARMV8_ID_AA64ISAR0_EL1_SHA3_NOT_IMPL 0
1333/** EOR3, RAX1, XAR and BCAX instructions implemented (FEAT_SHA3). */
1334# define ARMV8_ID_AA64ISAR0_EL1_SHA3_SUPPORTED 1
1335/** Bit 36 - 39 - Indicates support for SM3 instructions in AArch64 state. */
1336#define ARMV8_ID_AA64ISAR0_EL1_SM3_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1337#define ARMV8_ID_AA64ISAR0_EL1_SM3_SHIFT 36
1338/** No SM3 instructions implemented. */
1339# define ARMV8_ID_AA64ISAR0_EL1_SM3_NOT_IMPL 0
1340/** SM3 instructions implemented (FEAT_SM3). */
1341# define ARMV8_ID_AA64ISAR0_EL1_SM3_SUPPORTED 1
1342/** Bit 40 - 43 - Indicates support for SM4 instructions in AArch64 state. */
1343#define ARMV8_ID_AA64ISAR0_EL1_SM4_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1344#define ARMV8_ID_AA64ISAR0_EL1_SM4_SHIFT 40
1345/** No SM4 instructions implemented. */
1346# define ARMV8_ID_AA64ISAR0_EL1_SM4_NOT_IMPL 0
1347/** SM4 instructions implemented (FEAT_SM4). */
1348# define ARMV8_ID_AA64ISAR0_EL1_SM4_SUPPORTED 1
1349/** Bit 44 - 47 - Indicates support for Dot Product instructions in AArch64 state. */
1350#define ARMV8_ID_AA64ISAR0_EL1_DP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1351#define ARMV8_ID_AA64ISAR0_EL1_DP_SHIFT 44
1352/** No Dot Product instructions implemented. */
1353# define ARMV8_ID_AA64ISAR0_EL1_DP_NOT_IMPL 0
1354/** UDOT and SDOT instructions implemented (FEAT_DotProd). */
1355# define ARMV8_ID_AA64ISAR0_EL1_DP_SUPPORTED 1
1356/** Bit 48 - 51 - Indicates support for FMLAL and FMLSL instructions. */
1357#define ARMV8_ID_AA64ISAR0_EL1_FHM_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1358#define ARMV8_ID_AA64ISAR0_EL1_FHM_SHIFT 48
1359/** FMLAL and FMLSL instructions are not implemented. */
1360# define ARMV8_ID_AA64ISAR0_EL1_FHM_NOT_IMPL 0
1361/** FMLAL and FMLSL instructions are implemented (FEAT_FHM). */
1362# define ARMV8_ID_AA64ISAR0_EL1_FHM_SUPPORTED 1
1363/** Bit 52 - 55 - Indicates support for flag manipulation instructions. */
1364#define ARMV8_ID_AA64ISAR0_EL1_TS_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1365#define ARMV8_ID_AA64ISAR0_EL1_TS_SHIFT 52
1366/** No flag manipulation instructions implemented. */
1367# define ARMV8_ID_AA64ISAR0_EL1_TS_NOT_IMPL 0
1368/** CFINV, RMIF, SETF16 and SETF8 instructions are implemented (FEAT_FlagM). */
1369# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED 1
1370/** CFINV, RMIF, SETF16, SETF8, AXFLAG and XAFLAG instructions are implemented (FEAT_FlagM2). */
1371# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED_2 2
1372/** Bit 56 - 59 - Indicates support for Outer Shareable and TLB range maintenance instructions. */
1373#define ARMV8_ID_AA64ISAR0_EL1_TLB_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1374#define ARMV8_ID_AA64ISAR0_EL1_TLB_SHIFT 56
1375/** Outer Shareable and TLB range maintenance instructions are not implemented. */
1376# define ARMV8_ID_AA64ISAR0_EL1_TLB_NOT_IMPL 0
1377/** Outer Shareable TLB maintenance instructions are implemented (FEAT_TLBIOS). */
1378# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED 1
1379/** Outer Shareable and TLB range maintenance instructions are implemented (FEAT_TLBIRANGE). */
1380# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED_RANGE 2
1381/** Bit 60 - 63 - Indicates support for Random Number instructions in AArch64 state. */
1382#define ARMV8_ID_AA64ISAR0_EL1_RNDR_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1383#define ARMV8_ID_AA64ISAR0_EL1_RNDR_SHIFT 60
1384/** No Random Number instructions implemented. */
1385# define ARMV8_ID_AA64ISAR0_EL1_RNDR_NOT_IMPL 0
1386/** RNDR and RNDRRS registers are implemented (FEAT_RNG). */
1387# define ARMV8_ID_AA64ISAR0_EL1_RNDR_SUPPORTED 1
1388/** @} */
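/* Editorial example (not from the original header): extracting the AES field
 * from a hypothetical raw ID_AA64ISAR0_EL1 value and checking whether PMULL
 * on 64-bit elements is available.
 *
 *      uint64_t const uAes   = (uIsar0 & ARMV8_ID_AA64ISAR0_EL1_AES_MASK) >> ARMV8_ID_AA64ISAR0_EL1_AES_SHIFT;
 *      bool     const fPmull = uAes >= ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED_PMULL;
 */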
1389
1390
1391/** @name ID_AA64ISAR1_EL1 - AArch64 Instruction Set Attribute Register 1.
1392 * @{ */
1393/** Bit 0 - 3 - Indicates support for Data Persistence writeback instructions in AArch64 state. */
1394#define ARMV8_ID_AA64ISAR1_EL1_DPB_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1395#define ARMV8_ID_AA64ISAR1_EL1_DPB_SHIFT 0
1396/** DC CVAP not supported. */
1397# define ARMV8_ID_AA64ISAR1_EL1_DPB_NOT_IMPL 0
1398/** DC CVAP supported (FEAT_DPB). */
1399# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED 1
1400/** DC CVAP and DC CVADP supported (FEAT_DPB2). */
1401# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED_2 2
1402/** Bit 4 - 7 - Indicates whether the QARMA5 algorithm is implemented in the PE for address authentication. */
1403#define ARMV8_ID_AA64ISAR1_EL1_APA_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1404#define ARMV8_ID_AA64ISAR1_EL1_APA_SHIFT 4
1405/** Address Authentication using the QARMA5 algorithm is not implemented. */
1406# define ARMV8_ID_AA64ISAR1_EL1_APA_NOT_IMPL 0
1407/** Address Authentication using the QARMA5 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA5). */
1408# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH 1
1409/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA5). */
1410# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_EPAC 2
1411/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA5). */
1412# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH2 3
1413/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA5). */
1414# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPAC 4
1415/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA5). */
1416# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPACCOMBINE 5
1417/** Bit 8 - 11 - Indicates whether an implementation defined algorithm is implemented in the PE for address authentication. */
1418#define ARMV8_ID_AA64ISAR1_EL1_API_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1419#define ARMV8_ID_AA64ISAR1_EL1_API_SHIFT 8
1420/** Address Authentication using an implementation defined algorithm is not implemented. */
1421# define ARMV8_ID_AA64ISAR1_EL1_API_NOT_IMPL 0
1422/** Address Authentication using an implementation defined algorithm is implemented (FEAT_PAuth, FEAT_PACIMP). */
1423# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH 1
1424/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACIMP). */
1425# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_EPAC 2
1426/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACIMP). */
1427# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH2 3
1428/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACIMP). */
1429# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPAC 4
1430/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACIMP). */
1431# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPACCOMBINE 5
1432/** Bit 12 - 15 - Indicates support for JavaScript conversion from double-precision floating-point values to integers in AArch64 state. */
1433#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1434#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SHIFT 12
1435/** No FJCVTZS instruction implemented. */
1436# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_NOT_IMPL 0
1437/** FJCVTZS instruction implemented (FEAT_JSCVT). */
1438# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SUPPORTED 1
1439/** Bit 16 - 19 - Indicates support for the complex number addition and multiplication instructions (FCMLA and FCADD) in AArch64 state. */
1440#define ARMV8_ID_AA64ISAR1_EL1_FCMA_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1441#define ARMV8_ID_AA64ISAR1_EL1_FCMA_SHIFT 16
1442/** No FCMLA and FCADD instructions implemented. */
1443# define ARMV8_ID_AA64ISAR1_EL1_FCMA_NOT_IMPL 0
1444/** FCMLA and FCADD instructions implemented (FEAT_FCMA). */
1445# define ARMV8_ID_AA64ISAR1_EL1_FCMA_SUPPORTED 1
1446/** Bit 20 - 23 - Indicates support for weaker release consistency, RCpc, based model. */
1447#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1448#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SHIFT 20
1449/** No RCpc instructions implemented. */
1450# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_NOT_IMPL 0
1451/** The no offset LDAPR, LDAPRB and LDAPRH instructions are implemented (FEAT_LRCPC). */
1452# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED 1
1453/** The no offset LDAPR, LDAPRB and LDAPRH instructions and the unscaled offset LDAPUR* and STLUR* instructions are implemented (FEAT_LRCPC2). */
1454# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED_2 2
1455/** Bit 24 - 27 - Indicates whether the QARMA5 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1456#define ARMV8_ID_AA64ISAR1_EL1_GPA_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1457#define ARMV8_ID_AA64ISAR1_EL1_GPA_SHIFT 24
1458/** Generic Authentication using the QARMA5 algorithm is not implemented. */
1459# define ARMV8_ID_AA64ISAR1_EL1_GPA_NOT_IMPL 0
1460/** Generic Authentication using the QARMA5 algorithm is implemented (FEAT_PACQARMA5). */
1461# define ARMV8_ID_AA64ISAR1_EL1_GPA_SUPPORTED 1
1462/** Bit 28 - 31 - Indicates whether an implementation defined algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1463#define ARMV8_ID_AA64ISAR1_EL1_GPI_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1464#define ARMV8_ID_AA64ISAR1_EL1_GPI_SHIFT 28
1465/** Generic Authentication using an implementation defined algorithm is not implemented. */
1466# define ARMV8_ID_AA64ISAR1_EL1_GPI_NOT_IMPL 0
1467/** Generic Authentication using an implementation defined algorithm is implemented (FEAT_PACIMP). */
1468# define ARMV8_ID_AA64ISAR1_EL1_GPI_SUPPORTED 1
1469/** Bit 32 - 35 - Indicates support for the FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions in AArch64 state. */
1470#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1471#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SHIFT 32
1472/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are not implemented. */
1473# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_NOT_IMPL 0
1474/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are implemented (FEAT_FRINTTS). */
1475# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SUPPORTED 1
1476/** Bit 36 - 39 - Indicates support for SB instructions in AArch64 state. */
1477#define ARMV8_ID_AA64ISAR1_EL1_SB_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1478#define ARMV8_ID_AA64ISAR1_EL1_SB_SHIFT 36
1479/** No SB instructions implemented. */
1480# define ARMV8_ID_AA64ISAR1_EL1_SB_NOT_IMPL 0
1481/** SB instructions implemented (FEAT_SB). */
1482# define ARMV8_ID_AA64ISAR1_EL1_SB_SUPPORTED 1
1483/** Bit 40 - 43 - Indicates support for prediction invalidation instructions in AArch64 state. */
1484#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1485#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SHIFT 40
1486/** Prediction invalidation instructions are not implemented. */
1487# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_NOT_IMPL 0
1488/** Prediction invalidation instructions are implemented (FEAT_SPECRES). */
1489# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SUPPORTED 1
1490/** Bit 44 - 47 - Indicates support for Advanced SIMD and Floating-point BFloat16 instructions in AArch64 state. */
1491#define ARMV8_ID_AA64ISAR1_EL1_BF16_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1492#define ARMV8_ID_AA64ISAR1_EL1_BF16_SHIFT 44
1493/** BFloat16 instructions are not implemented. */
1494# define ARMV8_ID_AA64ISAR1_EL1_BF16_NOT_IMPL 0
1495/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented (FEAT_BF16). */
1496# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_BF16 1
1497/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented and FPCR.EBF is supported (FEAT_EBF16). */
1498# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_EBF16 2
1499/** Bit 48 - 51 - Indicates support for Data Gathering Hint instructions. */
1500#define ARMV8_ID_AA64ISAR1_EL1_DGH_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1501#define ARMV8_ID_AA64ISAR1_EL1_DGH_SHIFT 48
1502/** Data Gathering Hint instructions are not implemented. */
1503# define ARMV8_ID_AA64ISAR1_EL1_DGH_NOT_IMPL 0
1504/** Data Gathering Hint instructions are implemented (FEAT_DGH). */
1505# define ARMV8_ID_AA64ISAR1_EL1_DGH_SUPPORTED 1
1506/** Bit 52 - 55 - Indicates support for Advanced SIMD and Floating-point Int8 matrix multiplication instructions. */
1507#define ARMV8_ID_AA64ISAR1_EL1_I8MM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1508#define ARMV8_ID_AA64ISAR1_EL1_I8MM_SHIFT 52
1509/** No Int8 matrix multiplication instructions implemented. */
1510# define ARMV8_ID_AA64ISAR1_EL1_I8MM_NOT_IMPL 0
1511/** SMMLA, SUDOT, UMMLA, USMMLA and USDOT instructions are implemented (FEAT_I8MM). */
1512# define ARMV8_ID_AA64ISAR1_EL1_I8MM_SUPPORTED 1
1513/** Bit 56 - 59 - Indicates support for the XS attribute and the TLBI and DSB instructions with the nXS qualifier in AArch64 state. */
1514#define ARMV8_ID_AA64ISAR1_EL1_XS_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1515#define ARMV8_ID_AA64ISAR1_EL1_XS_SHIFT 56
1516/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are not supported. */
1517# define ARMV8_ID_AA64ISAR1_EL1_XS_NOT_IMPL 0
1518/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are supported (FEAT_XS). */
1519# define ARMV8_ID_AA64ISAR1_EL1_XS_SUPPORTED 1
1520/** Bit 60 - 63 - Indicates support for the LD64B and ST64B* instructions and the ACCDATA_EL1 register. */
1521#define ARMV8_ID_AA64ISAR1_EL1_LS64_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1522#define ARMV8_ID_AA64ISAR1_EL1_LS64_SHIFT 60
1523/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are not supported. */
1524# define ARMV8_ID_AA64ISAR1_EL1_LS64_NOT_IMPL 0
1525/** The LD64B and ST64B instructions are supported (FEAT_LS64). */
1526# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED 1
1527/** The LD64B, ST64B and ST64BV instructions and associated traps are supported (FEAT_LS64_V). */
1528# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_V 2
1529/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are supported (FEAT_LS64_ACCDATA). */
1530# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_ACCDATA 3
1531/** @} */
1532
1533
1534/** @name ID_AA64ISAR2_EL1 - AArch64 Instruction Set Attribute Register 2.
1535 * @{ */
1536/** Bit 0 - 3 - Indicates support for WFET and WFIT instructions in AArch64 state. */
1537#define ARMV8_ID_AA64ISAR2_EL1_WFXT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1538#define ARMV8_ID_AA64ISAR2_EL1_WFXT_SHIFT 0
1539/** WFET and WFIT are not supported. */
1540# define ARMV8_ID_AA64ISAR2_EL1_WFXT_NOT_IMPL 0
1541/** WFET and WFIT are supported (FEAT_WFxT). */
1542# define ARMV8_ID_AA64ISAR2_EL1_WFXT_SUPPORTED 2
1543/** Bit 4 - 7 - Indicates support for 12 bits of mantissa in reciprocal and reciprocal square root instructions in AArch64 state, when FPCR.AH is 1. */
1544#define ARMV8_ID_AA64ISAR2_EL1_RPRES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1545#define ARMV8_ID_AA64ISAR2_EL1_RPRES_SHIFT 4
1546/** Reciprocal and reciprocal square root estimates give 8 bits of mantissa when FPCR.AH is 1. */
1547# define ARMV8_ID_AA64ISAR2_EL1_RPRES_NOT_IMPL 0
1548/** Reciprocal and reciprocal square root estimates give 12 bits of mantissa when FPCR.AH is 1 (FEAT_RPRES). */
1549# define ARMV8_ID_AA64ISAR2_EL1_RPRES_SUPPORTED 1
1550/** Bit 8 - 11 - Indicates whether the QARMA3 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1551#define ARMV8_ID_AA64ISAR2_EL1_GPA3_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1552#define ARMV8_ID_AA64ISAR2_EL1_GPA3_SHIFT 8
1553/** Generic Authentication using the QARMA3 algorithm is not implemented. */
1554# define ARMV8_ID_AA64ISAR2_EL1_GPA3_NOT_IMPL 0
1555/** Generic Authentication using the QARMA3 algorithm is implemented (FEAT_PACQARMA3). */
1556# define ARMV8_ID_AA64ISAR2_EL1_GPA3_SUPPORTED 1
1557/** Bit 12 - 15 - Indicates whether the QARMA3 algorithm is implemented in the PE for address authentication. */
1558#define ARMV8_ID_AA64ISAR2_EL1_APA3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1559#define ARMV8_ID_AA64ISAR2_EL1_APA3_SHIFT 12
1560/** Address Authentication using the QARMA3 algorithm is not implemented. */
1561# define ARMV8_ID_AA64ISAR2_EL1_APA3_NOT_IMPL 0
1562/** Address Authentication using the QARMA3 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA3). */
1563# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH 1
1564/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA3). */
1565# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_EPAC 2
1566/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA3). */
1567# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH2 3
1568/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA3). */
1569# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPAC 4
1570/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA3). */
1571# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPACCOMBINE 5
1572/** Bit 16 - 19 - Indicates support for Memory Copy and Memory Set instructions in AArch64 state. */
1573#define ARMV8_ID_AA64ISAR2_EL1_MOPS_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1574#define ARMV8_ID_AA64ISAR2_EL1_MOPS_SHIFT 16
1575/** No Memory Copy and Memory Set instructions implemented. */
1576# define ARMV8_ID_AA64ISAR2_EL1_MOPS_NOT_IMPL 0
1577/** Memory Copy and Memory Set instructions implemented (FEAT_MOPS). */
1578# define ARMV8_ID_AA64ISAR2_EL1_MOPS_SUPPORTED 1
1579/** Bit 20 - 23 - Indicates support for the BC.cond (hinted conditional branch) instruction in AArch64 state. */
1580#define ARMV8_ID_AA64ISAR2_EL1_BC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1581#define ARMV8_ID_AA64ISAR2_EL1_BC_SHIFT 20
1582/** BC instruction is not implemented. */
1583# define ARMV8_ID_AA64ISAR2_EL1_BC_NOT_IMPL 0
1584/** BC instruction is implemented (FEAT_HBC). */
1585# define ARMV8_ID_AA64ISAR2_EL1_BC_SUPPORTED 1
1586/** Bit 24 - 27 - Indicates whether the ConstPACField() function used as part of PAC generation returns FALSE or TRUE. */
1587#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1588#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_SHIFT 24
1589/** ConstPACField() returns FALSE. */
1590# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_FALSE 0
1591/** ConstPACField() returns TRUE (FEAT_CONSTPACFIELD). */
1592# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_TRUE 1
1593/* Bit 28 - 63 - Reserved. */
1594/** @} */
1595
1596
1597/** @name ID_AA64PFR0_EL1 - AArch64 Processor Feature Register 0.
1598 * @{ */
1599/** Bit 0 - 3 - EL0 Exception level handling. */
1600#define ARMV8_ID_AA64PFR0_EL1_EL0_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1601#define ARMV8_ID_AA64PFR0_EL1_EL0_SHIFT 0
1602/** EL0 can be executed in AArch64 state only. */
1603# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_ONLY 1
1604/** EL0 can be executed in AArch64 and AArch32 state. */
1605# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_AARCH32 2
1606/** Bit 4 - 7 - EL1 Exception level handling. */
1607#define ARMV8_ID_AA64PFR0_EL1_EL1_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1608#define ARMV8_ID_AA64PFR0_EL1_EL1_SHIFT 4
1609/** EL1 can be executed in AArch64 state only. */
1610# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_ONLY 1
1611/** EL1 can be executed in AArch64 and AArch32 state. */
1612# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_AARCH32 2
1613/** Bit 8 - 11 - EL2 Exception level handling. */
1614#define ARMV8_ID_AA64PFR0_EL1_EL2_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1615#define ARMV8_ID_AA64PFR0_EL1_EL2_SHIFT 8
1616/** EL2 is not implemented. */
1617# define ARMV8_ID_AA64PFR0_EL1_EL2_NOT_IMPL 0
1618/** EL2 can be executed in AArch64 state only. */
1619# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_ONLY 1
1620/** EL2 can be executed in AArch64 and AArch32 state. */
1621# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_AARCH32 2
1622/** Bit 12 - 15 - EL3 Exception level handling. */
1623#define ARMV8_ID_AA64PFR0_EL1_EL3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1624#define ARMV8_ID_AA64PFR0_EL1_EL3_SHIFT 12
1625/** EL3 is not implemented. */
1626# define ARMV8_ID_AA64PFR0_EL1_EL3_NOT_IMPL 0
1627/** EL3 can be executed in AArch64 state only. */
1628# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_ONLY 1
1629/** EL3 can be executed in AArch64 and AArch32 state. */
1630# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_AARCH32 2
1631/** Bit 16 - 19 - Floating-point support. */
1632#define ARMV8_ID_AA64PFR0_EL1_FP_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1633#define ARMV8_ID_AA64PFR0_EL1_FP_SHIFT 16
1634/** Floating-point is implemented and supports single and double precision. */
1635# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP 0
1636/** Floating-point is implemented and supports single, double and half precision. */
1637# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP_HP 1
1638/** Floating-point is not implemented. */
1639# define ARMV8_ID_AA64PFR0_EL1_FP_NOT_IMPL 0xf
1640/** Bit 20 - 23 - Advanced SIMD support. */
1641#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1642#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_SHIFT 20
1643/** Advanced SIMD is implemented and supports single and double precision. */
1644# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP 0
1645/** Advanced SIMD is implemented and supports single, double and half precision. */
1646# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP_HP 1
1647/** Advanced SIMD is not implemented. */
1648# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_NOT_IMPL 0xf
1649/** Bit 24 - 27 - System register GIC CPU interface support. */
1650#define ARMV8_ID_AA64PFR0_EL1_GIC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1651#define ARMV8_ID_AA64PFR0_EL1_GIC_SHIFT 24
1652/** GIC CPU interface system registers are not implemented. */
1653# define ARMV8_ID_AA64PFR0_EL1_GIC_NOT_IMPL 0
1654/** System register interface to versions 3.0 and 4.0 of the GIC CPU interface is supported. */
1655# define ARMV8_ID_AA64PFR0_EL1_GIC_V3_V4 1
1656/** System register interface to version 4.1 of the GIC CPU interface is supported. */
1657# define ARMV8_ID_AA64PFR0_EL1_GIC_V4_1 3
1658/** Bit 28 - 31 - RAS Extension version. */
1659#define ARMV8_ID_AA64PFR0_EL1_RAS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1660#define ARMV8_ID_AA64PFR0_EL1_RAS_SHIFT 28
1661/** No RAS extension. */
1662# define ARMV8_ID_AA64PFR0_EL1_RAS_NOT_IMPL 0
1663/** RAS Extension implemented. */
1664# define ARMV8_ID_AA64PFR0_EL1_RAS_SUPPORTED 1
1665/** FEAT_RASv1p1 implemented. */
1666# define ARMV8_ID_AA64PFR0_EL1_RAS_V1P1 2
1667/** Bit 32 - 35 - Scalable Vector Extension (SVE) support. */
1668#define ARMV8_ID_AA64PFR0_EL1_SVE_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1669#define ARMV8_ID_AA64PFR0_EL1_SVE_SHIFT 32
1670/** SVE is not supported. */
1671# define ARMV8_ID_AA64PFR0_EL1_SVE_NOT_IMPL 0
1672/** SVE is supported. */
1673# define ARMV8_ID_AA64PFR0_EL1_SVE_SUPPORTED 1
1674/** Bit 36 - 39 - Secure EL2 support. */
1675#define ARMV8_ID_AA64PFR0_EL1_SEL2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1676#define ARMV8_ID_AA64PFR0_EL1_SEL2_SHIFT 36
1677/** Secure EL2 is not supported. */
1678# define ARMV8_ID_AA64PFR0_EL1_SEL2_NOT_IMPL 0
1679/** Secure EL2 is implemented. */
1680# define ARMV8_ID_AA64PFR0_EL1_SEL2_SUPPORTED 1
1681/** Bit 40 - 43 - MPAM support. */
1682#define ARMV8_ID_AA64PFR0_EL1_MPAM_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1683#define ARMV8_ID_AA64PFR0_EL1_MPAM_SHIFT 40
1684/** MPAM extension major version number is 0. */
1685# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V0 0
1686/** MPAM extension major version number is 1. */
1687# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V1 1
1688/** Bit 44 - 47 - Activity Monitor Extension support. */
1689#define ARMV8_ID_AA64PFR0_EL1_AMU_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1690#define ARMV8_ID_AA64PFR0_EL1_AMU_SHIFT 44
1691/** Activity Monitor extension is not implemented. */
1692# define ARMV8_ID_AA64PFR0_EL1_AMU_NOT_IMPL 0
1693/** Activity Monitor extension is implemented as of FEAT_AMUv1. */
1694# define ARMV8_ID_AA64PFR0_EL1_AMU_V1 1
1695/** Activity Monitor extension is implemented as of FEAT_AMUv1p1 including virtualization support. */
1696# define ARMV8_ID_AA64PFR0_EL1_AMU_V1P1 2
1697/** Bit 48 - 51 - Data Independent Timing support. */
1698#define ARMV8_ID_AA64PFR0_EL1_DIT_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1699#define ARMV8_ID_AA64PFR0_EL1_DIT_SHIFT 48
1700/** AArch64 does not guarantee constant execution time of any instructions. */
1701# define ARMV8_ID_AA64PFR0_EL1_DIT_NOT_IMPL 0
1702/** AArch64 provides the PSTATE.DIT mechanism to guarantee constant execution time of certain instructions (FEAT_DIT). */
1703# define ARMV8_ID_AA64PFR0_EL1_DIT_SUPPORTED 1
1704/** Bit 52 - 55 - Realm Management Extension support. */
1705#define ARMV8_ID_AA64PFR0_EL1_RME_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1706#define ARMV8_ID_AA64PFR0_EL1_RME_SHIFT 52
1707/** Realm Management Extension not implemented. */
1708# define ARMV8_ID_AA64PFR0_EL1_RME_NOT_IMPL 0
1709/** RMEv1 is implemented (FEAT_RME). */
1710# define ARMV8_ID_AA64PFR0_EL1_RME_SUPPORTED 1
1711/** Bit 56 - 59 - Speculative use out of context branch targets support. */
1712#define ARMV8_ID_AA64PFR0_EL1_CSV2_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1713#define ARMV8_ID_AA64PFR0_EL1_CSV2_SHIFT 56
1714/** Implementation does not disclose whether FEAT_CSV2 is implemented. */
1715# define ARMV8_ID_AA64PFR0_EL1_CSV2_NOT_EXPOSED 0
1716/** FEAT_CSV2 is implemented. */
1717# define ARMV8_ID_AA64PFR0_EL1_CSV2_SUPPORTED 1
1718/** FEAT_CSV2_2 is implemented. */
1719# define ARMV8_ID_AA64PFR0_EL1_CSV2_2_SUPPORTED 2
1720/** FEAT_CSV2_3 is implemented. */
1721# define ARMV8_ID_AA64PFR0_EL1_CSV2_3_SUPPORTED 3
1722/** Bit 60 - 63 - Speculative use of faulting data support. */
1723#define ARMV8_ID_AA64PFR0_EL1_CSV3_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1724#define ARMV8_ID_AA64PFR0_EL1_CSV3_SHIFT 60
1725/** Implementation does not disclose whether data loaded under speculation with a permission or domain fault can be used. */
1726# define ARMV8_ID_AA64PFR0_EL1_CSV3_NOT_EXPOSED 0
1727/** FEAT_CSV3 is supported. */
1728# define ARMV8_ID_AA64PFR0_EL1_CSV3_SUPPORTED 1
1729/** @} */
1730
1731
1732/** @name ID_AA64PFR1_EL1 - AArch64 Processor Feature Register 1.
1733 * @{ */
1734/** Bit 0 - 3 - Branch Target Identification support. */
1735#define ARMV8_ID_AA64PFR1_EL1_BT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1736#define ARMV8_ID_AA64PFR1_EL1_BT_SHIFT 0
1737/** The Branch Target Identification mechanism is not implemented. */
1738# define ARMV8_ID_AA64PFR1_EL1_BT_NOT_IMPL 0
1739/** The Branch Target Identification mechanism is implemented. */
1740# define ARMV8_ID_AA64PFR1_EL1_BT_SUPPORTED 1
1741/** Bit 4 - 7 - Speculative Store Bypassing control support. */
1742#define ARMV8_ID_AA64PFR1_EL1_SSBS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1743#define ARMV8_ID_AA64PFR1_EL1_SSBS_SHIFT 4
1744/** AArch64 provides no mechanism to control the use of Speculative Store Bypassing. */
1745# define ARMV8_ID_AA64PFR1_EL1_SSBS_NOT_IMPL 0
1746/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe. */
1747# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED 1
1748/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe and adds MSR and MRS instructions
1749 * to directly read and write the PSTATE.SSBS field. */
1750# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED_MSR_MRS 2
1751/** Bit 8 - 11 - Memory Tagging Extension support. */
1752#define ARMV8_ID_AA64PFR1_EL1_MTE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1753#define ARMV8_ID_AA64PFR1_EL1_MTE_SHIFT 8
1754/** MTE is not implemented. */
1755# define ARMV8_ID_AA64PFR1_EL1_MTE_NOT_IMPL 0
1756/** Instruction only Memory Tagging Extensions implemented. */
1757# define ARMV8_ID_AA64PFR1_EL1_MTE_INSN_ONLY 1
1758/** Full Memory Tagging Extension implemented. */
1759# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL 2
1760/** Full Memory Tagging Extension with asymmetric Tag Check Fault handling implemented. */
1761# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL_ASYM_TAG_FAULT_CHK 3
1762/** Bit 12 - 15 - RAS Extension fractional field. */
1763#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1764#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_SHIFT 12
1765/** RAS Extension is implemented. */
1766# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_IMPL 0
1767/** FEAT_RASv1p1 is implemented. */
1768# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_RASV1P1 1
1769/** Bit 16 - 19 - MPAM minor version number. */
1770#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1771#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_SHIFT 16
1772/** The minor version number of the MPAM extension is 0. */
1773# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_0 0
1774/** The minor version number of the MPAM extension is 1. */
1775# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_1 1
1776/* Bit 20 - 23 - Reserved. */
1777/** Bit 24 - 27 - Scalable Matrix Extension support. */
1778#define ARMV8_ID_AA64PFR1_EL1_SME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1779#define ARMV8_ID_AA64PFR1_EL1_SME_SHIFT 24
1780/** Scalable Matrix Extensions are not implemented. */
1781# define ARMV8_ID_AA64PFR1_EL1_SME_NOT_IMPL 0
1782/** Scalable Matrix Extensions are implemented (FEAT_SME). */
1783# define ARMV8_ID_AA64PFR1_EL1_SME_SUPPORTED 1
1784/** Scalable Matrix Extensions are implemented, including the SME2 ZT0 register (FEAT_SME2). */
1785# define ARMV8_ID_AA64PFR1_EL1_SME_SME2 2
1786/** Bit 28 - 31 - Random Number trap to EL3 support. */
1787#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1788#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SHIFT 28
1789/** Trapping of RNDR and RNDRRS to EL3 is not supported. */
1790# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_NOT_IMPL 0
1791/** Trapping of RNDR and RNDRRS to EL3 is supported. */
1792# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SUPPORTED 1
1793/** Bit 32 - 35 - CSV2 fractional field. */
1794#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1795#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_SHIFT 32
1796/** Either CSV2 not exposed or implementation does not expose whether FEAT_CSV2_1p1 is implemented. */
1797# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_NOT_EXPOSED 0
1798/** FEAT_CSV2_1p1 is implemented. */
1799# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P1 1
1800/** FEAT_CSV2_1p2 is implemented. */
1801# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P2 2
1802/** Bit 36 - 39 - Non-maskable Interrupt support. */
1803#define ARMV8_ID_AA64PFR1_EL1_NMI_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1804#define ARMV8_ID_AA64PFR1_EL1_NMI_SHIFT 36
1805/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are not supported. */
1806# define ARMV8_ID_AA64PFR1_EL1_NMI_NOT_IMPL 0
1807/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are supported (FEAT_NMI). */
1808# define ARMV8_ID_AA64PFR1_EL1_NMI_SUPPORTED 1
1809/** @} */
1810
1811
1812/** @name ID_AA64MMFR0_EL1 - AArch64 Memory Model Feature Register 0.
1813 * @{ */
1814/** Bit 0 - 3 - Physical Address range supported. */
1815#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1816#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_SHIFT 0
1817/** Physical Address range is 32 bits, 4GiB. */
1818# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_32BITS 0
1819/** Physical Address range is 36 bits, 64GiB. */
1820# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_36BITS 1
1821/** Physical Address range is 40 bits, 1TiB. */
1822# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_40BITS 2
1823/** Physical Address range is 42 bits, 4TiB. */
1824# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_42BITS 3
1825/** Physical Address range is 44 bits, 16TiB. */
1826# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_44BITS 4
1827/** Physical Address range is 48 bits, 256TiB. */
1828# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_48BITS 5
1829/** Physical Address range is 52 bits, 4PiB. */
1830# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_52BITS 6
1831/** Bit 4 - 7 - Number of ASID bits. */
1832#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1833#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_SHIFT 4
1834/** ASID bits is 8. */
1835# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_8 0
1836/** ASID bits is 16. */
1837# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_16 2
1838/** Bit 8 - 11 - Indicates support for mixed-endian configuration. */
1839#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1840#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SHIFT 8
1841/** No mixed-endian support. */
1842# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_NOT_IMPL 0
1843/** Mixed-endian supported. */
1844# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SUPPORTED 1
1845/** Bit 12 - 15 - Indicates support for a distinction between Secure and Non-secure Memory. */
1846#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1847#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SHIFT 12
1848/** No distinction between Secure and Non-secure Memory supported. */
1849# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_NOT_IMPL 0
1850/** Distinction between Secure and Non-secure Memory supported. */
1851# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SUPPORTED 1
1852/** Bit 16 - 19 - Indicates support for mixed-endian at EL0 only. */
1853#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1854#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT 16
1855/** No mixed-endian support at EL0. */
1856# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_NOT_IMPL 0
1857/** Mixed-endian support at EL0. */
1858# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SUPPORTED 1
1859/** Bit 20 - 23 - Indicates support for 16KiB memory translation granule size. */
1860#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1861#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SHIFT 20
1862/** 16KiB granule size not supported. */
1863# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_NOT_IMPL 0
1864/** 16KiB granule size is supported. */
1865# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED 1
1866/** 16KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
1867# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED_52BIT 2
1868/** Bit 24 - 27 - Indicates support for 64KiB memory translation granule size. */
1869#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1870#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SHIFT 24
1871/** 64KiB granule supported. */
1872# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SUPPORTED 0
1873/** 64KiB granule not supported. */
1874# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_NOT_IMPL 0xf
1875/** Bit 28 - 31 - Indicates support for 4KiB memory translation granule size. */
1876#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1877#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SHIFT 28
1878/** 4KiB granule supported. */
1879# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED 0
1880/** 4KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
1881# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED_52BIT 1
1882/** 4KiB granule not supported. */
1883# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_NOT_IMPL 0xf
1884/** Bit 32 - 35 - Indicates support for 16KiB granule size at stage 2. */
1885#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1886#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SHIFT 32
1887/** Support for 16KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran16 field. */
1888# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORT_BY_TGRAN16 0
1889/** 16KiB granule not supported at stage 2. */
1890# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_NOT_IMPL 1
1891/** 16KiB granule supported at stage 2. */
1892# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED 2
1893/** 16KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
1894# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED_52BIT 3
1895/** Bit 36 - 39 - Indicates support for 64KiB granule size at stage 2. */
1896#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1897#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SHIFT 36
1898/** Support for 64KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran64 field. */
1899# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORT_BY_TGRAN64 0
1900/** 64KiB granule not supported at stage 2. */
1901# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_NOT_IMPL 1
1902/** 64KiB granule supported at stage 2. */
1903# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORTED 2
1904/** Bit 40 - 43 - Indicates support for 4KiB granule size at stage 2. */
1905#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1906#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SHIFT 40
1907/** Support for 4KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran4 field. */
1908# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORT_BY_TGRAN16 0
1909/** 4KiB granule not supported at stage 2. */
1910# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_NOT_IMPL 1
1911/** 4KiB granule supported at stage 2. */
1912# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED 2
1913/** 4KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
1914# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED_52BIT 3
1915/** Bit 44 - 47 - Indicates support for disabling context synchronizing exception entry and exit. */
1916#define ARMV8_ID_AA64MMFR0_EL1_EXS_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1917#define ARMV8_ID_AA64MMFR0_EL1_EXS_SHIFT 44
1918/** All exception entries and exits are context synchronization events. */
1919# define ARMV8_ID_AA64MMFR0_EL1_EXS_NOT_IMPL 0
1920/** Non-context synchronizing exception entry and exit are supported (FEAT_ExS). */
1921# define ARMV8_ID_AA64MMFR0_EL1_EXS_SUPPORTED 1
1922/* Bit 48 - 55 - Reserved. */
1923/** Bit 56 - 59 - Indicates the presence of the Fine-Grained Trap controls. */
1924#define ARMV8_ID_AA64MMFR0_EL1_FGT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1925#define ARMV8_ID_AA64MMFR0_EL1_FGT_SHIFT 56
1926/** Fine-grained trap controls are not implemented. */
1927# define ARMV8_ID_AA64MMFR0_EL1_FGT_NOT_IMPL 0
1928/** Fine-grained trap controls are implemented (FEAT_FGT). */
1929# define ARMV8_ID_AA64MMFR0_EL1_FGT_SUPPORTED 1
1930/** Bit 60 - 63 - Indicates the presence of Enhanced Counter Virtualization. */
1931#define ARMV8_ID_AA64MMFR0_EL1_ECV_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1932#define ARMV8_ID_AA64MMFR0_EL1_ECV_SHIFT 60
1933/** Enhanced Counter Virtualization is not implemented. */
1934# define ARMV8_ID_AA64MMFR0_EL1_ECV_NOT_IMPL 0
1935/** Enhanced Counter Virtualization is implemented (FEAT_ECV). */
1936# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED 1
1937/** Enhanced Counter Virtualization is implemented and includes support for CNTHCTL_EL2.ECV and CNTPOFF_EL2 (FEAT_ECV). */
1938# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED_2 2
1939/** @} */
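/* Editorial example (not from the original header): translating the PARange
 * field of a hypothetical raw ID_AA64MMFR0_EL1 value into the number of
 * supported physical address bits.
 *
 *      static uint8_t const s_acPhysAddrBits[] = { 32, 36, 40, 42, 44, 48, 52 };
 *      uint64_t const uPaRange  = (uMmfr0 & ARMV8_ID_AA64MMFR0_EL1_PARANGE_MASK) >> ARMV8_ID_AA64MMFR0_EL1_PARANGE_SHIFT;
 *      uint8_t  const cAddrBits = uPaRange < RT_ELEMENTS(s_acPhysAddrBits) ? s_acPhysAddrBits[uPaRange] : 0;
 */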
1940
1941
1942/** @name ID_AA64MMFR1_EL1 - AArch64 Memory Model Feature Register 1.
1943 * @{ */
1944/** Bit 0 - 3 - Hardware updates to Access flag and Dirty state in translation tables. */
1945#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1946#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SHIFT 0
1947/** Hardware update of the Access flag and dirty state are not supported. */
1948# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_NOT_IMPL 0
1949/** Support for hardware update of the Access flag for Block and Page descriptors. */
1950# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SUPPORTED 1
1951/** Support for hardware update of the Access flag for Block and Page descriptors, hardware update of dirty state supported. */
1952# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_DIRTY_SUPPORTED 2
1953/** Bit 4 - 7 - Number of VMID bits. */
1954#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1955#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_SHIFT 4
1956/** VMID bits is 8. */
1957# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_8 0
1958/** VMID bits is 16 (FEAT_VMID16). */
1959# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_16 2
1960/** Bit 8 - 11 - Virtualization Host Extensions support. */
1961#define ARMV8_ID_AA64MMFR1_EL1_VHE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1962#define ARMV8_ID_AA64MMFR1_EL1_VHE_SHIFT 8
1963/** Virtualization Host Extensions are not supported. */
1964# define ARMV8_ID_AA64MMFR1_EL1_VHE_NOT_IMPL 0
1965/** Virtualization Host Extensions are supported. */
1966# define ARMV8_ID_AA64MMFR1_EL1_VHE_SUPPORTED 1
1967/** Bit 12 - 15 - Hierarchical Permission Disables. */
1968#define ARMV8_ID_AA64MMFR1_EL1_HPDS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1969#define ARMV8_ID_AA64MMFR1_EL1_HPDS_SHIFT 12
1970/** Disabling of hierarchical controls not supported. */
1971# define ARMV8_ID_AA64MMFR1_EL1_HPDS_NOT_IMPL 0
1972/** Disabling of hierarchical controls supported (FEAT_HPDS). */
1973# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED 1
1974/** As FEAT_HPDS, plus possible hardware allocation of bits[62:59] of the translation table descriptors from the final lookup level (FEAT_HPDS2). */
1975# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED_2 2
1976/** Bit 16 - 19 - LORegions support. */
1977#define ARMV8_ID_AA64MMFR1_EL1_LO_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1978#define ARMV8_ID_AA64MMFR1_EL1_LO_SHIFT 16
1979/** LORegions not supported. */
1980# define ARMV8_ID_AA64MMFR1_EL1_LO_NOT_IMPL 0
1981/** LORegions supported. */
1982# define ARMV8_ID_AA64MMFR1_EL1_LO_SUPPORTED 1
1983/** Bit 20 - 23 - Privileged Access Never support. */
1984#define ARMV8_ID_AA64MMFR1_EL1_PAN_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1985#define ARMV8_ID_AA64MMFR1_EL1_PAN_SHIFT 20
1986/** PAN not supported. */
1987# define ARMV8_ID_AA64MMFR1_EL1_PAN_NOT_IMPL 0
1988/** PAN supported (FEAT_PAN). */
1989# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED 1
1990/** PAN supported and AT S1E1RP and AT S1E1WP instructions supported (FEAT_PAN2). */
1991# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_2 2
1992/** PAN supported and AT S1E1RP and AT S1E1WP instructions and SCTRL_EL1.EPAN and SCTRL_EL2.EPAN supported (FEAT_PAN3). */
1993# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_3 3
1994/** Bit 24 - 27 - Describes whether the PE can generate SError interrupt exceptions. */
1995#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1996#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SHIFT 24
1997/** The PE never generates an SError interrupt due to an External abort on a speculative read. */
1998# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_NOT_IMPL 0
1999/** The PE might generate an SError interrupt due to an External abort on a speculative read. */
2000# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SUPPORTED 1
2001/** Bit 28 - 31 - Indicates support for execute-never control distinction by Exception level at stage 2. */
2002#define ARMV8_ID_AA64MMFR1_EL1_XNX_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2003#define ARMV8_ID_AA64MMFR1_EL1_XNX_SHIFT 28
2004/** Distinction between EL0 and EL1 execute-never control at stage 2 not supported. */
2005# define ARMV8_ID_AA64MMFR1_EL1_XNX_NOT_IMPL 0
2006/** Distinction between EL0 and EL1 execute-never control at stage 2 supported (FEAT_XNX). */
2007# define ARMV8_ID_AA64MMFR1_EL1_XNX_SUPPORTED 1
2008/** Bit 32 - 35 - Indicates support for the configurable delayed trapping of WFE. */
2009#define ARMV8_ID_AA64MMFR1_EL1_TWED_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2010#define ARMV8_ID_AA64MMFR1_EL1_TWED_SHIFT 32
2011/** Configurable delayed trapping of WFE is not supported. */
2012# define ARMV8_ID_AA64MMFR1_EL1_TWED_NOT_IMPL 0
2013/** Configurable delayed trapping of WFE is supported (FEAT_TWED). */
2014# define ARMV8_ID_AA64MMFR1_EL1_TWED_SUPPORTED 1
2015/** Bit 36 - 39 - Indicates support for Enhanced Translation Synchronization. */
2016#define ARMV8_ID_AA64MMFR1_EL1_ETS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2017#define ARMV8_ID_AA64MMFR1_EL1_ETS_SHIFT 36
2018/** Enhanced Translation Synchronization is not supported. */
2019# define ARMV8_ID_AA64MMFR1_EL1_ETS_NOT_IMPL 0
2020/** Enhanced Translation Synchronization is implemented. */
2021# define ARMV8_ID_AA64MMFR1_EL1_ETS_SUPPORTED 1
2022/** Bit 40 - 43 - Indicates HCRX_EL2 and its associated EL3 trap support. */
2023#define ARMV8_ID_AA64MMFR1_EL1_HCX_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2024#define ARMV8_ID_AA64MMFR1_EL1_HCX_SHIFT 40
2025/** HCRX_EL2 and its associated EL3 trap are not supported. */
2026# define ARMV8_ID_AA64MMFR1_EL1_HCX_NOT_IMPL 0
2027/** HCRX_EL2 and its associated EL3 trap are supported (FEAT_HCX). */
2028# define ARMV8_ID_AA64MMFR1_EL1_HCX_SUPPORTED 1
2029/** Bit 44 - 47 - Indicates support for FPCR.{AH,FIZ,NEP}. */
2030#define ARMV8_ID_AA64MMFR1_EL1_AFP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2031#define ARMV8_ID_AA64MMFR1_EL1_AFP_SHIFT 44
2032/** The FPCR.{AH,FIZ,NEP} fields are not supported. */
2033# define ARMV8_ID_AA64MMFR1_EL1_AFP_NOT_IMPL 0
2034/** The FPCR.{AH,FIZ,NEP} fields are supported (FEAT_AFP). */
2035# define ARMV8_ID_AA64MMFR1_EL1_AFP_SUPPORTED 1
2036/** Bit 48 - 51 - Indicates support for intermediate caching of translation table walks. */
2037#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2038#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_SHIFT 48
2039/** The intermediate caching of translation table walks might include non-coherent physical translation caches. */
2040# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_NON_COHERENT 0
2041/** The intermediate caching of translation table walks does not include non-coherent physical translation caches (FEAT_nTLBPA). */
2042# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_COHERENT_ONLY 1
2043/** Bit 52 - 55 - Indicates whether SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP are implemented in AArch64 state. */
2044#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2045#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SHIFT 52
2046/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are not implemented. */
2047# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_NOT_IMPL 0
2048/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are implemented (FEAT_TIDCP1). */
2049# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SUPPORTED 1
2050/** Bit 56 - 59 - Indicates support for cache maintenance instruction permission. */
2051#define ARMV8_ID_AA64MMFR1_EL1_CMOW_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2052#define ARMV8_ID_AA64MMFR1_EL1_CMOW_SHIFT 56
2053/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are not implemented. */
2054# define ARMV8_ID_AA64MMFR1_EL1_CMOW_NOT_IMPL 0
2055/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are implemented (FEAT_CMOW). */
2056# define ARMV8_ID_AA64MMFR1_EL1_CMOW_SUPPORTED 1
2057/* Bit 60 - 63 - Reserved. */
2058/** @} */
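/*
 * Example: the fields above are extracted with the usual mask/shift pairs.  A
 * sketch testing for FEAT_PAN2, assuming uIdAa64MmFr1 is a (hypothetical)
 * variable holding the ID_AA64MMFR1_EL1 value:
 *
 *      uint64_t const uPan  = (uIdAa64MmFr1 & ARMV8_ID_AA64MMFR1_EL1_PAN_MASK) >> ARMV8_ID_AA64MMFR1_EL1_PAN_SHIFT;
 *      bool const     fPan2 = uPan >= ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_2;
 */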
2059
2060
2061/** @name ID_AA64MMFR2_EL1 - AArch64 Memory Model Feature Register 2.
2062 * @{ */
2063/** Bit 0 - 3 - Indicates support for Common not Private translations. */
2064#define ARMV8_ID_AA64MMFR2_EL1_CNP_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2065#define ARMV8_ID_AA64MMFR2_EL1_CNP_SHIFT 0
2066/** Common not Private translations are not supported. */
2067# define ARMV8_ID_AA64MMFR2_EL1_CNP_NOT_IMPL 0
2068/** Support for Common not Private translations (FEAT_TTCNP). */
2069# define ARMV8_ID_AA64MMFR2_EL1_CNP_SUPPORTED 1
2070/** Bit 4 - 7 - Indicates support for User Access Override. */
2071#define ARMV8_ID_AA64MMFR2_EL1_UAO_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2072#define ARMV8_ID_AA64MMFR2_EL1_UAO_SHIFT 4
2073/** User Access Override is not supported. */
2074# define ARMV8_ID_AA64MMFR2_EL1_UAO_NOT_IMPL 0
2075/** User Access Override is supported (FEAT_UAO). */
2076# define ARMV8_ID_AA64MMFR2_EL1_UAO_SUPPORTED 1
2077/** Bit 8 - 11 - Indicates support for LSMAOE and nTLSMD bits in SCTLR_ELx. */
2078#define ARMV8_ID_AA64MMFR2_EL1_LSM_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2079#define ARMV8_ID_AA64MMFR2_EL1_LSM_SHIFT 8
2080/** LSMAOE and nTLSMD bits are not supported. */
2081# define ARMV8_ID_AA64MMFR2_EL1_LSM_NOT_IMPL 0
2082/** LSMAOE and nTLSMD bits are supported (FEAT_LSMAOC). */
2083# define ARMV8_ID_AA64MMFR2_EL1_LSM_SUPPORTED 1
2084/** Bit 12 - 15 - Indicates support for the IESB bit in SCTLR_ELx registers. */
2085#define ARMV8_ID_AA64MMFR2_EL1_IESB_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2086#define ARMV8_ID_AA64MMFR2_EL1_IESB_SHIFT 12
2087/** IESB bit is not supported. */
2088# define ARMV8_ID_AA64MMFR2_EL1_IESB_NOT_IMPL 0
2089/** IESB bit is supported (FEAT_IESB). */
2090# define ARMV8_ID_AA64MMFR2_EL1_IESB_SUPPORTED 1
2091/** Bit 16 - 19 - Indicates support for larger virtual address. */
2092#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2093#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_SHIFT 16
2094/** Virtual address range is 48 bits. */
2095# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_48BITS 0
2096/** 52 bit virtual addresses supported for 64KiB granules (FEAT_LVA). */
2097# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_52BITS_64KB_GRAN 1
2098/** Bit 20 - 23 - Revised CCSIDR_EL1 register format supported. */
2099#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2100#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_SHIFT 20
2101/** CCSIDR_EL1 register format is 32-bit. */
2102# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_32BIT 0
2103/** CCSIDR_EL1 register format is 64-bit (FEAT_CCIDX). */
2104# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_64BIT 1
2105/** Bit 24 - 27 - Indicates support for nested virtualization. */
2106#define ARMV8_ID_AA64MMFR2_EL1_NV_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2107#define ARMV8_ID_AA64MMFR2_EL1_NV_SHIFT 24
2108/** Nested virtualization is not supported. */
2109# define ARMV8_ID_AA64MMFR2_EL1_NV_NOT_IMPL 0
2110/** The HCR_EL2.{AT,NV1,NV} bits are implemented (FEAT_NV). */
2111# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED 1
2112/** The VNCR_EL2 register and HCR_EL2.{NV2,AT,NV1,NV} bits are implemented (FEAT_NV2). */
2113# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED_2 2
2114/** Bit 28 - 31 - Indicates support for small translation tables. */
2115#define ARMV8_ID_AA64MMFR2_EL1_ST_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2116#define ARMV8_ID_AA64MMFR2_EL1_ST_SHIFT 28
2117/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 39. */
2118# define ARMV8_ID_AA64MMFR2_EL1_ST_NOT_IMPL 0
2119/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 48 for 4KiB and 16KiB, and 47 for 64KiB granules (FEAT_TTST). */
2120# define ARMV8_ID_AA64MMFR2_EL1_ST_SUPPORTED 1
2121/** Bit 32 - 35 - Indicates support for unaligned single-copy atomicity and atomic functions. */
2122#define ARMV8_ID_AA64MMFR2_EL1_AT_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2123#define ARMV8_ID_AA64MMFR2_EL1_AT_SHIFT 32
2124/** Unaligned single-copy atomicity and atomic functions are not supported. */
2125# define ARMV8_ID_AA64MMFR2_EL1_AT_NOT_IMPL 0
2126/** Unaligned single-copy atomicity and atomic functions are supported (FEAT_LSE2). */
2127# define ARMV8_ID_AA64MMFR2_EL1_AT_SUPPORTED 1
2128/** Bit 36 - 39 - Indicates value of ESR_ELx.EC that reports an exception generated by a read access to the feature ID space. */
2129#define ARMV8_ID_AA64MMFR2_EL1_IDS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2130#define ARMV8_ID_AA64MMFR2_EL1_IDS_SHIFT 36
2131/** ESR_ELx.EC is 0 for traps generated by a read access to the feature ID space. */
2132# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_0 0
2133/** ESR_ELx.EC is 0x18 for traps generated by a read access to the feature ID space (FEAT_IDST). */
2134# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_18H 1
2135/** Bit 40 - 43 - Indicates support for the HCR_EL2.FWB bit. */
2136#define ARMV8_ID_AA64MMFR2_EL1_FWB_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2137#define ARMV8_ID_AA64MMFR2_EL1_FWB_SHIFT 40
2138/** HCR_EL2.FWB bit is not supported. */
2139# define ARMV8_ID_AA64MMFR2_EL1_FWB_NOT_IMPL 0
2140/** HCR_EL2.FWB bit is supported (FEAT_S2FWB). */
2141# define ARMV8_ID_AA64MMFR2_EL1_FWB_SUPPORTED 1
2142/* Bit 44 - 47 - Reserved. */
2143/** Bit 48 - 51 - Indicates support for TTL field in address operations. */
2144#define ARMV8_ID_AA64MMFR2_EL1_TTL_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2145#define ARMV8_ID_AA64MMFR2_EL1_TTL_SHIFT 48
2146/** TLB maintenance instructions by address have bits [47:44] Res0. */
2147# define ARMV8_ID_AA64MMFR2_EL1_TTL_NOT_IMPL 0
2148/** TLB maintenance instructions by address have bits [47:44] holding the TTL field (FEAT_TTL). */
2149# define ARMV8_ID_AA64MMFR2_EL1_TTL_SUPPORTED 1
2150/** Bit 52 - 55 - Identifies the hardware requirements for break-before-make sequences when
2151 * changing the block size for a translation. */
2152#define ARMV8_ID_AA64MMFR2_EL1_BBM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2153#define ARMV8_ID_AA64MMFR2_EL1_BBM_SHIFT 52
2154/** Level 0 support for changing block size is supported (FEAT_BBM). */
2155# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL0 0
2156/** Level 1 support for changing block size is supported (FEAT_BBM). */
2157# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL1 1
2158/** Level 2 support for changing block size is supported (FEAT_BBM). */
2159# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL2 2
2160/** Bit 56 - 59 - Indicates support for Enhanced Virtualization Traps. */
2161#define ARMV8_ID_AA64MMFR2_EL1_EVT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2162#define ARMV8_ID_AA64MMFR2_EL1_EVT_SHIFT 56
2163/** Enhanced Virtualization Traps are not supported. */
2164# define ARMV8_ID_AA64MMFR2_EL1_EVT_NOT_IMPL 0
2165/** Enhanced Virtualization Traps are supported (FEAT_EVT). */
2166# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED 1
2167/** Enhanced Virtualization Traps are supported with additional traps (FEAT_EVT). */
2168# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED_2 2
2169/** Bit 60 - 63 - Indicates support for E0PDx mechanism. */
2170#define ARMV8_ID_AA64MMFR2_EL1_E0PD_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2171#define ARMV8_ID_AA64MMFR2_EL1_E0PD_SHIFT 60
2172/** E0PDx mechanism is not supported. */
2173# define ARMV8_ID_AA64MMFR2_EL1_E0PD_NOT_IMPL 0
2174/** E0PDx mechanism is supported (FEAT_E0PD). */
2175# define ARMV8_ID_AA64MMFR2_EL1_E0PD_SUPPORTED 1
2176/** @} */
2177
2178
2179/** @name ID_AA64DFR0_EL1 - AArch64 Debug Feature Register 0.
2180 * @{ */
2181/** Bit 0 - 3 - Indicates the Debug Architecture version supported. */
2182#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2183#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_SHIFT 0
2184/** Armv8 debug architecture version. */
2185# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8 6
2186/** Armv8 debug architecture version with virtualization host extensions. */
2187# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8_VHE 7
2188/** Armv8.2 debug architecture version (FEAT_Debugv8p2). */
2189# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p2 8
2190/** Armv8.4 debug architecture version (FEAT_Debugv8p4). */
2191# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p4 9
2192/** Armv8.8 debug architecture version (FEAT_Debugv8p8). */
2193# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p8 10
2194/** Bit 4 - 7 - Indicates trace support. */
2195#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2196#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SHIFT 4
2197/** Trace unit System registers not implemented. */
2198# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_NOT_IMPL 0
2199/** Trace unit System registers supported. */
2200# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SUPPORTED 1
2201/** Bit 8 - 11 - Performance Monitors Extension version. */
2202#define ARMV8_ID_AA64DFR0_EL1_PMUVER_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2203#define ARMV8_ID_AA64DFR0_EL1_PMUVER_SHIFT 8
2204/** Performance Monitors Extension not supported. */
2205# define ARMV8_ID_AA64DFR0_EL1_PMUVER_NOT_IMPL 0
2206/** Performance Monitors Extension v3 supported (FEAT_PMUv3). */
2207# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3 1
2208/** Performance Monitors Extension v3 supported (FEAT_PMUv3p1). */
2209# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P1 4
2210/** Performance Monitors Extension v3 supported (FEAT_PMUv3p4). */
2211# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P4 5
2212/** Performance Monitors Extension v3 supported (FEAT_PMUv3p5). */
2213# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P5 6
2214/** Performance Monitors Extension v3 supported (FEAT_PMUv3p7). */
2215# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P7 7
2216/** Performance Monitors Extension v3 supported (FEAT_PMUv3p8). */
2217# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P8 8
2218/** Bit 12 - 15 - Number of breakpoints, minus 1. */
2219#define ARMV8_ID_AA64DFR0_EL1_BRPS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2220#define ARMV8_ID_AA64DFR0_EL1_BRPS_SHIFT 12
2221/* Bit 16 - 19 - Reserved 0. */
2222/** Bit 20 - 23 - Number of watchpoints, minus 1. */
2223#define ARMV8_ID_AA64DFR0_EL1_WRPS_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2224#define ARMV8_ID_AA64DFR0_EL1_WRPS_SHIFT 20
2225/* Bit 24 - 27 - Reserved 0. */
2226/** Bit 28 - 31 - Number of context-aware breakpoints, minus 1. */
2227#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2228#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_SHIFT 28
2229/** Bit 32 - 35 - Statistical Profiling Extension version. */
2230#define ARMV8_ID_AA64DFR0_EL1_PMSVER_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2231#define ARMV8_ID_AA64DFR0_EL1_PMSVER_SHIFT 32
2232/** Statistical Profiling Extension not implemented. */
2233# define ARMV8_ID_AA64DFR0_EL1_PMSVER_NOT_IMPL 0
2234/** Statistical Profiling Extension supported (FEAT_SPE). */
2235# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED 1
2236/** Statistical Profiling Extension supported, version 1.1 (FEAT_SPEv1p1). */
2237# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P1 2
2238/** Statistical Profiling Extension supported, version 1.2 (FEAT_SPEv1p2). */
2239# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P2 3
2240/** Statistical Profiling Extension supported, version 1.3 (FEAT_SPEv1p3). */
2241# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P3 4
2242/** Bit 36 - 39 - OS Double Lock implemented. */
2243#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2244#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SHIFT 36
2245/** OS Double Lock is not implemented. */
2246# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_NOT_IMPL 0xf
2247/** OS Double Lock is supported (FEAT_DoubleLock). */
2248# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SUPPORTED 0
2249/** Bit 40 - 43 - Indicates the Armv8.4 self-hosted Trace Extension. */
2250#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2251#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SHIFT 40
2252/** Armv8.4 self-hosted Trace Extension not implemented. */
2253# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_NOT_IMPL 0
2254/** Armv8.4 self-hosted Trace Extension is supported (FEAT_TRF). */
2255# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SUPPORTED 1
2256/** Bit 44 - 47 - Indicates support for the Trace Buffer Extension. */
2257#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2258#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SHIFT 44
2259/** Trace Buffer Extension is not implemented. */
2260# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_NOT_IMPL 0
2261/** Trace Buffer Extension is supported (FEAT_TRBE). */
2262# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SUPPORTED 1
2263/** Bit 48 - 51 - Indicates support for the multi-threaded PMU extension. */
2264#define ARMV8_ID_AA64DFR0_EL1_MTPMU_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2265#define ARMV8_ID_AA64DFR0_EL1_MTPMU_SHIFT 48
2266/** Multi-threaded PMU extension is not implemented. */
2267# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL 0
2268/** Multi-threaded PMU extension is supported (FEAT_MTPMU). */
2269# define ARMV8_ID_AA64DFR0_EL1_MTPMU_SUPPORTED 1
2270/** Multi-threaded PMU extension is not implemented. */
2271# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL_2 0xf
2272/** Bit 52 - 55 - Indicates support for the Branch Record Buffer extension. */
2273#define ARMV8_ID_AA64DFR0_EL1_BRBE_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2274#define ARMV8_ID_AA64DFR0_EL1_BRBE_SHIFT 52
2275/** Branch Record Buffer extension is not implemented. */
2276# define ARMV8_ID_AA64DFR0_EL1_BRBE_NOT_IMPL 0
2277/** Branch Record Buffer extension is supported (FEAT_BRBE). */
2278# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED 1
2279/** Branch Record Buffer extension is supported and supports branch recording at EL3 (FEAT_BRBEv1p1). */
2280# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED_V1P1 2
2281/* Bit 56 - 59 - Reserved. */
2282/** Bit 60 - 63 - Indicates support for Zero PMU event counters for guest operating systems. */
2283#define ARMV8_ID_AA64DFR0_EL1_HPMN0_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2284#define ARMV8_ID_AA64DFR0_EL1_HPMN0_SHIFT 60
2285/** Setting MDCR_EL2.HPMN to zero has CONSTRAINED UNPREDICTABLE behavior. */
2286# define ARMV8_ID_AA64DFR0_EL1_HPMN0_NOT_IMPL 0
2287/** Setting MDCR_EL2.HPMN to zero has defined behavior (FEAT_HPMN0). */
2288# define ARMV8_ID_AA64DFR0_EL1_HPMN0_SUPPORTED 1
2289/** @} */
2290
2291
2292#if (!defined(VBOX_FOR_DTRACE_LIB) && defined(__cplusplus) && !defined(ARMV8_WITHOUT_MK_INSTR)) || defined(DOXYGEN_RUNNING)
2293/** @defgroup grp_rt_armv8_mkinstr Instruction Encoding Helpers
2294 * @ingroup grp_rt_armv8
2295 *
2296 * A few inlined functions and macros for assisting in encoding common ARMv8
2297 * instructions.
2298 *
2299 * @{ */
2300
2301/** A64: Return instruction. */
2302#define ARMV8_A64_INSTR_RET UINT32_C(0xd65f03c0)
2303/** A64: Return instruction with LR pointer authentication using SP and key A. */
2304#define ARMV8_A64_INSTR_RETAA UINT32_C(0xd65f0bff)
2305/** A64: Return instruction with LR pointer authentication using SP and key B. */
2306#define ARMV8_A64_INSTR_RETAB UINT32_C(0xd65f0fff)
2307/** A64: Insert pointer authentication code into X17 using X16 and key B. */
2308#define ARMV8_A64_INSTR_PACIB1716 UINT32_C(0xd503215f)
2309/** A64: Insert pointer authentication code into LR using SP and key B. */
2310#define ARMV8_A64_INSTR_PACIBSP UINT32_C(0xd503237f)
2311/** A64: Insert pointer authentication code into LR using XZR and key B. */
2312#define ARMV8_A64_INSTR_PACIBZ UINT32_C(0xd503235f)
2313/** A64: Invert the carry flag (PSTATE.C). */
2314#define ARMV8_A64_INSTR_CFINV UINT32_C(0xd500401f)
2315
2316
2317typedef enum
2318{
2319 /** Add @a iImm7*sizeof(reg) to @a iBaseReg after the store/load,
2320 * and update the register. */
2321 kArm64InstrStLdPairType_PostIndex = 1,
2322 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2323 * but don't update the register. */
2324 kArm64InstrStLdPairType_Signed = 2,
2325 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2326 * and update the register. */
2327 kArm64InstrStLdPairType_PreIndex = 3
2328} ARM64INSTRSTLDPAIRTYPE;
2329
2330/**
2331 * A64: Encodes either stp (store register pair) or ldp (load register pair).
2332 *
2333 * @returns The encoded instruction.
2334 * @param fLoad true for ldp, false for stp.
2335 * @param u2Opc When @a fSimdFp is @c false:
2336 * - 0 for 32-bit GPRs (Wt).
2337 * - 1 for encoding stgp or ldpsw.
2338 * - 2 for 64-bit GPRs (Xt).
2339 * - 3 illegal.
2340 * When @a fSimdFp is @c true:
2341 * - 0 for 32-bit SIMD&FP registers (St).
2342 * - 1 for 64-bit SIMD&FP registers (Dt).
2343 * - 2 for 128-bit SIMD&FP registers (Qt).
2344 * @param enmType The instruction variant wrt addressing and updating of the
2345 * addressing register.
2346 * @param iReg1 The first register to store/load.
2347 * @param iReg2 The second register to store/load.
2348 * @param iBaseReg The base register to use when addressing. SP is allowed.
2349 * @param iImm7 Signed addressing immediate value scaled, range -64..63,
2350 * will be multiplied by the register size.
2351 * @param fSimdFp true for SIMD&FP registers, false for GPRs and
2352 * stgp/ldpsw instructions.
2353 */
2354DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdPair(bool fLoad, uint32_t u2Opc, ARM64INSTRSTLDPAIRTYPE enmType,
2355 uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2356 bool fSimdFp = false)
2357{
2358 Assert(u2Opc < 3); Assert(iReg1 <= 31); Assert(iReg2 <= 31); Assert(iBaseReg <= 31); Assert(iImm7 < 64 && iImm7 >= -64);
2359 return (u2Opc << 30)
2360 | UINT32_C(0x28000000) /* 0b00101000000000000000000000000000 */
2361 | ((uint32_t)fSimdFp << 26) /* VR bit, see "Top-level encodings for A64" */
2362 | ((uint32_t)enmType << 23)
2363 | ((uint32_t)fLoad << 22)
2364 | (((uint32_t)iImm7 & UINT32_C(0x7f)) << 15)
2365 | (iReg2 << 10)
2366 | (iBaseReg << 5)
2367 | iReg1;
2368}
2369
2370
2371/** A64: ldp x1, x2, [x3] */
2372DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2373 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2374 bool f64Bit = true)
2375{
2376 return Armv8A64MkInstrStLdPair(true /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2377}
2378
2379
2380/** A64: stp x1, x2, [x3] */
2381DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2382 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2383 bool f64Bit = true)
2384{
2385 return Armv8A64MkInstrStLdPair(false /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2386}
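/*
 * Example: encoding the typical prologue instruction "stp x19, x20, [sp, #-16]!".
 * The iImm7 displacement is scaled by the register size (8 bytes for 64-bit GPRs),
 * so -16 bytes is iImm7 = -2; SP is encoded as register number 31:
 *
 *      uint32_t const uStp = Armv8A64MkInstrStPairGpr(ARMV8_A64_REG_X19, ARMV8_A64_REG_X20,
 *                                                     31, -2, kArm64InstrStLdPairType_PreIndex);
 *      Assert(uStp == UINT32_C(0xa9bf53f3));
 */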
2387
2388
2389typedef enum /* Size VR Opc */
2390{ /* \ | / */
2391 kArmv8A64InstrLdStType_Mask_Size = 0x300,
2392 kArmv8A64InstrLdStType_Mask_VR = 0x010,
2393 kArmv8A64InstrLdStType_Mask_Opc = 0x003,
2394 kArmv8A64InstrLdStType_Shift_Size = 8,
2395 kArmv8A64InstrLdStType_Shift_VR = 4,
2396 kArmv8A64InstrLdStType_Shift_Opc = 0,
2397
2398 kArmv8A64InstrLdStType_St_Byte = 0x000,
2399 kArmv8A64InstrLdStType_Ld_Byte = 0x001,
2400 kArmv8A64InstrLdStType_Ld_SignByte64 = 0x002,
2401 kArmv8A64InstrLdStType_Ld_SignByte32 = 0x003,
2402
2403 kArmv8A64InstrLdStType_St_Half = 0x100, /**< Half = 16-bit */
2404 kArmv8A64InstrLdStType_Ld_Half = 0x101, /**< Half = 16-bit */
2405 kArmv8A64InstrLdStType_Ld_SignHalf64 = 0x102, /**< Half = 16-bit */
2406 kArmv8A64InstrLdStType_Ld_SignHalf32 = 0x103, /**< Half = 16-bit */
2407
2408 kArmv8A64InstrLdStType_St_Word = 0x200, /**< Word = 32-bit */
2409 kArmv8A64InstrLdStType_Ld_Word = 0x201, /**< Word = 32-bit */
2410 kArmv8A64InstrLdStType_Ld_SignWord64 = 0x202, /**< Word = 32-bit */
2411
2412 kArmv8A64InstrLdStType_St_Dword = 0x300, /**< Dword = 64-bit */
2413 kArmv8A64InstrLdStType_Ld_Dword = 0x301, /**< Dword = 64-bit */
2414
2415 kArmv8A64InstrLdStType_Prefetch = 0x302, /**< Not valid in all variations, check docs. */
2416
2417 kArmv8A64InstrLdStType_St_Vr_Byte = 0x010,
2418 kArmv8A64InstrLdStType_Ld_Vr_Byte = 0x011,
2419 kArmv8A64InstrLdStType_St_Vr_128 = 0x012,
2420 kArmv8A64InstrLdStType_Ld_Vr_128 = 0x013,
2421
2422 kArmv8A64InstrLdStType_St_Vr_Half = 0x110, /**< Half = 16-bit */
2423 kArmv8A64InstrLdStType_Ld_Vr_Half = 0x111, /**< Half = 16-bit */
2424
2425 kArmv8A64InstrLdStType_St_Vr_Word = 0x210, /**< Word = 32-bit */
2426 kArmv8A64InstrLdStType_Ld_Vr_Word = 0x211, /**< Word = 32-bit */
2427
2428 kArmv8A64InstrLdStType_St_Vr_Dword = 0x310, /**< Dword = 64-bit */
2429 kArmv8A64InstrLdStType_Ld_Vr_Dword = 0x311 /**< Dword = 64-bit */
2430
2431} ARMV8A64INSTRLDSTTYPE;
2432/** Checks if an ARMV8A64INSTRLDSTTYPE value is a store operation or not. */
2433#define ARMV8A64INSTRLDSTTYPE_IS_STORE(a_enmLdStType) (((unsigned)a_enmLdStType & (unsigned)kArmv8A64InstrLdStType_Mask_Opc) == 0)
2434
2435
2436/**
2437 * A64: Encodes load/store with unscaled 9-bit signed immediate.
2438 *
2439 * @returns The encoded instruction.
2440 * @param u32Opcode The base opcode value.
2441 * @param enmType The load/store instruction type. Prefetch valid (PRFUM).
2442 * @param iReg The register to load into / store.
2443 * @param iBaseReg The base register to use when addressing. SP is allowed.
2444 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2445 */
2446DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdImm9Ex(uint32_t u32Opcode, ARMV8A64INSTRLDSTTYPE enmType,
2447 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2448{
2449 Assert(i9ImmDisp >= -256 && i9ImmDisp < 256); Assert(iReg < 32); Assert(iBaseReg < 32);
2450 return u32Opcode
2451 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
2452 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
2453 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
2454 | (((uint32_t)i9ImmDisp & UINT32_C(0x1ff)) << 12)
2455 | (iBaseReg << 5)
2456 | iReg;
2457}
2458
2459
2460/**
2461 * A64: Encodes load/store with unscaled 9-bit signed immediate.
2462 *
2463 * @returns The encoded instruction.
2464 * @param enmType The load/store instruction type. Prefetch valid (PRFUM).
2465 * @param iReg The register to load into / store.
2466 * @param iBaseReg The base register to use when addressing. SP is allowed.
2467 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2468 */
2469DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSturLdur(ARMV8A64INSTRLDSTTYPE enmType,
2470 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2471{
2472 /* 3 2 1 0 */
2473 /* 10987654321098765432109876543210 */
2474 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000000) /* 0b00111000000000000000000000000000 */,
2475 enmType, iReg, iBaseReg, i9ImmDisp);
2476}
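/*
 * Example: "ldur w0, [x1, #-4]", a 32-bit load with an unscaled negative displacement:
 *
 *      uint32_t const uLdur = Armv8A64MkInstrSturLdur(kArmv8A64InstrLdStType_Ld_Word,
 *                                                     ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, -4);
 *      Assert(uLdur == UINT32_C(0xb85fc020));
 */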
2477
2478/**
2479 * A64: Encodes load/store with unscaled 9-bit signed immediate, post-indexed.
2480 *
2481 * @returns The encoded instruction.
2482 * @param enmType The load/store instruction type. Prefetch not valid.
2483 * @param iReg The register to load into / store.
2484 * @param iBaseReg The base register to use when addressing. SP is allowed.
2485 * Written back.
2486 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2487 */
2488DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPostIndex9(ARMV8A64INSTRLDSTTYPE enmType,
2489 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2490{
2491 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
2492 /* 10987654321098765432109876543210 */
2493 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000400) /* 0b00111000000000000000010000000000 */,
2494 enmType, iReg, iBaseReg, i9ImmDisp);
2495}
2496
2497/**
2498 * A64: Encodes load/store with unscaled 9-bit signed immediate, pre-indexed.
2499 *
2500 * @returns The encoded instruction.
2501 * @param enmType The load/store instruction type. Prefetch not valid.
2502 * @param iReg The register to load into / store.
2503 * @param iBaseReg The base register to use when addressing. SP is allowed.
2504 * Written back.
2505 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2506 */
2507DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPreIndex9(ARMV8A64INSTRLDSTTYPE enmType,
2508 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2509{
2510 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
2511 /* 10987654321098765432109876543210 */
2512 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000c00) /* 0b00111000000000000000110000000000 */,
2513 enmType, iReg, iBaseReg, i9ImmDisp);
2514}
2515
2516/**
2517 * A64: Encodes unprivileged load/store with unscaled 9-bit signed immediate.
2518 *
2519 * @returns The encoded instruction.
2520 * @param enmType The load/store instruction type. Prefetch not valid,
2521 * nor any SIMD&FP variants.
2522 * @param iReg The register to load into / store.
2523 * @param iBaseReg The base register to use when addressing. SP is allowed.
2524 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2525 */
2526DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSttrLdtr(ARMV8A64INSTRLDSTTYPE enmType,
2527 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2528{
2529 Assert(enmType != kArmv8A64InstrLdStType_Prefetch);
2530 Assert(!((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR));
2531 /* 3 2 1 0 */
2532 /* 10987654321098765432109876543210 */
2533 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000800) /* 0b00111000000000000000100000000000 */,
2534 enmType, iReg, iBaseReg, i9ImmDisp);
2535}
2536
2537
2538/**
2539 * A64: Encodes load/store w/ scaled 12-bit unsigned address displacement.
2540 *
2541 * @returns The encoded instruction.
2542 * @param enmType The load/store instruction type. Prefetch not valid,
2543 * nor any SIMD&FP variants.
2544 * @param iReg The register to load into / store.
2545 * @param iBaseReg The base register to use when addressing. SP is allowed.
2546 * @param u12ImmDisp Addressing displacement, scaled by size.
2547 */
2548DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRUOff(ARMV8A64INSTRLDSTTYPE enmType,
2549 uint32_t iReg, uint32_t iBaseReg, uint32_t u12ImmDisp)
2550{
2551 Assert(u12ImmDisp < 4096U);
2552 Assert(iReg < 32); /* 3 2 1 0 */
2553 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
2554 return UINT32_C(0x39000000) /* 0b00111001000000000000000000000000 */
2555 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
2556 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
2557 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
2558 | (u12ImmDisp << 10)
2559 | (iBaseReg << 5)
2560 | iReg;
2561}
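/*
 * Example: "ldr x0, [x1, #16]".  The displacement is scaled by the access size
 * (8 bytes for a 64-bit load), so 16 bytes is u12ImmDisp = 2:
 *
 *      uint32_t const uLdr = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_Ld_Dword,
 *                                                     ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, 2);
 *      Assert(uLdr == UINT32_C(0xf9400820));
 */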
2562
2563typedef enum
2564{
2565 kArmv8A64InstrLdStExtend_Uxtw = 2, /**< Zero-extend (32-bit) word. */
2566 kArmv8A64InstrLdStExtend_Lsl = 3, /**< Shift left (64-bit). */
2567 kArmv8A64InstrLdStExtend_Sxtw = 6, /**< Sign-extend (32-bit) word. */
2568 kArmv8A64InstrLdStExtend_Sxtx = 7 /**< Sign-extend (64-bit) dword (to 128-bit SIMD&FP reg, presumably). */
2569} ARMV8A64INSTRLDSTEXTEND;
2570
2571/**
2572 * A64: Encodes load/store w/ index register.
2573 *
2574 * @returns The encoded instruction.
2575 * @param enmType The load/store instruction type.
2576 * @param iReg The register to load into / store.
2577 * @param iBaseReg The base register to use when addressing. SP is allowed.
2578 * @param iRegIndex The index register.
2579 * @param enmExtend The extending to apply to @a iRegIndex.
2580 * @param fShifted Whether to shift the index. The shift amount corresponds
2581 * to the access size (thus irrelevant for byte accesses).
2582 */
2583DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRegIdx(ARMV8A64INSTRLDSTTYPE enmType,
2584 uint32_t iReg, uint32_t iBaseReg, uint32_t iRegIndex,
2585 ARMV8A64INSTRLDSTEXTEND enmExtend = kArmv8A64InstrLdStExtend_Lsl,
2586 bool fShifted = false)
2587{
2588 Assert(iRegIndex < 32);
2589 Assert(iReg < 32); /* 3 2 1 0 */
2590 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
2591 return UINT32_C(0x38200800) /* 0b00111000001000000000100000000000 */
2592 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
2593 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
2594 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
2595 | (iRegIndex << 16)
2596 | ((uint32_t)enmExtend << 13)
2597 | ((uint32_t)fShifted << 12)
2598 | (iBaseReg << 5)
2599 | iReg;
2600}
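/*
 * Example: "ldr x0, [x1, x2, lsl #3]", a 64-bit load with the index register
 * shifted by the access size (fShifted = true):
 *
 *      uint32_t const uLdr = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_Ld_Dword,
 *                                                      ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, ARMV8_A64_REG_X2,
 *                                                      kArmv8A64InstrLdStExtend_Lsl, true);
 *      Assert(uLdr == UINT32_C(0xf8627820));
 */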
2601
2602typedef enum /* VR Opc */
2603{ /* \ | */
2604 kArmv8A64InstrLdrLitteral_Mask_Vr = 0x10,
2605 kArmv8A64InstrLdrLitteral_Mask_Opc = 0x03,
2606 kArmv8A64InstrLdrLitteral_Shift_Vr = 4,
2607 kArmv8A64InstrLdrLitteral_Shift_Opc = 0,
2608
2609 kArmv8A64InstrLdrLitteral_Word = 0x00, /**< word = 32-bit */
2610 kArmv8A64InstrLdrLitteral_Dword = 0x01, /**< dword = 64-bit */
2611 kArmv8A64InstrLdrLitteral_SignWord64 = 0x02, /**< Loads word, sign-extending it to 64-bit */
2612 kArmv8A64InstrLdrLitteral_Prefetch = 0x03, /**< prfm */
2613
2614 kArmv8A64InstrLdrLitteral_Vr_Word = 0x10, /**< word = 32-bit */
2615 kArmv8A64InstrLdrLitteral_Vr_Dword = 0x11, /**< dword = 64-bit */
2616 kArmv8A64InstrLdrLitteral_Vr_128 = 0x12
2617} ARMV8A64INSTRLDRLITTERAL;
2618
2619
2620/**
2621 * A64: Encodes load w/ a PC relative 19-bit signed immediate.
2622 *
2623 * @returns The encoded instruction.
2624 * @param enmType The load instruction type.
2625 * @param iReg The register to load into.
2626 * @param i19Imm The signed immediate value, multiplied by 4 regardless
2627 * of access size.
2628 */
2629DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdrLitteral(ARMV8A64INSTRLDRLITTERAL enmType, uint32_t iReg, int32_t i19Imm)
2630{
2631 Assert(i19Imm >= -262144 && i19Imm < 262144);
2632 Assert(iReg < 32); /* 3 2 1 0 */
2633 /* 10987654321098765432109876543210 */
2634 return UINT32_C(0x18000000) /* 0b00011000000000000000000000000000 */
2635 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Vr) << (26 - kArmv8A64InstrLdrLitteral_Shift_Vr))
2636 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Opc) << (30 - kArmv8A64InstrLdrLitteral_Shift_Opc))
2637 | (((uint32_t)i19Imm & UINT32_C(0x0007ffff)) << 5)
2638 | iReg;
2639}
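/*
 * Example: "ldr x0, #8" (PC relative).  The immediate counts 32-bit words, so +8
 * bytes is i19Imm = 2:
 *
 *      uint32_t const uLdr = Armv8A64MkInstrLdrLitteral(kArmv8A64InstrLdrLitteral_Dword, ARMV8_A64_REG_X0, 2);
 *      Assert(uLdr == UINT32_C(0x58000040));
 */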
2640
2641
2642typedef enum
2643{
2644 kArmv8A64InstrMovWide_Not = 0, /**< MOVN - reg = ~(imm16 << hw*16); */
2645 kArmv8A64InstrMovWide_Zero = 2, /**< MOVZ - reg = imm16 << hw*16; */
2646 kArmv8A64InstrMovWide_Keep = 3 /**< MOVK - keep the other halfwords. */
2647} ARMV8A64INSTRMOVWIDE;
2648
2649/**
2650 * A64: Encode a move wide immediate instruction.
2651 *
2652 * @returns The encoded instruction.
2653 * @param enmType The move wide instruction type (MOVN, MOVZ or MOVK).
2654 * @param iRegDst The register to mov the immediate into.
2655 * @param uImm16 The immediate value.
2656 * @param iHalfWord Which of the 4 (@a f64Bit = true) or 2 (@a f64Bit = false)
2657 * 16-bit register half-words to target:
2658 * - 0 for bits 15:00,
2659 * - 1 for bits 31:16,
2660 * - 2 for bits 47:32 (f64Bit=true only),
2661 * - 3 for bits 63:48 (f64Bit=true only).
2662 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit GPRs.
2663 */
2664DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovWide(ARMV8A64INSTRMOVWIDE enmType, uint32_t iRegDst, uint32_t uImm16,
2665 uint32_t iHalfWord = 0, bool f64Bit = true)
2666{
2667 Assert(iRegDst < 32U); Assert(uImm16 <= (uint32_t)UINT16_MAX); Assert(iHalfWord < 2U + (2U * f64Bit));
2668 return ((uint32_t)f64Bit << 31)
2669 | ((uint32_t)enmType << 29)
2670 | UINT32_C(0x12800000)
2671 | (iHalfWord << 21)
2672 | (uImm16 << 5)
2673 | iRegDst;
2674}
2675
2676/** A64: Encodes a MOVN instruction.
2677 * @see Armv8A64MkInstrMovWide for parameter details. */
2678DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovN(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
2679{
2680 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Not, iRegDst, uImm16, iHalfWord, f64Bit);
2681}
2682
2683/** A64: Encodes a MOVZ instruction.
2684 * @see Armv8A64MkInstrMovWide for parameter details. */
2685DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovZ(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
2686{
2687 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Zero, iRegDst, uImm16, iHalfWord, f64Bit);
2688}
2689
2690/** A64: Encodes a MOVK instruction.
2691 * @see Armv8A64MkInstrMovWide for parameter details. */
2692DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovK(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
2693{
2694 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Keep, iRegDst, uImm16, iHalfWord, f64Bit);
2695}
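/*
 * Example: materializing the constant 0x12345678 in x0 with a MOVZ/MOVK pair,
 * 16 bits at a time:
 *
 *      uint32_t const uMovz = Armv8A64MkInstrMovZ(ARMV8_A64_REG_X0, 0x5678);      // movz x0, #0x5678
 *      uint32_t const uMovk = Armv8A64MkInstrMovK(ARMV8_A64_REG_X0, 0x1234, 1);   // movk x0, #0x1234, lsl #16
 *      Assert(uMovz == UINT32_C(0xd28acf00) && uMovk == UINT32_C(0xf2a24680));
 */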
2696
2697
2698typedef enum
2699{
2700 kArmv8A64InstrShift_Lsl = 0,
2701 kArmv8A64InstrShift_Lsr,
2702 kArmv8A64InstrShift_Asr,
2703 kArmv8A64InstrShift_Ror
2704} ARMV8A64INSTRSHIFT;
2705
2706
2707/**
2708 * A64: Encodes a logical instruction with a shifted 2nd register operand.
2709 *
2710 * @returns The encoded instruction.
2711 * @param u2Opc The logical operation to perform.
2712 * @param fNot Whether to complement the 2nd operand.
2713 * @param iRegResult The output register.
2714 * @param iReg1 The 1st register operand.
2715 * @param iReg2Shifted The 2nd register operand, to which the optional
2716 * shifting is applied.
2717 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit
2718 * GPRs.
2719 * @param offShift6 The shift amount (default: none).
2720 * @param enmShift The shift operation (default: LSL).
2721 */
2722DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalShiftedReg(uint32_t u2Opc, bool fNot,
2723 uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted,
2724 bool f64Bit, uint32_t offShift6, ARMV8A64INSTRSHIFT enmShift)
2725{
2726 Assert(u2Opc < 4); Assert(offShift6 < (f64Bit ? UINT32_C(64) : UINT32_C(32)));
2727 Assert(iRegResult < 32); Assert(iReg1 < 32); Assert(iReg2Shifted < 32);
2728 return ((uint32_t)f64Bit << 31)
2729 | (u2Opc << 29)
2730 | UINT32_C(0x0a000000)
2731 | ((uint32_t)enmShift << 22)
2732 | ((uint32_t)fNot << 21)
2733 | (iReg2Shifted << 16)
2734 | (offShift6 << 10)
2735 | (iReg1 << 5)
2736 | iRegResult;
2737}
2738
2739
2740/** A64: Encodes an AND instruction.
2741 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2742DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnd(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2743 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2744{
2745 return Armv8A64MkInstrLogicalShiftedReg(0, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2746}
2747
2748
2749/** A64: Encodes a BIC instruction.
2750 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2751DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBic(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2752 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2753{
2754 return Armv8A64MkInstrLogicalShiftedReg(0, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2755}
2756
2757
2758/** A64: Encodes an ORR instruction.
2759 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2760DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrr(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2761 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2762{
2763 return Armv8A64MkInstrLogicalShiftedReg(1, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2764}
2765
2766
2767/** A64: Encodes a MOV instruction.
2768 * This is an alias for "orr dst, xzr, src". */
2769DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMov(uint32_t iRegResult, uint32_t idxRegSrc, bool f64Bit = true)
2770{
2771 return Armv8A64MkInstrOrr(iRegResult, ARMV8_A64_REG_XZR, idxRegSrc, f64Bit);
2772}
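/*
 * Example: "mov x0, x1" is encoded as "orr x0, xzr, x1":
 *
 *      Assert(Armv8A64MkInstrMov(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1) == UINT32_C(0xaa0103e0));
 */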
2773
2774
2775/** A64: Encodes an ORN instruction.
2776 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2777DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrn(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2778 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2779{
2780 return Armv8A64MkInstrLogicalShiftedReg(1, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2781}
2782
2783
2784/** A64: Encodes an EOR instruction.
2785 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2786DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEor(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2787 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2788{
2789 return Armv8A64MkInstrLogicalShiftedReg(2, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2790}
2791
2792
2793/** A64: Encodes an EON instruction.
2794 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2795DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEon(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2796 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2797{
2798 return Armv8A64MkInstrLogicalShiftedReg(2, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2799}
2800
2801
2802/** A64: Encodes an ANDS instruction.
2803 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2804DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnds(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2805 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2806{
2807 return Armv8A64MkInstrLogicalShiftedReg(3, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2808}
2809
2810
2811/** A64: Encodes a BICS instruction.
2812 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
2813DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBics(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
2814 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
2815{
2816 return Armv8A64MkInstrLogicalShiftedReg(3, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
2817}
2818
2819
2820
2821/*
2822 * Data processing instructions with two source register operands.
2823 */
2824
2825
2826/** A64: Encodes an SUBP instruction. */
2827DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubP(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
2828{
2829 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
2830 return UINT32_C(0x80000000)
2831 | UINT32_C(0x1ac00000)
2832 | (UINT32_C(0) << 10)
2833 | (iRegSubtrahend << 16)
2834 | (iRegMinuend << 5)
2835 | iRegResult;
2836}
2837
2838
2839/** A64: Encodes an SUBPS instruction. */
2840DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubPS(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
2841{
2842 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
2843 return UINT32_C(0x80000000)
2844 | UINT32_C(0x20000000)
2845 | UINT32_C(0x1ac00000)
2846 | (UINT32_C(0) << 10)
2847 | (iRegSubtrahend << 16)
2848 | (iRegMinuend << 5)
2849 | iRegResult;
2850}
2851
2852
2853/** A64: Encodes an UDIV instruction. */
2854DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
2855{
2856 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
2857 return ((uint32_t)f64Bit << 31)
2858 | UINT32_C(0x1ac00000)
2859 | (UINT32_C(2) << 10)
2860 | (iRegDivisor << 16)
2861 | (iRegDividend << 5)
2862 | iRegResult;
2863}
2864
2865
2866/** A64: Encodes an SDIV instruction. */
2867DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
2868{
2869 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
2870 return ((uint32_t)f64Bit << 31)
2871 | UINT32_C(0x1ac00000)
2872 | (UINT32_C(3) << 10)
2873 | (iRegDivisor << 16)
2874 | (iRegDividend << 5)
2875 | iRegResult;
2876}
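/*
 * Example: "udiv x0, x1, x2":
 *
 *      Assert(Armv8A64MkInstrUDiv(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, ARMV8_A64_REG_X2) == UINT32_C(0x9ac20820));
 */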
2877
2878
2879/** A64: Encodes an IRG instruction. */
2880DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrIrg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
2881{
2882 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
2883 return UINT32_C(0x80000000)
2884 | UINT32_C(0x1ac00000)
2885 | (UINT32_C(4) << 10)
2886 | (iRegSrc2 << 16)
2887 | (iRegSrc1 << 5)
2888 | iRegResult;
2889}
2890
2891
2892/** A64: Encodes a GMI instruction. */
2893DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrGmi(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
2894{
2895 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
2896 return UINT32_C(0x80000000)
2897 | UINT32_C(0x1ac00000)
2898 | (UINT32_C(5) << 10)
2899 | (iRegSrc2 << 16)
2900 | (iRegSrc1 << 5)
2901 | iRegResult;
2902}
2903
2904
2905/** A64: Encodes an LSLV instruction. */
2906DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
2907{
2908 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
2909 return ((uint32_t)f64Bit << 31)
2910 | UINT32_C(0x1ac00000)
2911 | (UINT32_C(8) << 10)
2912 | (iRegCount << 16)
2913 | (iRegSrc << 5)
2914 | iRegResult;
2915}
2916
2917
2918/** A64: Encodes an LSRV instruction. */
2919DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
2920{
2921 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
2922 return ((uint32_t)f64Bit << 31)
2923 | UINT32_C(0x1ac00000)
2924 | (UINT32_C(9) << 10)
2925 | (iRegCount << 16)
2926 | (iRegSrc << 5)
2927 | iRegResult;
2928}
2929
2930
2931/** A64: Encodes an ASRV instruction. */
2932DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
2933{
2934 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
2935 return ((uint32_t)f64Bit << 31)
2936 | UINT32_C(0x1ac00000)
2937 | (UINT32_C(10) << 10)
2938 | (iRegCount << 16)
2939 | (iRegSrc << 5)
2940 | iRegResult;
2941}
2942
2943
2944/** A64: Encodes a RORV instruction. */
2945DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
2946{
2947 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
2948 return ((uint32_t)f64Bit << 31)
2949 | UINT32_C(0x1ac00000)
2950 | (UINT32_C(11) << 10)
2951 | (iRegCount << 16)
2952 | (iRegSrc << 5)
2953 | iRegResult;
2954}
2955
2956
2957/** A64: Encodes a PACGA instruction. */
2958DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrPacga(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
2959{
2960 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
2961 return UINT32_C(0x80000000)
2962 | UINT32_C(0x1ac00000)
2963 | (UINT32_C(12) << 10)
2964 | (iRegSrc2 << 16)
2965 | (iRegSrc1 << 5)
2966 | iRegResult;
2967}
2968
2969
2970/** A64: Encodes a CRC32* instruction. */
2971DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
2972{
2973 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
2974 return ((uint32_t)(uSize == 3) << 31)
2975 | UINT32_C(0x1ac00000)
2976 | (UINT32_C(16) << 10)
2977 | (uSize << 10)
2978 | (iRegValue << 16)
2979 | (iRegCrc << 5)
2980 | iRegResult;
2981}
2982
2983
2984/** A64: Encodes a CRC32B instruction. */
2985DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32B(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
2986{
2987 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 0);
2988}
2989
2990
2991/** A64: Encodes a CRC32H instruction. */
2992DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32H(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
2993{
2994 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 1);
2995}
2996
2997
2998/** A64: Encodes a CRC32W instruction. */
2999DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32W(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3000{
3001 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 2);
3002}
3003
3004
3005/** A64: Encodes a CRC32X instruction. */
3006DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32X(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3007{
3008 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 3);
3009}
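/*
 * Example: "crc32w w0, w1, w2":
 *
 *      Assert(Armv8A64MkInstrCrc32W(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, ARMV8_A64_REG_X2) == UINT32_C(0x1ac24820));
 */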
3010
3011
3012/** A64: Encodes a CRC32C* instruction. */
3013DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32c(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
3014{
3015 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
3016 return ((uint32_t)(uSize == 3) << 31)
3017 | UINT32_C(0x1ac00000)
3018 | (UINT32_C(20) << 10)
3019 | (uSize << 10)
3020 | (iRegValue << 16)
3021 | (iRegCrc << 5)
3022 | iRegResult;
3023}
3024
3025
3026/** A64: Encodes a CRC32CB instruction. */
3027DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cB(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3028{
3029 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 0);
3030}
3031
3032
3033/** A64: Encodes a CRC32CH instruction. */
3034DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cH(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3035{
3036 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 1);
3037}
3038
3039
3040/** A64: Encodes a CRC32CW instruction. */
3041DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cW(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3042{
3043 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 2);
3044}
3045
3046
3047/** A64: Encodes a CRC32CX instruction. */
3048DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cX(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3049{
3050 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 3);
3051}
3052
3053
3054/** A64: Encodes an SMAX instruction. */
3055DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3056{
3057 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3058 return ((uint32_t)f64Bit << 31)
3059 | UINT32_C(0x1ac00000)
3060 | (UINT32_C(24) << 10)
3061 | (iRegSrc2 << 16)
3062 | (iRegSrc1 << 5)
3063 | iRegResult;
3064}
3065
3066
3067/** A64: Encodes an UMAX instruction. */
3068DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3069{
3070 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3071 return ((uint32_t)f64Bit << 31)
3072 | UINT32_C(0x1ac00000)
3073 | (UINT32_C(25) << 10)
3074 | (iRegSrc2 << 16)
3075 | (iRegSrc1 << 5)
3076 | iRegResult;
3077}
3078
3079
3080/** A64: Encodes an SMIN instruction. */
3081DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3082{
3083 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3084 return ((uint32_t)f64Bit << 31)
3085 | UINT32_C(0x1ac00000)
3086 | (UINT32_C(26) << 10)
3087 | (iRegSrc2 << 16)
3088 | (iRegSrc1 << 5)
3089 | iRegResult;
3090}
3091
3092
3093/** A64: Encodes an UMIN instruction. */
3094DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3095{
3096 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3097 return ((uint32_t)f64Bit << 31)
3098 | UINT32_C(0x1ac00000)
3099 | (UINT32_C(27) << 10)
3100 | (iRegSrc2 << 16)
3101 | (iRegSrc1 << 5)
3102 | iRegResult;
3103}
3104
3105
3106# ifdef IPRT_INCLUDED_asm_h /* don't want this to be automatically included here. */
3107
3108/**
3109 * Converts immS and immR values (from logical instructions) to a 32-bit mask.
3110 *
3111 * @returns The decoded mask.
3112 * @param uImm6SizeLen The immS value from the instruction. (No N part
3113 * here, as that must be zero for instructions
3114 * operating on 32-bit wide registers.)
3115 * @param uImm6Rotations The immR value from the instruction.
3116 */
3117DECLINLINE(uint32_t) Armv8A64ConvertImmRImmS2Mask32(uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3118{
3119 Assert(uImm6SizeLen < 64); Assert(uImm6Rotations < 64);
3120
3121 /* Determine the element size. */
3122 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm6SizeLen ^ 0x3f) - 1U;
3123 Assert(cBitsElementLog2 + 1U != 0U);
3124
3125 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3126 Assert(uImm6Rotations < cBitsElement);
3127
3128 /* Extract the number of bits set to 1: */
3129 unsigned const cBitsSetTo1 = (uImm6SizeLen & (cBitsElement - 1U)) + 1;
3130 Assert(cBitsSetTo1 < cBitsElement);
3131 uint32_t const uElement = RT_BIT_32(cBitsSetTo1) - 1U;
3132
3133 /* Produce the unrotated pattern. */
3134 static const uint32_t s_auReplicate[]
3135 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3136 uint32_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3137
3138 /* Rotate it and return. */
3139 return ASMRotateRightU32(uPattern, uImm6Rotations & (cBitsElement - 1U));
3140}
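/*
 * Example: "and w0, w1, #0xff00" carries immS=7 (eight 1-bits in a 32-bit element)
 * and immR=24 (rotate right by 24), which this helper decodes back to the mask:
 *
 *      Assert(Armv8A64ConvertImmRImmS2Mask32(7, 24) == UINT32_C(0x0000ff00));
 *      Assert(Armv8A64ConvertImmRImmS2Mask32(7,  0) == UINT32_C(0x000000ff));
 */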
3141
3142
3143/**
3144 * Converts N+immS and immR values (from logical instructions) to a 64-bit mask.
3145 *
3146 * @returns The decoded mask.
3147 * @param uImm7SizeLen The N:immS value from the instruction.
3148 * @param uImm6Rotations The immR value from the instruction.
3149 */
3150DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uImm7SizeLen, uint32_t uImm6Rotations)
3151{
3152 Assert(uImm7SizeLen < 128); Assert(uImm6Rotations < 64);
3153
3154 /* Determine the element size. */
3155 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm7SizeLen ^ 0x3f) - 1U;
3156 Assert(cBitsElementLog2 + 1U != 0U);
3157
3158 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3159 Assert(uImm6Rotations < cBitsElement);
3160
3161 /* Extract the number of bits set to 1: */
3162 unsigned const cBitsSetTo1 = (uImm7SizeLen & (cBitsElement - 1U)) + 1;
3163 Assert(cBitsSetTo1 < cBitsElement);
3164 uint64_t const uElement = RT_BIT_64(cBitsSetTo1) - 1U;
3165
3166 /* Produce the unrotated pattern. */
3167 static const uint64_t s_auReplicate[]
3168 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3169 uint64_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3170
3171 /* Rotate it and return. */
3172 return ASMRotateRightU64(uPattern, uImm6Rotations & (cBitsElement - 1U));
3173}
3174
3175
3176/**
3177 * Variant of Armv8A64ConvertImmRImmS2Mask64 where the N bit is separate from
3178 * the immS value.
3179 */
3180DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uN, uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3181{
3182 return Armv8A64ConvertImmRImmS2Mask64((uN << 6) | uImm6SizeLen, uImm6Rotations);
3183}
3184
3185
3186/**
3187 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3188 * 32-bit bitmask to a set of immediates for those instructions.
3189 *
3190 * @returns true if successful, false if not.
3191 * @param fMask The mask value to convert.
3192 * @param puImm6SizeLen Where to return the immS part (N is always zero for
3193 * 32-bit wide masks).
3194 * @param puImm6Rotations Where to return the immR.
3195 */
3196DECLINLINE(bool) Armv8A64ConvertMask32ToImmRImmS(uint32_t fMask, uint32_t *puImm6SizeLen, uint32_t *puImm6Rotations)
3197{
3198 /* Fend off 0 and UINT32_MAX as these cannot be represented. */
3199 if ((uint32_t)(fMask + 1U) <= 1)
3200 return false;
3201
3202 /* Rotate the value until we get all 1s at the bottom and the zeros at the top. */
3203 unsigned const cRor = ASMCountTrailingZerosU32(fMask);
3204 unsigned const cRol = ASMCountLeadingZerosU32(~fMask);
3205 if (cRor)
3206 fMask = ASMRotateRightU32(fMask, cRor);
3207 else
3208 fMask = ASMRotateLeftU32(fMask, cRol);
3209 Assert(fMask & RT_BIT_32(0));
3210 Assert(!(fMask & RT_BIT_32(31)));
3211
3212 /* Count the trailing ones and leading zeros. */
3213 unsigned const cOnes = ASMCountTrailingZerosU32(~fMask);
3214 unsigned const cZeros = ASMCountLeadingZerosU32(fMask);
3215
3216 /* The potential element length is then the sum of the two above. */
3217 unsigned const cBitsElement = cOnes + cZeros;
3218 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3219 return false;
3220
3221 /* Special case: 32-bit element size - nothing to replicate, so we're done here. */
3222 if (cBitsElement == 32)
3223 *puImm6SizeLen = cOnes - 1;
3224 else
3225 {
3226 /* Extract the element bits and check that these are replicated in the whole pattern. */
3227 uint32_t const uElement = RT_BIT_32(cOnes) - 1U;
3228 unsigned const cBitsElementLog2 = ASMBitFirstSetU32(cBitsElement) - 1;
3229
3230 static const uint32_t s_auReplicate[]
3231 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3232 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3233 *puImm6SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3234 else
3235 return false;
3236 }
3237 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3238
3239 return true;
3240}
3241
3242
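/*
 * Usage sketch (illustrative only): converting a replicating mask back into an
 * immS/immR pair.  For 0x00ff00ff this should succeed with immS=0x27 and immR=0,
 * i.e. the exact inverse of the decoder example further up:
 *
 *     uint32_t uImm6SizeLen = 0, uImm6Rotations = 0;
 *     if (Armv8A64ConvertMask32ToImmRImmS(UINT32_C(0x00ff00ff), &uImm6SizeLen, &uImm6Rotations))
 *         Assert(Armv8A64ConvertImmRImmS2Mask32(uImm6SizeLen, uImm6Rotations) == UINT32_C(0x00ff00ff));
 */

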
3243/**
3244 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3245 * 64-bit bitmask to a set of immediates for those instructions.
3246 *
3247 * @returns true if successful, false if not.
3248 * @param fMask The mask value to convert.
3249 * @param puImm7SizeLen Where to return the N:immS part.
3250 * @param puImm6Rotations Where to return the immR.
3251 */
3252DECLINLINE(bool) Armv8A64ConvertMask64ToImmRImmS(uint64_t fMask, uint32_t *puImm7SizeLen, uint32_t *puImm6Rotations)
3253{
3254 /* Fend off 0 and UINT64_MAX as these cannot be represented. */
3255 if ((uint64_t)(fMask + 1U) <= 1)
3256 return false;
3257
3258 /* Rotate the value until we get all 1s at the bottom and the zeros at the top. */
3259 unsigned const cRor = ASMCountTrailingZerosU64(fMask);
3260 unsigned const cRol = ASMCountLeadingZerosU64(~fMask);
3261 if (cRor)
3262 fMask = ASMRotateRightU64(fMask, cRor);
3263 else
3264 fMask = ASMRotateLeftU64(fMask, cRol);
3265 Assert(fMask & RT_BIT_64(0));
3266 Assert(!(fMask & RT_BIT_64(63)));
3267
3268 /* Count the trailing ones and leading zeros. */
3269 unsigned const cOnes = ASMCountTrailingZerosU64(~fMask);
3270 unsigned const cZeros = ASMCountLeadingZerosU64(fMask);
3271
3272 /* The potential element length is then the sum of the two above. */
3273 unsigned const cBitsElement = cOnes + cZeros;
3274 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3275 return false;
3276
3277 /* Special case: 64-bit element size - nothing to replicate, so we're done here. */
3278 if (cBitsElement == 64)
3279 *puImm7SizeLen = (cOnes - 1) | 0x40 /*N*/;
3280 else
3281 {
3282 /* Extract the element bits and check that these are replicated in the whole pattern. */
3283 uint64_t const uElement = RT_BIT_64(cOnes) - 1U;
3284 unsigned const cBitsElementLog2 = ASMBitFirstSetU64(cBitsElement) - 1;
3285
3286 static const uint64_t s_auReplicate[]
3287 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3288 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3289 *puImm7SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3290 else
3291 return false;
3292 }
3293 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3294
3295 return true;
3296}
3297
3298# endif /* IPRT_INCLUDED_asm_h */
3299
3300/**
3301 * A64: Encodes a logical instruction with a complicated immediate mask.
3302 *
3303 * The @a uImm7SizeLen parameter specifies two things:
3304 * 1. the element size and
3305 * 2. the number of bits set to 1 in the pattern.
3306 *
3307 * The element size is extracted by NOT'ing bits 5:0 (excludes the N bit at the
3308 * top) and using the position of the most significant set bit as a power of two.
3309 *
3310 * | N | 5 | 4 | 3 | 2 | 1 | 0 | element size |
3311 * |---|---|---|---|---|---|---|--------------|
3312 * | 0 | 1 | 1 | 1 | 1 | 0 | x | 2 bits |
3313 * | 0 | 1 | 1 | 1 | 0 | x | x | 4 bits |
3314 * | 0 | 1 | 1 | 0 | x | x | x | 8 bits |
3315 * | 0 | 1 | 0 | x | x | x | x | 16 bits |
3316 * | 0 | 0 | x | x | x | x | x | 32 bits |
3317 * | 1 | x | x | x | x | x | x | 64 bits |
3318 *
3319 * The 'x' forms the number of 1 bits in the pattern, minus one (i.e.
3320 * there is always one zero bit in the pattern).
3321 *
3322 * The @a uImm6Rotations parameter specifies how many bits to the right,
3323 * the element pattern is rotated. The rotation count must be less than the
3324 * element bit count (size).
3325 *
3326 * @returns The encoded instruction.
3327 * @param u2Opc The logical operation to perform.
3328 * @param iRegResult The output register.
3329 * @param iRegSrc The 1st register operand.
3330 * @param uImm7SizeLen The size/pattern length. We've combined the 1-bit N
3331 * @param uImm7SizeLen The size/pattern length. This combines the 1-bit N
3332 * field (at the top) with the 6-bit 'imms' field.
3334 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3335 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3336 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3337 */
3338DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3339 uint32_t uImm7SizeLen, uint32_t uImm6Rotations, bool f64Bit)
3340{
3341 Assert(u2Opc < 4); Assert(uImm7SizeLen < (f64Bit ? UINT32_C(0x7f) : UINT32_C(0x3f)));
3342 Assert(uImm6Rotations <= UINT32_C(0x3f)); Assert(iRegResult < 32); Assert(iRegSrc < 32);
3343 return ((uint32_t)f64Bit << 31)
3344 | (u2Opc << 29)
3345 | UINT32_C(0x12000000)
3346 | ((uImm7SizeLen & UINT32_C(0x40)) << (22 - 6))
3347 | (uImm6Rotations << 16)
3348 | ((uImm7SizeLen & UINT32_C(0x3f)) << 10)
3349 | (iRegSrc << 5)
3350 | iRegResult;
3351}
3352
3353
3354/** A64: Encodes an AND instruction w/ complicated immediate mask.
3355 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3356DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndImm(uint32_t iRegResult, uint32_t iRegSrc,
3357 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3358{
3359 return Armv8A64MkInstrLogicalImm(0, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3360}
3361
3362
3363/** A64: Encodes an ORR instruction w/ complicated immediate mask.
3364 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3365DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrrImm(uint32_t iRegResult, uint32_t iRegSrc,
3366 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3367{
3368 return Armv8A64MkInstrLogicalImm(1, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3369}
3370
3371
3372/** A64: Encodes an EOR instruction w/ complicated immediate mask.
3373 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3374DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEorImm(uint32_t iRegResult, uint32_t iRegSrc,
3375 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3376{
3377 return Armv8A64MkInstrLogicalImm(2, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3378}
3379
3380
3381/** A64: Encodes an ANDS instruction w/ complicated immediate mask.
3382 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3383DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndsImm(uint32_t iRegResult, uint32_t iRegSrc,
3384 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3385{
3386 return Armv8A64MkInstrLogicalImm(3, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3387}
3388
3389
3390/** A64: Encodes a TST instruction w/ complicated immediate mask.
3391 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3392DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTstImm(uint32_t iRegSrc,
3393 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3394{
3395 return Armv8A64MkInstrAndsImm(ARMV8_A64_REG_XZR, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3396}
3397
3398
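/*
 * Usage sketch (illustrative only): encoding 'and x0, x1, #0xff'.  The mask is
 * first converted to the N:immS/immR form and then fed to the AND encoder; the
 * expected opcode 0x92401c20 is given for illustration:
 *
 *     uint32_t uImm7SizeLen = 0, uImm6Rotations = 0;
 *     if (Armv8A64ConvertMask64ToImmRImmS(UINT64_C(0xff), &uImm7SizeLen, &uImm6Rotations))
 *     {
 *         uint32_t const uInstr = Armv8A64MkInstrAndImm(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1,
 *                                                       uImm7SizeLen, uImm6Rotations);
 *         Assert(uInstr == UINT32_C(0x92401c20));
 *     }
 */

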
3399/**
3400 * A64: Encodes a bitfield instruction.
3401 *
3402 * @returns The encoded instruction.
3403 * @param u2Opc The bitfield operation to perform.
3404 * @param iRegResult The output register.
3405 * @param iRegSrc The 1st register operand.
3406 * @param cImm6Ror The right rotation count.
3407 * @param uImm6S The leftmost bit to be moved.
3408 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3409 * @param uN1 This must match @a f64Bit for all instructions
3410 * currently specified.
3411 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3412 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3413 */
3414DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBitfieldImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3415 uint32_t cImm6Ror, uint32_t uImm6S, bool f64Bit, uint32_t uN1)
3416{
3417 Assert(cImm6Ror <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegResult < 32); Assert(u2Opc < 4);
3418 Assert(uImm6S <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegSrc < 32); Assert(uN1 <= (unsigned)f64Bit);
3419 return ((uint32_t)f64Bit << 31)
3420 | (u2Opc << 29)
3421 | UINT32_C(0x13000000)
3422 | (uN1 << 22)
3423 | (cImm6Ror << 16)
3424 | (uImm6S << 10)
3425 | (iRegSrc << 5)
3426 | iRegResult;
3427}
3428
3429
3430/** A64: Encodes a SBFM instruction.
3431 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3432DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
3433 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
3434{
3435 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
3436}
3437
3438
3439/** A64: Encodes a SXTB instruction (sign-extend 8-bit value to 32/64-bit).
3440 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3441DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3442{
3443 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 7, f64Bit);
3444}
3445
3446
3447/** A64: Encodes a SXTH instruction (sign-extend 16-bit value to 32/64-bit).
3448 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3449DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3450{
3451 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 15, f64Bit);
3452}
3453
3454
3455/** A64: Encodes a SXTW instruction (sign-extend 32-bit value to 64-bit).
3456 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3457DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtw(uint32_t iRegResult, uint32_t iRegSrc)
3458{
3459 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 31, true /*f64Bit*/);
3460}
3461
3462
3463/** A64: Encodes an ASR instruction w/ immediate shift value.
3464 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3465DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
3466{
3467 uint32_t const cWidth = f64Bit ? 63 : 31;
3468 Assert(cShift > 0); Assert(cShift <= cWidth);
3469 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
3470}
3471
3472
3473/** A64: Encodes a BFM instruction.
3474 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3475DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
3476 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
3477{
3478 return Armv8A64MkInstrBitfieldImm(1, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
3479}
3480
3481
3482/** A64: Encodes a BFI instruction (insert).
3483 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3484DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfi(uint32_t iRegResult, uint32_t iRegSrc,
3485 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
3486{
3487 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
3488 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)-(int32_t)offFirstBit & (f64Bit ? 0x3f : 0x1f),
3489 cBitsWidth - 1, f64Bit);
3490}
3491
3492
3493/** A64: Encodes a BFXIL instruction (insert low).
3494 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3495DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfxil(uint32_t iRegResult, uint32_t iRegSrc,
3496 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
3497{
3498 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
3499 Assert(offFirstBit + cBitsWidth <= (f64Bit ? 64U : 32U));
3500 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
3501}
3502
3503
3504/** A64: Encodes an UBFM instruction.
3505 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3506DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
3507 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
3508{
3509 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
3510}
3511
3512
3513/** A64: Encodes an UBFX instruction (zero extending extract).
3514 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3515DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfx(uint32_t iRegResult, uint32_t iRegSrc,
3516 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
3517{
3518 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
3519}
3520
3521
3522/** A64: Encodes an UBFIZ instruction (zero extending extract from bit zero,
3523 * shifted into destination).
3524 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3525DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfiz(uint32_t iRegResult, uint32_t iRegSrc,
3526 uint32_t offFirstBitDst, uint32_t cBitsWidth, bool f64Bit = true)
3527{
3528 uint32_t fMask = f64Bit ? 0x3f : 0x1f;
3529 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, -(int32_t)offFirstBitDst & fMask, cBitsWidth - 1, f64Bit);
3530}
3531
3532
3533/** A64: Encodes an LSL instruction w/ immediate shift value.
3534 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3535DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
3536{
3537 uint32_t const cWidth = f64Bit ? 63 : 31;
3538 Assert(cShift > 0); Assert(cShift <= cWidth);
3539 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, (uint32_t)(0 - cShift) & cWidth,
3540 cWidth - cShift /*uImm6S*/, f64Bit, f64Bit);
3541}
3542
3543
3544/** A64: Encodes an LSR instruction w/ immediate shift value.
3545 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3546DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
3547{
3548 uint32_t const cWidth = f64Bit ? 63 : 31;
3549 Assert(cShift > 0); Assert(cShift <= cWidth);
3550 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
3551}
3552
3553
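/*
 * Usage sketch (illustrative only): right shifts are just UBFM with the top
 * bit of the register as the last bit to move, so 'lsr x0, x1, #4' is
 * UBFM x0, x1, #4, #63 and should encode as 0xd344fc20:
 *
 *     uint32_t const uInstr = Armv8A64MkInstrLsrImm(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, 4);
 *     Assert(uInstr == UINT32_C(0xd344fc20));
 */

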
3554/** A64: Encodes an UXTB instruction - zero extend byte (8-bit).
3555 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3556DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
3557{
3558 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 7, f64Bit, f64Bit);
3559}
3560
3561
3562/** A64: Encodes an UXTH instruction - zero extend half word (16-bit).
3563 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3564DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
3565{
3566 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 15, f64Bit, f64Bit);
3567}
3568
3569
3570/**
3571 * A64: Encodes an EXTR instruction with an immediate.
3572 *
3573 * @returns The encoded instruction.
3574 * @param iRegResult The register to store the result in. ZR is valid.
3575 * @param iRegLow The register holding the least significant bits in the
3576 * extraction. ZR is valid.
3577 * @param iRegHigh The register holding the most significant bits in the
3578 * extraction. ZR is valid.
3579 * @param uLsb The bit number of the least significant bit, or where in
3580 * @a iRegLow to start the extraction.
3582 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
3583 */
3584DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrExtrImm(uint32_t iRegResult, uint32_t iRegLow, uint32_t iRegHigh, uint32_t uLsb,
3585 bool f64Bit = true)
3586{
3587 Assert(uLsb < (uint32_t)(f64Bit ? 64 : 32)); Assert(iRegHigh < 32); Assert(iRegLow < 32); Assert(iRegResult < 32);
3588 return ((uint32_t)f64Bit << 31)
3589 | UINT32_C(0x13800000)
3590 | ((uint32_t)f64Bit << 22) /*N*/
3591 | (iRegHigh << 16)
3592 | (uLsb << 10)
3593 | (iRegLow << 5)
3594 | iRegResult;
3595}
3596
3597
3598/** A64: Rotates the value of a register (alias for EXTR). */
3599DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
3600{
3601 return Armv8A64MkInstrExtrImm(iRegResult, iRegSrc, iRegSrc, cShift, f64Bit);
3602}
3603
3604
3605/**
3606 * A64: Encodes either add, adds, sub or subs with unsigned 12-bit immediate.
3607 *
3608 * @returns The encoded instruction.
3609 * @param fSub true for sub and subs, false for add and
3610 * adds.
3611 * @param iRegResult The register to store the result in.
3612 * SP is valid when @a fSetFlags = false,
3613 * and ZR is valid otherwise.
3614 * @param iRegSrc The register containing the augend (@a fSub
3615 * = false) or minuend (@a fSub = true). SP is
3616 * a valid register for all variations.
3617 * @param uImm12AddendSubtrahend The addend (@a fSub = false) or subtrahend
3618 * (@a fSub = true).
3619 * @param f64Bit true for 64-bit GPRs (default), false for
3620 * 32-bit GPRs.
3621 * @param fSetFlags Whether to set flags (adds / subs) or not
3622 * (add / sub - default).
3623 * @param fShift12 Whether to shift uImm12AddendSubtrahend 12
3624 * bits to the left, or not (default).
3625 */
3626DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubUImm12(bool fSub, uint32_t iRegResult, uint32_t iRegSrc,
3627 uint32_t uImm12AddendSubtrahend, bool f64Bit = true,
3628 bool fSetFlags = false, bool fShift12 = false)
3629{
3630 Assert(uImm12AddendSubtrahend < 4096); Assert(iRegSrc < 32); Assert(iRegResult < 32);
3631 return ((uint32_t)f64Bit << 31)
3632 | ((uint32_t)fSub << 30)
3633 | ((uint32_t)fSetFlags << 29)
3634 | UINT32_C(0x11000000)
3635 | ((uint32_t)fShift12 << 22)
3636 | (uImm12AddendSubtrahend << 10)
3637 | (iRegSrc << 5)
3638 | iRegResult;
3639}
3640
3641
3642/** Alias for subs xzr, reg, \#uimm12. */
3643DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpUImm12(uint32_t iRegSrc, uint32_t uImm12Comprahend,
3644 bool f64Bit = true, bool fShift12 = false)
3645{
3646 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc, uImm12Comprahend,
3647 f64Bit, true /*fSetFlags*/, fShift12);
3648}
3649
3650
3651/** ADD dst, src, \#uimm12 */
3652DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Addend,
3653 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
3654{
3655 return Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iRegResult, iRegSrc, uImm12Addend, f64Bit, fSetFlags, fShift12);
3656}
3657
3658
3659/** SUB dst, src, \#uimm12 */
3660DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Subtrahend,
3661 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
3662{
3663 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, iRegResult, iRegSrc, uImm12Subtrahend, f64Bit, fSetFlags, fShift12);
3664}
3665
3666
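/*
 * Usage sketch (illustrative only): 'add x0, x1, #16' should encode as
 * 0x91004020, while 'cmp x1, #0' is the SUBS-with-XZR-destination alias and
 * should encode as 0xf100003f:
 *
 *     uint32_t const uInstrAdd = Armv8A64MkInstrAddUImm12(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, 16);
 *     Assert(uInstrAdd == UINT32_C(0x91004020));
 *     uint32_t const uInstrCmp = Armv8A64MkInstrCmpUImm12(ARMV8_A64_REG_X1, 0);
 *     Assert(uInstrCmp == UINT32_C(0xf100003f));
 */

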
3667/**
3668 * A64: Encodes either add, adds, sub or subs with shifted register.
3669 *
3670 * @returns The encoded instruction.
3671 * @param fSub true for sub and subs, false for add and
3672 * adds.
3673 * @param iRegResult The register to store the result in.
3674 * SP is NOT valid, but ZR is.
3675 * @param iRegSrc1 The register containing the augend (@a fSub
3676 * = false) or minuend (@a fSub = true).
3677 * SP is NOT valid, but ZR is.
3678 * @param iRegSrc2 The register containing the addend (@a fSub
3679 * = false) or subtrahend (@a fSub = true).
3680 * SP is NOT valid, but ZR is.
3681 * @param f64Bit true for 64-bit GPRs (default), false for
3682 * 32-bit GPRs.
3683 * @param fSetFlags Whether to set flags (adds / subs) or not
3684 * (add / sub - default).
3685 * @param cShift The shift count to apply to @a iRegSrc2.
3686 * @param enmShift The shift type to apply to the @a iRegSrc2
3687 * register. kArmv8A64InstrShift_Ror is
3688 * reserved.
3689 */
3690DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubReg(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3691 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
3692 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3693{
3694 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3695 Assert(cShift < (f64Bit ? 64U : 32U)); Assert(enmShift != kArmv8A64InstrShift_Ror);
3696
3697 return ((uint32_t)f64Bit << 31)
3698 | ((uint32_t)fSub << 30)
3699 | ((uint32_t)fSetFlags << 29)
3700 | UINT32_C(0x0b000000)
3701 | ((uint32_t)enmShift << 22)
3702 | (iRegSrc2 << 16)
3703 | (cShift << 10)
3704 | (iRegSrc1 << 5)
3705 | iRegResult;
3706}
3707
3708
3709/** Alias for subs xzr, reg1, reg2 [, LSL/LSR/ASR \#xx]. */
3710DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true, uint32_t cShift = 0,
3711 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3712{
3713 return Armv8A64MkInstrAddSubReg(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc1, iRegSrc2,
3714 f64Bit, true /*fSetFlags*/, cShift, enmShift);
3715}
3716
3717
3718/** ADD dst, reg1, reg2 [, LSL/LSR/ASR \#xx] */
3719DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3720 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
3721 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3722{
3723 return Armv8A64MkInstrAddSubReg(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
3724}
3725
3726
3727/** SUB dst, reg1, reg2 [, LSL/LSR/ASR \#xx] */
3728DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3729 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
3730 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3731{
3732 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
3733}
3734
3735
3736/** NEG dst */
3737DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrNeg(uint32_t iRegResult, bool f64Bit = true, bool fSetFlags = false)
3738{
3739 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, ARMV8_A64_REG_XZR, iRegResult, f64Bit, fSetFlags);
3740}
3741
3742
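/*
 * Usage sketch (illustrative only): 'sub x0, x1, x2' should encode as
 * 0xcb020020, and 'cmp x1, x2' (SUBS with XZR as destination) as 0xeb02003f:
 *
 *     uint32_t const uInstrSub = Armv8A64MkInstrSubReg(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, ARMV8_A64_REG_X2);
 *     Assert(uInstrSub == UINT32_C(0xcb020020));
 *     uint32_t const uInstrCmp = Armv8A64MkInstrCmpReg(ARMV8_A64_REG_X1, ARMV8_A64_REG_X2);
 *     Assert(uInstrCmp == UINT32_C(0xeb02003f));
 */

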
3743/** Extension option for 'extended register' instructions. */
3744typedef enum ARMV8A64INSTREXTEND
3745{
3746 kArmv8A64InstrExtend_UxtB = 0,
3747 kArmv8A64InstrExtend_UxtH,
3748 kArmv8A64InstrExtend_UxtW,
3749 kArmv8A64InstrExtend_UxtX,
3750 kArmv8A64InstrExtend_SxtB,
3751 kArmv8A64InstrExtend_SxtH,
3752 kArmv8A64InstrExtend_SxtW,
3753 kArmv8A64InstrExtend_SxtX,
3754 /** The default is either UXTW or UXTX depending on whether the instruction
3755 * is in 32-bit or 64-bit mode. Thus, this needs to be resolved according
3756 * to the f64Bit value. */
3757 kArmv8A64InstrExtend_Default
3758} ARMV8A64INSTREXTEND;
3759
3760
3761/**
3762 * A64: Encodes either add, adds, sub or subs with extended register encoding.
3763 *
3764 * @returns The encoded instruction.
3765 * @param fSub true for sub and subs, false for add and
3766 * adds.
3767 * @param iRegResult The register to store the result in.
3768 * SP is NOT valid, but ZR is.
3769 * @param iRegSrc1 The register containing the augend (@a fSub
3770 * = false) or minuend (@a fSub = true).
3771 * SP is valid, but ZR is NOT.
3772 * @param iRegSrc2 The register containing the addend (@a fSub
3773 * = false) or subtrahend (@a fSub = true).
3774 * SP is NOT valid, but ZR is.
3775 * @param f64Bit true for 64-bit GPRs (default), false for
3776 * 32-bit GPRs.
3777 * @param fSetFlags Whether to set flags (adds / subs) or not
3778 * (add / sub - default).
3779 * @param enmExtend The type of extension to apply to @a
3780 * iRegSrc2.
3781 * @param cShift The left shift count to apply to @a iRegSrc2
3782 * after enmExtend processing is done.
3783 * Max shift is 4 (architectural limit).
3784 */
3785DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubRegExtend(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3786 bool f64Bit = true, bool fSetFlags = false,
3787 ARMV8A64INSTREXTEND enmExtend = kArmv8A64InstrExtend_Default,
3788 uint32_t cShift = 0)
3789{
3790 if (enmExtend == kArmv8A64InstrExtend_Default)
3791 enmExtend = f64Bit ? kArmv8A64InstrExtend_UxtX : kArmv8A64InstrExtend_UxtW;
3792 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(cShift <= 4);
3793
3794 return ((uint32_t)f64Bit << 31)
3795 | ((uint32_t)fSub << 30)
3796 | ((uint32_t)fSetFlags << 29)
3797 | UINT32_C(0x0b200000)
3798 | (iRegSrc2 << 16)
3799 | ((uint32_t)enmExtend << 13)
3800 | (cShift << 10)
3801 | (iRegSrc1 << 5)
3802 | iRegResult;
3803}
3804
3805
3806/**
3807 * A64: Encodes either adc, adcs, sbc or sbcs with two source registers.
3808 *
3809 * @returns The encoded instruction.
3810 * @param fSub true for sbc and sbcs, false for adc and
3811 * adcs.
3812 * @param iRegResult The register to store the result in. SP is
3813 * NOT valid, but ZR is.
3814 * @param iRegSrc1 The register containing the augend (@a fSub
3815 * = false) or minuend (@a fSub = true).
3816 * SP is NOT valid, but ZR is.
3817 * @param iRegSrc2 The register containing the addend (@a fSub
3818 * = false) or subtrahend (@a fSub = true).
3819 * SP is NOT valid, but ZR is.
3820 * @param f64Bit true for 64-bit GPRs (default), false for
3821 * 32-bit GPRs.
3822 * @param fSetFlags Whether to set flags (adds / subs) or not
3823 * (add / sub - default).
3824 */
3825DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcSbc(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3826 bool f64Bit = true, bool fSetFlags = false)
3827{
3828 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3829
3830 return ((uint32_t)f64Bit << 31)
3831 | ((uint32_t)fSub << 30)
3832 | ((uint32_t)fSetFlags << 29)
3833 | UINT32_C(0x1a000000)
3834 | (iRegSrc2 << 16)
3835 | (iRegSrc1 << 5)
3836 | iRegResult;
3837}
3838
3839
3840/** ADC dst, reg1, reg2 */
3841DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3842 bool f64Bit = true, bool fSetFlags = false)
3843{
3844 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
3845}
3846
3847
3848/** ADCS dst, reg1, reg2 */
3849DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3850{
3851 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
3852}
3853
3854
3855/** SBC dst, reg1, reg2 */
3856DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
3857 bool f64Bit = true, bool fSetFlags = false)
3858{
3859 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
3860}
3861
3862
3863/** SBCS dst, reg1, reg2 */
3864DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3865{
3866 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
3867}
3868
3869
3870/**
3871 * A64: Encodes a B (unconditional branch w/ imm) instruction.
3872 *
3873 * @returns The encoded instruction.
3874 * @param iImm26 Signed number of instructions to jump (i.e. *4).
3875 */
3876DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrB(int32_t iImm26)
3877{
3878 Assert(iImm26 >= -67108864 && iImm26 < 67108864);
3879 return UINT32_C(0x14000000) | ((uint32_t)iImm26 & UINT32_C(0x3ffffff));
3880}
3881
3882
3883/**
3884 * A64: Encodes a BL (unconditional call w/ imm) instruction.
3885 *
3886 * @returns The encoded instruction.
3887 * @param iImm26 Signed number of instructions to jump (i.e. *4).
3888 */
3889DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBl(int32_t iImm26)
3890{
3891 return Armv8A64MkInstrB(iImm26) | RT_BIT_32(31);
3892}
3893
3894
3895/**
3896 * A64: Encodes a BR (unconditional branch w/ register) instruction.
3897 *
3898 * @returns The encoded instruction.
3899 * @param iReg The register containing the target address.
3900 */
3901DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBr(uint32_t iReg)
3902{
3903 Assert(iReg < 32);
3904 return UINT32_C(0xd61f0000) | (iReg << 5);
3905}
3906
3907
3908/**
3909 * A64: Encodes a BLR instruction.
3910 *
3911 * @returns The encoded instruction.
3912 * @param iReg The register containing the target address.
3913 */
3914DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBlr(uint32_t iReg)
3915{
3916 return Armv8A64MkInstrBr(iReg) | RT_BIT_32(21);
3917}
3918
3919
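/*
 * Usage sketch (illustrative only): the immediate is an instruction count, not
 * a byte offset.  Jumping four instructions (16 bytes) forward and calling one
 * instruction back should encode as follows:
 *
 *     Assert(Armv8A64MkInstrB(4)   == UINT32_C(0x14000004));
 *     Assert(Armv8A64MkInstrBl(-1) == UINT32_C(0x97ffffff));
 */

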
3920/**
3921 * A64: Encodes CBZ and CBNZ (conditional branch w/ immediate) instructions.
3922 *
3923 * @returns The encoded instruction.
3924 * @param fJmpIfNotZero false to jump if register is zero, true to jump if
3925 * it's not zero.
3926 * @param iImm19 Signed number of instructions to jump (i.e. *4).
3927 * @param iReg The GPR to check for zero / non-zero value.
3928 * @param f64Bit true for 64-bit register, false for 32-bit.
3929 */
3930DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbzCbnz(bool fJmpIfNotZero, int32_t iImm19, uint32_t iReg, bool f64Bit = true)
3931{
3932 Assert(iReg < 32); Assert(iImm19 >= -262144 && iImm19 < 262144);
3933 return ((uint32_t)f64Bit << 31)
3934 | UINT32_C(0x34000000)
3935 | ((uint32_t)fJmpIfNotZero << 24)
3936 | (((uint32_t)iImm19 & 0x7ffff) << 5)
3937 | iReg;
3938}
3939
3940
3941/** A64: Encodes the CBZ instructions. */
3942DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
3943{
3944 return Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
3945}
3946
3947
3948/** A64: Encodes the CBNZ instructions. */
3949DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbnz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
3950{
3951 return Armv8A64MkInstrCbzCbnz(true /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
3952}
3953
3954
3955/**
3956 * A64: Encodes TBZ and TBNZ (conditional branch w/ immediate) instructions.
3957 *
3958 * @returns The encoded instruction.
3959 * @param fJmpIfNotZero false to jump if register is zero, true to jump if
3960 * it's not zero.
3961 * @param iImm14 Signed number of instructions to jump (i.e. *4).
3962 * @param iReg The GPR to check for zero / non-zero value.
3963 * @param iBitNo The bit to test for.
3964 */
3965DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbzTbnz(bool fJmpIfNotZero, int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
3966{
3967 Assert(iReg < 32); Assert(iImm14 >= -8192 && iImm14 < 8192); Assert(iBitNo < 64);
3968 return ((uint32_t)(iBitNo & 0x20) << (31-5))
3969 | UINT32_C(0x36000000)
3970 | ((uint32_t)fJmpIfNotZero << 24)
3971 | ((iBitNo & 0x1f) << 19)
3972 | (((uint32_t)iImm14 & 0x3fff) << 5)
3973 | iReg;
3974}
3975
3976
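/*
 * Usage sketch (illustrative only): 'tbnz x0, #63, #+0x10' tests the sign bit
 * of X0 and jumps four instructions forward; it should encode as 0xb7f80080:
 *
 *     uint32_t const uInstr = Armv8A64MkInstrTbzTbnz(true, 4, ARMV8_A64_REG_X0, 63);
 *     Assert(uInstr == UINT32_C(0xb7f80080));
 */
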
3977
3978/** Armv8 Condition codes. */
3979typedef enum ARMV8INSTRCOND
3980{
3981 kArmv8InstrCond_Eq = 0, /**< 0 - Equal - Zero set. */
3982 kArmv8InstrCond_Ne, /**< 1 - Not equal - Zero clear. */
3983
3984 kArmv8InstrCond_Cs, /**< 2 - Carry set (also known as 'HS'). */
3985 kArmv8InstrCond_Hs = kArmv8InstrCond_Cs, /**< 2 - Unsigned higher or same. */
3986 kArmv8InstrCond_Cc, /**< 3 - Carry clear (also known as 'LO'). */
3987 kArmv8InstrCond_Lo = kArmv8InstrCond_Cc, /**< 3 - Unsigned lower. */
3988
3989 kArmv8InstrCond_Mi, /**< 4 - Negative result (minus). */
3990 kArmv8InstrCond_Pl, /**< 5 - Positive or zero result (plus). */
3991
3992 kArmv8InstrCond_Vs, /**< 6 - Overflow set. */
3993 kArmv8InstrCond_Vc, /**< 7 - Overflow clear. */
3994
3995 kArmv8InstrCond_Hi, /**< 8 - Unsigned higher. */
3996 kArmv8InstrCond_Ls, /**< 9 - Unsigned lower or same. */
3997
3998 kArmv8InstrCond_Ge, /**< a - Signed greater or equal. */
3999 kArmv8InstrCond_Lt, /**< b - Signed less than. */
4000
4001 kArmv8InstrCond_Gt, /**< c - Signed greater than. */
4002 kArmv8InstrCond_Le, /**< d - Signed less or equal. */
4003
4004 kArmv8InstrCond_Al, /**< e - Condition is always true. */
4005 kArmv8InstrCond_Al1 /**< f - Condition is always true. */
4006} ARMV8INSTRCOND;
4007
4008/**
4009 * A64: Encodes conditional branch instruction w/ immediate target.
4010 *
4011 * @returns The encoded instruction.
4012 * @param enmCond The branch condition.
4013 * @param iImm19 Signed number of instructions to jump (i.e. *4).
4014 */
4015DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBCond(ARMV8INSTRCOND enmCond, int32_t iImm19)
4016{
4017 Assert((unsigned)enmCond < 16);
4018 return UINT32_C(0x54000000)
4019 | (((uint32_t)iImm19 & 0x7ffff) << 5)
4020 | (uint32_t)enmCond;
4021}
4022
4023
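/*
 * Usage sketch (illustrative only): a conditional branch eight instructions
 * forward, 'b.eq #+0x20', should encode as 0x54000100:
 *
 *     uint32_t const uInstr = Armv8A64MkInstrBCond(kArmv8InstrCond_Eq, 8);
 *     Assert(uInstr == UINT32_C(0x54000100));
 */

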
4024/**
4025 * A64: Encodes the BRK instruction.
4026 *
4027 * @returns The encoded instruction.
4028 * @param uImm16 Unsigned immediate value.
4029 */
4030DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBrk(uint32_t uImm16)
4031{
4032 Assert(uImm16 < _64K);
4033 return UINT32_C(0xd4200000)
4034 | (uImm16 << 5);
4035}
4036
4037/** @name ARMA64_NZCV_F_XXX - readable NZCV mask for CCMP and friends.
4038 * @{ */
4039#define ARMA64_NZCV_F_N0_Z0_C0_V0 UINT32_C(0x0)
4040#define ARMA64_NZCV_F_N0_Z0_C0_V1 UINT32_C(0x1)
4041#define ARMA64_NZCV_F_N0_Z0_C1_V0 UINT32_C(0x2)
4042#define ARMA64_NZCV_F_N0_Z0_C1_V1 UINT32_C(0x3)
4043#define ARMA64_NZCV_F_N0_Z1_C0_V0 UINT32_C(0x4)
4044#define ARMA64_NZCV_F_N0_Z1_C0_V1 UINT32_C(0x5)
4045#define ARMA64_NZCV_F_N0_Z1_C1_V0 UINT32_C(0x6)
4046#define ARMA64_NZCV_F_N0_Z1_C1_V1 UINT32_C(0x7)
4047
4048#define ARMA64_NZCV_F_N1_Z0_C0_V0 UINT32_C(0x8)
4049#define ARMA64_NZCV_F_N1_Z0_C0_V1 UINT32_C(0x9)
4050#define ARMA64_NZCV_F_N1_Z0_C1_V0 UINT32_C(0xa)
4051#define ARMA64_NZCV_F_N1_Z0_C1_V1 UINT32_C(0xb)
4052#define ARMA64_NZCV_F_N1_Z1_C0_V0 UINT32_C(0xc)
4053#define ARMA64_NZCV_F_N1_Z1_C0_V1 UINT32_C(0xd)
4054#define ARMA64_NZCV_F_N1_Z1_C1_V0 UINT32_C(0xe)
4055#define ARMA64_NZCV_F_N1_Z1_C1_V1 UINT32_C(0xf)
4056/** @} */
4057
4058/**
4059 * A64: Encodes CCMP or CCMN with two register operands.
4060 *
4061 * @returns The encoded instruction.
4062 * @param iRegSrc1 The 1st register. SP is NOT valid, but ZR is.
4063 * @param iRegSrc2 The 2nd register. SP is NOT valid, but ZR is.
4064 * @param fNzcv The N, Z, C & V flags values to load if the condition
4065 * does not match. See ARMA64_NZCV_F_XXX.
4066 * @param enmCond The condition guarding the compare.
4067 * @param fCCmp Set for CCMP (default), clear for CCMN.
4068 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4069 */
4070DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4071 ARMV8INSTRCOND enmCond, bool fCCmp = true, bool f64Bit = true)
4072{
4073 Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(fNzcv < 16);
4074
4075 return ((uint32_t)f64Bit << 31)
4076 | ((uint32_t)fCCmp << 30)
4077 | UINT32_C(0x3a400000)
4078 | (iRegSrc2 << 16)
4079 | ((uint32_t)enmCond << 12)
4080 | (iRegSrc1 << 5)
4081 | fNzcv;
4082}
4083
4084/** CCMP w/ reg. */
4085DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4086 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4087{
4088 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4089}
4090
4091
4092/** CCMN w/ reg. */
4093DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4094 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4095{
4096 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4097}
4098
4099
4100/**
4101 * A64: Encodes CCMP or CCMN with register and 5-bit immediate.
4102 *
4103 * @returns The encoded instruction.
4104 * @param iRegSrc The register. SP is NOT valid, but ZR is.
4105 * @param uImm5 The immediate, to compare iRegSrc with.
4106 * @param fNzcv The N, Z, C & V flags values to load if the condition
4107 * does not match. See ARMA64_NZCV_F_XXX.
4108 * @param enmCond The condition guarding the compare.
4109 * @param fCCmp Set for CCMP (default), clear for CCMN.
4110 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4111 */
4112DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv, ARMV8INSTRCOND enmCond,
4113 bool fCCmp = true, bool f64Bit = true)
4114{
4115 Assert(iRegSrc < 32); Assert(uImm5 < 32); Assert(fNzcv < 16);
4116
4117 return ((uint32_t)f64Bit << 31)
4118 | ((uint32_t)fCCmp << 30)
4119 | UINT32_C(0x3a400800)
4120 | (uImm5 << 16)
4121 | ((uint32_t)enmCond << 12)
4122 | (iRegSrc << 5)
4123 | fNzcv;
4124}
4125
4126/** CCMP w/ immediate. */
4127DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4128 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4129{
4130 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4131}
4132
4133
4134/** CCMN w/ immediate. */
4135DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4136 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4137{
4138 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4139}
4140
4141
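/*
 * Usage sketch (illustrative only): 'ccmp x1, #0, #0, eq' compares X1 with 0
 * when the EQ condition holds and otherwise loads NZCV with 0; it should
 * encode as 0xfa400820:
 *
 *     uint32_t const uInstr = Armv8A64MkInstrCCmpImm(ARMV8_A64_REG_X1, 0, ARMA64_NZCV_F_N0_Z0_C0_V0,
 *                                                    kArmv8InstrCond_Eq);
 *     Assert(uInstr == UINT32_C(0xfa400820));
 */

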
4142/**
4143 * A64: Encodes CSEL, CSINC, CSINV and CSNEG (three registers)
4144 *
4145 * @returns The encoded instruction.
4146 * @param uOp Opcode bit 30.
4147 * @param uOp2 Opcode bits 11:10.
4148 * @param iRegResult The result register. SP is NOT valid, but ZR is.
4149 * @param iRegSrc1 The 1st source register. SP is NOT valid, but ZR is.
4150 * @param iRegSrc2 The 2nd source register. SP is NOT valid, but ZR is.
4151 * @param enmCond The condition guarding the compare.
4152 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4153 */
4154DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCondSelect(uint32_t uOp, uint32_t uOp2, uint32_t iRegResult, uint32_t iRegSrc1,
4155 uint32_t iRegSrc2, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4156{
4157 Assert(uOp <= 1); Assert(uOp2 <= 1); Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4158
4159 return ((uint32_t)f64Bit << 31)
4160 | (uOp << 30)
4161 | UINT32_C(0x1a800000)
4162 | (iRegSrc2 << 16)
4163 | ((uint32_t)enmCond << 12)
4164 | (uOp2 << 10)
4165 | (iRegSrc1 << 5)
4166 | iRegResult;
4167}
4168
4169
4170/** A64: Encodes CSEL.
4171 * @see Armv8A64MkInstrCondSelect for details. */
4172DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSel(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4173 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4174{
4175 return Armv8A64MkInstrCondSelect(0, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4176}
4177
4178
4179/** A64: Encodes CSINC.
4180 * @see Armv8A64MkInstrCondSelect for details. */
4181DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4182 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4183{
4184 return Armv8A64MkInstrCondSelect(0, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4185}
4186
4187
4188/** A64: Encodes CSET.
4189 * @see Armv8A64MkInstrCondSelect for details. */
4190DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSet(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4191{
4192 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4193 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4194 return Armv8A64MkInstrCSInc(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4195}
4196
4197
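/*
 * Usage sketch (illustrative only): CSET is CSINC with both sources being the
 * zero register and the condition inverted (the XOR with 1 above), so
 * 'cset w0, eq' becomes 'csinc w0, wzr, wzr, ne' and should encode as 0x1a9f17e0:
 *
 *     uint32_t const uInstr = Armv8A64MkInstrCSet(ARMV8_A64_REG_X0, kArmv8InstrCond_Eq, false);
 *     Assert(uInstr == UINT32_C(0x1a9f17e0));
 */

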
4198/** A64: Encodes CSINV.
4199 * @see Armv8A64MkInstrCondSelect for details. */
4200DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInv(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4201 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4202{
4203 return Armv8A64MkInstrCondSelect(1, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4204}
4205
4206/** A64: Encodes CSETM.
4207 * @see Armv8A64MkInstrCondSelect for details. */
4208DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSetM(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4209{
4210 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4211 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4212 return Armv8A64MkInstrCSInv(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4213}
4214
4215
4216/** A64: Encodes CSNEG.
4217 * @see Armv8A64MkInstrCondSelect for details. */
4218DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSNeg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4219 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4220{
4221 return Armv8A64MkInstrCondSelect(1, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4222}
4223
4224
4225/**
4226 * A64: Encodes REV instruction.
4227 *
4228 * @returns The encoded instruction.
4229 * @param iRegDst The destination register. SP is NOT valid.
4230 * @param iRegSrc The source register. SP is NOT valid, but ZR is.
4231 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4232 */
4233DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4234{
4235 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4236
4237 return ((uint32_t)f64Bit << 31)
4238 | UINT32_C(0x5ac00800)
4239 | ((uint32_t)f64Bit << 10)
4240 | (iRegSrc << 5)
4241 | iRegDst;
4242}
4243
4244
4245/**
4246 * A64: Encodes REV16 instruction.
4247 *
4248 * @returns The encoded instruction.
4249 * @param iRegDst The destination register. SP is NOT valid.
4250 * @param iRegSrc The source register. SP is NOT valid, but ZR is.
4251 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4252 */
4253DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev16(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4254{
4255 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4256
4257 return ((uint32_t)f64Bit << 31)
4258 | UINT32_C(0x5ac00400)
4259 | (iRegSrc << 5)
4260 | iRegDst;
4261}
4262
4263
4264/**
4265 * A64: Encodes SETF8 & SETF16.
4266 *
4267 * @returns The encoded instruction.
4268 * @param iRegResult The register holding the result. SP is NOT valid.
4269 * @param f16Bit Set for SETF16, clear for SETF8.
4270 */
4271DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSetF8SetF16(uint32_t iRegResult, bool f16Bit)
4272{
4273 Assert(iRegResult < 32);
4274
4275 return UINT32_C(0x3a00080d)
4276 | ((uint32_t)f16Bit << 14)
4277 | (iRegResult << 5);
4278}
4279
4280
4281/**
4282 * A64: Encodes RMIF.
4283 *
4284 * @returns The encoded instruction.
4285 * @param iRegSrc The source register to get flags from.
4286 * @param cRotateRight The right rotate count (LSB bit offset).
4287 * @param fMask Mask of which flag bits to set:
4288 * - bit 0: V
4289 * - bit 1: C
4290 * - bit 2: Z
4291 * - bit 3: N
4292 */
4293DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRmif(uint32_t iRegSrc, uint32_t cRotateRight, uint32_t fMask)
4294{
4295 Assert(iRegSrc < 32); Assert(cRotateRight < 64); Assert(fMask <= 0xf);
4296
4297 return UINT32_C(0xba000400)
4298 | (cRotateRight << 15)
4299 | (iRegSrc << 5)
4300 | fMask;
4301}
4302
4303
4304/**
4305 * A64: Encodes MRS (for reading a system register into a GPR).
4306 *
4307 * @returns The encoded instruction.
4308 * @param iRegDst The register to put the result into. SP is NOT valid.
4309 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4310 * IPRT specific format, of the register to read.
4311 */
4312DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMrs(uint32_t iRegDst, uint32_t idSysReg)
4313{
4314 Assert(iRegDst < 32);
4315 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4316
4317 /* Note. The top bit of idSysReg must always be set and is also set in
4318 0xd5300000, otherwise we'll be encoding a different instruction. */
4319 return UINT32_C(0xd5300000)
4320 | (idSysReg << 5)
4321 | iRegDst;
4322}
4323
4324
4325/**
4326 * A64: Encodes MSR (for writing a GPR to a system register).
4327 *
4328 * @returns The encoded instruction.
4329 * @param iRegSrc The register which value to write. SP is NOT valid.
4330 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4331 * IPRT specific format, of the register to write.
4332 */
4333DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMsr(uint32_t iRegSrc, uint32_t idSysReg)
4334{
4335 Assert(iRegSrc < 32);
4336 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4337
4338 /* Note. The top bit of idSysReg must always be set and is also set in
4339 0xd5100000, otherwise we'll be encoding a different instruction. */
4340 return UINT32_C(0xd5100000)
4341 | (idSysReg << 5)
4342 | iRegSrc;
4343}
4344
4345
4346/** @} */
4347
4348
4349/** @defgroup grp_rt_armv8_mkinstr_vec Vector Instruction Encoding Helpers
4350 * @ingroup grp_rt_armv8_mkinstr
4351 *
4352 * A few inlined functions and macros for assisting in encoding common ARMv8
4353 * Neon/SIMD instructions.
4354 *
4355 * @{ */
4356
4357/**
4358 * A64: Encodes ORR (vector, register).
4359 *
4360 * @returns The encoded instruction.
4361 * @param iVecRegDst The vector register to put the result into.
4362 * @param iVecRegSrc1 The 1st source register.
4363 * @param iVecRegSrc2 The 2nd source register.
4364 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4365 * or just the low 64-bit (false).
4366 */
4367DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrOrr(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4368 bool f128Bit = true)
4369{
4370 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
4371
4372 return UINT32_C(0x0ea01c00)
4373 | ((uint32_t)f128Bit << 30)
4374 | (iVecRegSrc2 << 16)
4375 | (iVecRegSrc1 << 5)
4376 | iVecRegDst;
4377}
4378
4379
4380/**
4381 * A64: Encodes EOR (vector, register).
4382 *
4383 * @returns The encoded instruction.
4384 * @param iVecRegDst The vector register to put the result into.
4385 * @param iVecRegSrc1 The 1st source register.
4386 * @param iVecRegSrc2 The 2nd source register.
4387 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4388 * or just the low 64-bit (false).
4389 */
4390DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrEor(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4391 bool f128Bit = true)
4392{
4393 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
4394
4395 return UINT32_C(0x2e201c00)
4396 | ((uint32_t)f128Bit << 30)
4397 | (iVecRegSrc2 << 16)
4398 | (iVecRegSrc1 << 5)
4399 | iVecRegDst;
4400}
4401
4402
4403/** Armv8 UMOV/INS vector element size. */
4404typedef enum ARMV8INSTRUMOVINSSZ
4405{
4406 kArmv8InstrUmovInsSz_U8 = 0, /**< Byte. */
4407 kArmv8InstrUmovInsSz_U16 = 1, /**< Halfword. */
4408 kArmv8InstrUmovInsSz_U32 = 2, /**< 32-bit. */
4409 kArmv8InstrUmovInsSz_U64 = 3 /**< 64-bit (only valid when the destination is a 64-bit register). */
4410} ARMV8INSTRUMOVINSSZ;
4411
4412
4413/**
4414 * A64: Encodes UMOV (vector, register).
4415 *
4416 * @returns The encoded instruction.
4417 * @param iRegDst The register to put the result into.
4418 * @param iVecRegSrc The vector source register.
4419 * @param idxElem The element index.
4420 * @param enmSz Element size of the source vector register.
4421 * @param fDst64Bit Flag whether the destination register is 64-bit (true) or 32-bit (false).
4422 */
4423DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUmov(uint32_t iRegDst, uint32_t iVecRegSrc, uint8_t idxElem,
4424 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64, bool fDst64Bit = true)
4425{
4426 Assert(iRegDst < 32); Assert(iVecRegSrc < 32);
4427 Assert((fDst64Bit && enmSz == kArmv8InstrUmovInsSz_U64) || (!fDst64Bit && enmSz != kArmv8InstrUmovInsSz_U64));
4428 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
4429 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
4430 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
4431 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
4432
4433 return UINT32_C(0x0e003c00)
4434 | ((uint32_t)fDst64Bit << 30)
4435 | ((uint32_t)idxElem << (16 + enmSz + 1))
4436 | (RT_BIT_32(enmSz) << 16)
4437 | (iVecRegSrc << 5)
4438 | iRegDst;
4439}
4440
4441
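/*
 * Usage sketch (illustrative only): extracting the high 64-bit lane of a
 * vector register, 'umov x0, v1.d[1]', should encode as 0x4e183c20:
 *
 *     uint32_t const uInstr = Armv8A64MkVecInstrUmov(ARMV8_A64_REG_X0, 1, 1);
 *     Assert(uInstr == UINT32_C(0x4e183c20));
 */

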
4442/**
4443 * A64: Encodes INS (vector, register).
4444 *
4445 * @returns The encoded instruction.
4446 * @param iVecRegDst The vector register to put the result into.
4447 * @param iRegSrc The source register.
4448 * @param idxElem The element index for the destination.
4449 * @param enmSz Element size of the source vector register.
4450 *
4451 * @note This instruction assumes a 32-bit W<n> register for all non-64-bit element sizes.
4452 */
4453DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrIns(uint32_t iVecRegDst, uint32_t iRegSrc, uint8_t idxElem,
4454 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64)
4455{
4456 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
4457 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
4458 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
4459 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
4460 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
4461
4462 return UINT32_C(0x4e001c00)
4463 | ((uint32_t)idxElem << (16 + enmSz + 1))
4464 | (RT_BIT_32(enmSz) << 16)
4465 | (iRegSrc << 5)
4466 | iVecRegDst;
4467}
4468
4469
4470/**
4471 * A64: Encodes DUP (vector, register).
4472 *
4473 * @returns The encoded instruction.
4474 * @param iVecRegDst The vector register to put the result into.
4475 * @param iRegSrc The source register (ZR is valid).
4476 * @param enmSz Element size of the source vector register.
4477 * @param f128Bit Flag whether the instruction operates on the whole 128-bit of the vector register (true) or
4478 * just the low 64-bit (false).
4479 *
4480 * @note This instruction assumes a 32-bit W<n> register for all non-64-bit element sizes.
4481 */
4482DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrDup(uint32_t iVecRegDst, uint32_t iRegSrc, ARMV8INSTRUMOVINSSZ enmSz,
4483 bool f128Bit = true)
4484{
4485 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
4486 Assert( (enmSz == kArmv8InstrUmovInsSz_U8)
4487 || (enmSz == kArmv8InstrUmovInsSz_U16)
4488 || (enmSz == kArmv8InstrUmovInsSz_U32)
4489 || (enmSz == kArmv8InstrUmovInsSz_U64));
4490
4491 return UINT32_C(0x0e000c00)
4492 | ((uint32_t)f128Bit << 30)
4493 | (RT_BIT_32(enmSz) << 16)
4494 | (iRegSrc << 5)
4495 | iVecRegDst;
4496}
4497
4498
4499/** Armv8 vector compare to zero vector element size. */
4500typedef enum ARMV8INSTRVECCMPZEROSZ
4501{
4502 kArmv8InstrCmpZeroSz_S8 = 0, /**< Byte. */
4503 kArmv8InstrCmpZeroSz_S16 = 1, /**< Halfword. */
4504 kArmv8InstrCmpZeroSz_S32 = 2, /**< 32-bit. */
4505 kArmv8InstrCmpZeroSz_S64 = 3 /**< 64-bit. */
4506} ARMV8INSTRVECCMPZEROSZ;
4507
4508
4509/** Armv8 vector compare to zero vector operation. */
4510typedef enum ARMV8INSTRVECCMPZEROOP
4511{
4512 kArmv8InstrCmpZeroOp_Gt = 0, /**< Greater than. */
4513 kArmv8InstrCmpZeroOp_Ge = RT_BIT_32(29), /**< Greater than or equal to. */
4514 kArmv8InstrCmpZeroOp_Eq = RT_BIT_32(12), /**< Equal to. */
4515 kArmv8InstrCmpZeroOp_Le = RT_BIT_32(29) | RT_BIT_32(12) /**< Lower than or equal to. */
4516} ARMV8INSTRVECCMPZEROOP;
4517
4518
4519/**
4520 * A64: Encodes CMGT, CMGE, CMEQ or CMLE against zero (vector, register).
4521 *
4522 * @returns The encoded instruction.
4523 * @param iVecRegDst The vector register to put the result into.
4524 * @param iVecRegSrc The vector source register.
4525 * @param enmSz Vector element size.
4526 * @param enmOp The compare operation against to encode.
4527 */
4528DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmpToZero(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECCMPZEROSZ enmSz,
4529 ARMV8INSTRVECCMPZEROOP enmOp)
4530{
4531 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
4532
4533 return UINT32_C(0x5e208800)
4534 | ((uint32_t)enmSz << 22)
4535 | (RT_BIT_32(enmSz) << 16)
4536 | (iVecRegSrc << 5)
4537 | iVecRegDst
4538 | (uint32_t)enmOp;
4539}
4540
4541
4542/**
4543 * A64: Encodes CNT (vector, register).
4544 *
4545 * @returns The encoded instruction.
4546 * @param iVecRegDst The vector register to put the result into.
4547 * @param iVecRegSrc The vector source register.
4548 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4549 * or just the low 64-bit (false).
4550 */
4551DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCnt(uint32_t iVecRegDst, uint32_t iVecRegSrc, bool f128Bit = true)
4552{
4553 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
4554
4555 return UINT32_C(0x0e205800)
4556 | ((uint32_t)f128Bit << 30)
4557 | (iVecRegSrc << 5)
4558 | iVecRegDst;
4559}
4560
4561
4562/** Armv8 vector unsigned sum long across vector element size. */
4563typedef enum ARMV8INSTRVECUADDLVSZ
4564{
4565 kArmv8InstrUAddLVSz_8B = 0, /**< 8 x 8-bit. */
4566 kArmv8InstrUAddLVSz_16B = RT_BIT_32(30), /**< 16 x 8-bit. */
4567 kArmv8InstrUAddLVSz_4H = 1, /**< 4 x 16-bit. */
4568 kArmv8InstrUAddLVSz_8H = RT_BIT_32(30) | 1, /**< 8 x 16-bit. */
4569 kArmv8InstrUAddLVSz_4S = RT_BIT_32(30) | 2 /**< 4 x 32-bit. */
4570} ARMV8INSTRVECUADDLVSZ;
4571
4572
4573/**
4574 * A64: Encodes UADDLV (vector, register).
4575 *
4576 * @returns The encoded instruction.
4577 * @param iVecRegDst The vector register to put the result into.
4578 * @param iVecRegSrc The vector source register.
4579 * @param enmSz Element size.
4580 */
4581DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUAddLV(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECUADDLVSZ enmSz)
4582{
4583 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
4584
4585 return UINT32_C(0x2e303800)
4586 | ((uint32_t)enmSz)
4587 | (iVecRegSrc << 5)
4588 | iVecRegDst;
4589}
4590
4591
4592/** @} */
4593
4594#endif /* !dtrace && __cplusplus */
4595
4596/** @} */
4597
4598#endif /* !IPRT_INCLUDED_armv8_h */
4599