Mirror of https://github.com/torvalds/linux.git, synced 2024-11-19 10:31:48 +00:00.
Commit 159fd7b8d3 — arm64/sve: Writes to ZCR_EL1 are self-synchronising, and so may
be expensive in typical implementations. This patch adopts the approach used for
costly system register writes elsewhere in the kernel: the system register write is
suppressed if it would not change the stored value. Since the common case will be
that of switching between tasks that use the same vector length as one another,
prediction hit rates on the conditional branch should be reasonably good, with a
lower expected amortised cost than the unconditional execution of a heavyweight
self-synchronising instruction.
Signed-off-by: Dave Martin <Dave.Martin@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
File: 61 lines, 1.3 KiB, ArmAsm.
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>

|
/*
|
|
* Save the FP registers.
|
|
*
|
|
* x0 - pointer to struct fpsimd_state
|
|
*/
|
|
ENTRY(fpsimd_save_state)
|
|
fpsimd_save x0, 8
|
|
ret
|
|
ENDPROC(fpsimd_save_state)
|
|
|
|
/*
|
|
* Load the FP registers.
|
|
*
|
|
* x0 - pointer to struct fpsimd_state
|
|
*/
|
|
ENTRY(fpsimd_load_state)
|
|
fpsimd_restore x0, 8
|
|
ret
|
|
ENDPROC(fpsimd_load_state)
|
|
|
|
#ifdef CONFIG_ARM64_SVE
|
|
ENTRY(sve_save_state)
|
|
sve_save 0, x1, 2
|
|
ret
|
|
ENDPROC(sve_save_state)
|
|
|
|
ENTRY(sve_load_state)
|
|
sve_load 0, x1, x2, 3, x4
|
|
ret
|
|
ENDPROC(sve_load_state)
|
|
|
|
ENTRY(sve_get_vl)
|
|
_sve_rdvl 0, 1
|
|
ret
|
|
ENDPROC(sve_get_vl)
|
|
#endif /* CONFIG_ARM64_SVE */
|