/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/linkage.h>

#include <asm/asm-uaccess.h>
#include <asm/assembler.h>
#include <asm/cache.h>

/*
 * Copy from user space to a kernel buffer (alignment handled by the hardware)
 *
 * Parameters:
 *	x0 - to
 *	x1 - from
 *	x2 - n
 * Returns:
 *	x0 - bytes not copied
 */

/*
 * The ldrb1/ldrh1/ldr1/ldp1 and strb1/strh1/str1/stp1 macros below are the
 * per-size load/store accessors consumed by copy_template.S (included in
 * the function body further down).  Loads read from the user-space source,
 * so they go through uao_user_alternative / uao_ldp: the privileged
 * (ldrb/ldrh/ldr) or unprivileged (ldtrb/ldtrh/ldtr) encoding is selected
 * by alternative patching, and every user access gets an exception-table
 * entry whose fixup is the 9998 label in the .fixup section at the bottom.
 * Stores write to the kernel destination buffer, so they are plain
 * instructions with no fixup.
 */

	/* Load one byte from user memory, post-increment \regB by \val */
	.macro ldrb1 ptr, regB, val
	uao_user_alternative 9998f, ldrb, ldtrb, \ptr, \regB, \val
	.endm

	/* Store one byte to the kernel buffer, post-increment \regB by \val */
	.macro strb1 ptr, regB, val
	strb \ptr, [\regB], \val
	.endm

	/* Load one halfword (16 bits) from user memory */
	.macro ldrh1 ptr, regB, val
	uao_user_alternative 9998f, ldrh, ldtrh, \ptr, \regB, \val
	.endm

	/* Store one halfword to the kernel buffer */
	.macro strh1 ptr, regB, val
	strh \ptr, [\regB], \val
	.endm

	/* Load one 64-bit register from user memory */
	.macro ldr1 ptr, regB, val
	uao_user_alternative 9998f, ldr, ldtr, \ptr, \regB, \val
	.endm

	/* Store one 64-bit register to the kernel buffer */
	.macro str1 ptr, regB, val
	str \ptr, [\regB], \val
	.endm

	/* Load a pair of 64-bit registers from user memory */
	.macro ldp1 ptr, regB, regC, val
	uao_ldp 9998f, \ptr, \regB, \regC, \val
	.endm

	/* Store a pair of 64-bit registers to the kernel buffer */
	.macro stp1 ptr, regB, regC, val
	stp \ptr, \regB, [\regC], \val
	.endm

end	.req	x5		// one-past-the-end of the destination buffer

ENTRY(__arch_copy_from_user)
	add	end, x0, x2	// end = to + n; consumed by the fault fixup below
#include "copy_template.S"
	mov	x0, #0				// Nothing to copy
	ret
ENDPROC(__arch_copy_from_user)
EXPORT_SYMBOL(__arch_copy_from_user)

	.section .fixup,"ax"
	.align	2
	/*
	 * Fault landing pad, reached via the exception-table entries emitted
	 * by the load macros above when a user access faults mid-copy.
	 * NOTE(review): 'dst' is the destination cursor register aliased in
	 * copy_template.S; end - dst therefore yields the bytes not yet
	 * copied, which is this function's documented return value.
	 */
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous