/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * Vineetg: August 2010: From Android kernel work
 */

#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/preempt.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#ifdef CONFIG_ARC_HAS_LLSC

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
								\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1:	llock	%1, [%2]		\n"		\
		insn				"\n"		\
	"2:	scond	%0, [%2]		\n"		\
	"	bnz	1b			\n"		\
	"	mov	%0, 0			\n"		\
	"3:					\n"		\
	"	.section .fixup,\"ax\"		\n"		\
	"	.align	4			\n"		\
	"4:	mov	%0, %4			\n"		\
	"	j	3b			\n"		\
	"	.previous			\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.align	4			\n"		\
	"	.word	1b, 4b			\n"		\
	"	.word	2b, 4b			\n"		\
	"	.previous			\n"		\
								\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)		\
	: "cc", "memory");					\
	smp_mb()

#else	/* !CONFIG_ARC_HAS_LLSC */

#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
								\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1:	ld	%1, [%2]		\n"		\
		insn				"\n"		\
	"2:	st	%0, [%2]		\n"		\
	"	mov	%0, 0			\n"		\
	"3:					\n"		\
	"	.section .fixup,\"ax\"		\n"		\
	"	.align	4			\n"		\
	"4:	mov	%0, %4			\n"		\
	"	j	3b			\n"		\
	"	.previous			\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.align	4			\n"		\
	"	.word	1b, 4b			\n"		\
	"	.word	2b, 4b			\n"		\
	"	.previous			\n"		\
								\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)		\
	: "cc", "memory");					\
	smp_mb()

#endif

static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
					      u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
#endif

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* oldval = *uaddr; *uaddr += oparg ; ret = *uaddr */
		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or  %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif

	if (!ret)
		*oval = oldval;

	return ret;
}
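/*
 * Illustrative sketch, not part of this header: the generic futex layer
 * (kernel/futex) is the intended caller of arch_futex_atomic_op_inuser(),
 * passing a FUTEX_OP_* opcode decoded from the userspace op word. Assuming
 * a hypothetical helper name, an atomic fetch-and-add on a user futex word
 * would look like the following; on success (return 0) *oldval holds the
 * pre-operation value, otherwise -EFAULT or -ENOSYS is passed through.
 */
static inline int example_futex_fetch_add(u32 __user *uaddr, int oparg,
					  int *oldval)
{
	/* Atomically: *oldval = *uaddr; *uaddr += oparg */
	return arch_futex_atomic_op_inuser(FUTEX_OP_ADD, oparg, oldval, uaddr);
}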
/*
 * cmpxchg of futex (pagefaults disabled by caller)
 * Return 0 for success, -EFAULT otherwise
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
			      u32 newval)
{
	int ret = 0;
	u32 existval;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();	/* to guarantee atomic r-m-w of futex op */
#endif
	smp_mb();

	__asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
	"1:	llock	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	scond	%3, [%4]		\n"
	"	bnz	1b			\n"
#else
	"1:	ld	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	st	%3, [%4]		\n"
#endif
	"3:					\n"
	"	.section .fixup,\"ax\"		\n"
	"4:	mov	%0, %5			\n"
	"	j	3b			\n"
	"	.previous			\n"
	"	.section __ex_table,\"a\"	\n"
	"	.align	4			\n"
	"	.word	1b, 4b			\n"
	"	.word	2b, 4b			\n"
	"	.previous			\n"
	: "+&r" (ret), "=&r" (existval)
	: "r" (expval), "r" (newval), "r" (uaddr), "ir" (-EFAULT)
	: "cc", "memory");

	smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif
	*uval = existval;

	return ret;
}

#endif
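/*
 * Usage note, a conceptual sketch rather than code from this file: as the
 * comment above futex_atomic_cmpxchg_inatomic() says, the caller disables
 * pagefaults. The generic futex code wraps the call roughly like this:
 *
 *	u32 curval;
 *	int ret;
 *
 *	pagefault_disable();
 *	ret = futex_atomic_cmpxchg_inatomic(&curval, uaddr, expval, newval);
 *	pagefault_enable();
 *
 * On return, ret is 0 or -EFAULT, curval holds the value observed at uaddr,
 * and the store of newval happened only if curval == expval.
 */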