/* bitops.S: Low level assembler bit operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
 */

#include <asm/cprefix.h>
#include <asm/ptrace.h>
#include <asm/psr.h>

	.text
	.align	4

	/* Take bits in %g2 and set them in word at %g1,
	 * return whether bits were set in original value
	 * in %g2. %g4 holds value to restore into %o7
	 * in delay slot of jmpl return, %g3 + %g5 + %g7 can be
	 * used as temporaries and thus is considered clobbered
	 * by all callers.
	 *
	 * Pattern shared by every routine below:
	 *   1. save PSR in %g3, set the PIL field to mask local interrupts
	 *      (three nops pad the WRPSR hazard slots);
	 *   2. on SMP, spin on the global bitops_spinlock byte with ldstub
	 *      so the load/modify/store is atomic across CPUs;
	 *   3. load old value, compute new value into %g5 and the
	 *      "old & mask" result into %g2, store new value;
	 *   4. on SMP, release the lock by storing zero to the byte;
	 *   5. restore the saved PSR and return through %o7, restoring
	 *      the caller's %o7 from %g4 in the delay slot.
	 */
	.globl	___set_bit
___set_bit:
	rd	%psr, %g3		! %g3 = saved PSR, restored on exit
	nop; nop; nop;
	or	%g3, PSR_PIL, %g5	! raise PIL field: mask local interrupts
	wr	%g5, 0x0, %psr		! write new PSR...
	nop; nop; nop			! ...and pad the WRPSR hazard slots
#ifdef __SMP__
	set	C_LABEL(bitops_spinlock), %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	 ld	[%g1], %g7		! %g7 = old word (delay slot on SMP)
	or	%g7, %g2, %g5		! %g5 = old | mask (new value)
	and	%g7, %g2, %g2		! %g2 = old & mask (returned: bits already set)
#ifdef __SMP__
	st	%g5, [%g1]		! store new word, then drop the byte lock
	set	C_LABEL(bitops_spinlock), %g5
	stb	%g0, [%g5]
#else
	st	%g5, [%g1]		! store new word
#endif
	wr	%g3, 0x0, %psr		! restore original PSR (interrupts back on)
	nop; nop; nop			! WRPSR hazard slots again
	jmpl	%o7, %g0		! return via address in %o7
	 mov	%g4, %o7		! delay slot: restore caller's %o7 from %g4

	/* Same as above, but clears the bits from %g2 instead. */
	.globl	___clear_bit
___clear_bit:
	rd	%psr, %g3		! %g3 = saved PSR
	nop; nop; nop
	or	%g3, PSR_PIL, %g5	! mask local interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR hazard slots
#ifdef __SMP__
	set	C_LABEL(bitops_spinlock), %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	 ld	[%g1], %g7		! %g7 = old word (delay slot on SMP)
	andn	%g7, %g2, %g5		! %g5 = old & ~mask (new value)
	and	%g7, %g2, %g2		! %g2 = old & mask (returned)
#ifdef __SMP__
	st	%g5, [%g1]		! store new word, release the byte lock
	set	C_LABEL(bitops_spinlock), %g5
	stb	%g0, [%g5]
#else
	st	%g5, [%g1]		! store new word
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop
	jmpl	%o7, %g0		! return via %o7
	 mov	%g4, %o7		! delay slot: restore caller's %o7

	/* Same thing again, but this time toggles the bits from %g2. */
	.globl	___change_bit
___change_bit:
	rd	%psr, %g3		! %g3 = saved PSR
	nop; nop; nop
	or	%g3, PSR_PIL, %g5	! mask local interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR hazard slots
#ifdef __SMP__
	set	C_LABEL(bitops_spinlock), %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	 ld	[%g1], %g7		! %g7 = old word (delay slot on SMP)
	xor	%g7, %g2, %g5		! %g5 = old ^ mask (new value)
	and	%g7, %g2, %g2		! %g2 = old & mask (returned)
#ifdef __SMP__
	st	%g5, [%g1]		! store new word, release the byte lock
	set	C_LABEL(bitops_spinlock), %g5
	stb	%g0, [%g5]
#else
	st	%g5, [%g1]		! store new word
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop
	jmpl	%o7, %g0		! return via %o7
	 mov	%g4, %o7		! delay slot: restore caller's %o7

	/* Now the little endian versions: identical protocol, but they
	 * operate on the single byte at [%g1] (ldub/stb) rather than a
	 * 32-bit word.
	 */
	.globl	___set_le_bit
___set_le_bit:
	rd	%psr, %g3		! %g3 = saved PSR
	nop; nop; nop
	or	%g3, PSR_PIL, %g5	! mask local interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR hazard slots
#ifdef __SMP__
	set	C_LABEL(bitops_spinlock), %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	 ldub	[%g1], %g7		! %g7 = old byte (delay slot on SMP)
	or	%g7, %g2, %g5		! %g5 = old | mask (new value)
	and	%g7, %g2, %g2		! %g2 = old & mask (returned)
#ifdef __SMP__
	stb	%g5, [%g1]		! store new byte, release the byte lock
	set	C_LABEL(bitops_spinlock), %g5
	stb	%g0, [%g5]
#else
	stb	%g5, [%g1]		! store new byte
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop
	jmpl	%o7, %g0		! return via %o7
	 mov	%g4, %o7		! delay slot: restore caller's %o7

	/* Little endian version of ___clear_bit: byte-wide old & ~mask. */
	.globl	___clear_le_bit
___clear_le_bit:
	rd	%psr, %g3		! %g3 = saved PSR
	nop; nop; nop
	or	%g3, PSR_PIL, %g5	! mask local interrupts
	wr	%g5, 0x0, %psr
	nop; nop; nop			! WRPSR hazard slots
#ifdef __SMP__
	set	C_LABEL(bitops_spinlock), %g5
2:	ldstub	[%g5], %g7		! Spin on the byte lock for SMP.
	orcc	%g7, 0x0, %g0		! Did we get it?
	bne	2b			! Nope...
#endif
	 ldub	[%g1], %g7		! %g7 = old byte (delay slot on SMP)
	andn	%g7, %g2, %g5		! %g5 = old & ~mask (new value)
	and	%g7, %g2, %g2		! %g2 = old & mask (returned)
#ifdef __SMP__
	stb	%g5, [%g1]		! store new byte, release the byte lock
	set	C_LABEL(bitops_spinlock), %g5
	stb	%g0, [%g5]
#else
	stb	%g5, [%g1]		! store new byte
#endif
	wr	%g3, 0x0, %psr		! restore original PSR
	nop; nop; nop
	jmpl	%o7, %g0		! return via %o7
	 mov	%g4, %o7		! delay slot: restore caller's %o7