/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 1999, 2000, 2001 by Ralf Baechle
 * Copyright (C) 1999, 2000, 2001 Silicon Graphics, Inc.
 */
#ifndef _ASM_UNALIGNED_H
#define _ASM_UNALIGNED_H

/*
 * Deliberately left undefined: a get_unaligned()/put_unaligned() on an
 * unsupported access size falls into the default case below and turns
 * into a link-time error instead of silently miscompiling.
 */
extern void __get_unaligned_bad_length(void);
extern void __put_unaligned_bad_length(void);

/*
 * Load quad (64-bit) unaligned.
 */
extern inline unsigned long __ldq_u(const unsigned long * __addr)
{
	unsigned long __res;

	__asm__("uld\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Load long (32-bit) unaligned.
 */
extern inline unsigned long __ldl_u(const unsigned int * __addr)
{
	unsigned long __res;

	__asm__("ulw\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Load word (16-bit) unaligned.
 */
extern inline unsigned long __ldw_u(const unsigned short * __addr)
{
	unsigned long __res;

	__asm__("ulh\t%0,%1"
		: "=&r" (__res)
		: "m" (*__addr));

	return __res;
}

/*
 * Store quad (64-bit) unaligned.
 */
extern inline void __stq_u(unsigned long __val, unsigned long * __addr)
{
	__asm__("usd\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * Store long (32-bit) unaligned.
 */
extern inline void __stl_u(unsigned long __val, unsigned int * __addr)
{
	__asm__("usw\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * Store word (16-bit) unaligned.
 */
extern inline void __stw_u(unsigned long __val, unsigned short * __addr)
{
	__asm__("ush\t%1, %0"
		: "=m" (*__addr)
		: "r" (__val));
}

/*
 * get_unaligned - get value from possibly mis-aligned location
 * @ptr: pointer to value
 *
 * This macro should be used for accessing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. retrieving a u16 value from a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define get_unaligned(ptr)						\
({									\
	__typeof__(*(ptr)) __val;					\
									\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__val = *(const unsigned char *)(ptr);			\
		break;							\
	case 2:								\
		__val = __ldw_u((const unsigned short *)(ptr));		\
		break;							\
	case 4:								\
		__val = __ldl_u((const unsigned int *)(ptr));		\
		break;							\
	case 8:								\
		__val = __ldq_u((const unsigned long *)(ptr));		\
		break;							\
	default:							\
		__get_unaligned_bad_length();				\
		break;							\
	}								\
									\
	__val;								\
})

/*
 * put_unaligned - put value to a possibly mis-aligned location
 * @val: value to place
 * @ptr: pointer to location
 *
 * This macro should be used for placing values larger in size than
 * single bytes at locations that are expected to be improperly aligned,
 * e.g. writing a u16 value to a location not u16-aligned.
 *
 * Note that unaligned accesses can be very expensive on some architectures.
 */
#define put_unaligned(val,ptr)						\
do {									\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(unsigned char *)(ptr) = (val);			\
		break;							\
	case 2:								\
		__stw_u((val), (unsigned short *)(ptr));		\
		break;							\
	case 4:								\
		__stl_u((val), (unsigned int *)(ptr));			\
		break;							\
	case 8:								\
		/* cast matches the __stq_u prototype (was		\
		   unsigned long long *, a type mismatch) */		\
		__stq_u((val), (unsigned long *)(ptr));			\
		break;							\
	default:							\
		__put_unaligned_bad_length();				\
		break;							\
	}								\
} while(0)

#endif /* _ASM_UNALIGNED_H */