mirror of
git://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
synced 2025-05-24 10:39:52 +00:00
powerpc: Fix Unaligned Loads and Stores
This patch modifies the unaligned access routines of the sstep.c module so that it properly reverses the bytes of storage operands in the little endian kernel. This is implemented by breaking an unaligned little endian access into a combination of single byte accesses plus an overall byte reversal operation. Signed-off-by: Tom Musta <tmusta@gmail.com> Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
This commit is contained in:
parent
630c8a5fc9
commit
6506b4718b
1 changed file with 45 additions and 0 deletions
|
@ -212,11 +212,19 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
|
|||
{
|
||||
int err;
|
||||
unsigned long x, b, c;
|
||||
#ifdef __LITTLE_ENDIAN__
|
||||
int len = nb; /* save a copy of the length for byte reversal */
|
||||
#endif
|
||||
|
||||
/* unaligned, do this in pieces */
|
||||
x = 0;
|
||||
for (; nb > 0; nb -= c) {
|
||||
#ifdef __LITTLE_ENDIAN__
|
||||
c = 1;
|
||||
#endif
|
||||
#ifdef __BIG_ENDIAN__
|
||||
c = max_align(ea);
|
||||
#endif
|
||||
if (c > nb)
|
||||
c = max_align(nb);
|
||||
err = read_mem_aligned(&b, ea, c);
|
||||
|
@ -225,7 +233,24 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
|
|||
x = (x << (8 * c)) + b;
|
||||
ea += c;
|
||||
}
|
||||
#ifdef __LITTLE_ENDIAN__
|
||||
switch (len) {
|
||||
case 2:
|
||||
*dest = byterev_2(x);
|
||||
break;
|
||||
case 4:
|
||||
*dest = byterev_4(x);
|
||||
break;
|
||||
#ifdef __powerpc64__
|
||||
case 8:
|
||||
*dest = byterev_8(x);
|
||||
break;
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
#ifdef __BIG_ENDIAN__
|
||||
*dest = x;
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
@ -273,9 +298,29 @@ static int __kprobes write_mem_unaligned(unsigned long val, unsigned long ea,
|
|||
int err;
|
||||
unsigned long c;
|
||||
|
||||
#ifdef __LITTLE_ENDIAN__
|
||||
switch (nb) {
|
||||
case 2:
|
||||
val = byterev_2(val);
|
||||
break;
|
||||
case 4:
|
||||
val = byterev_4(val);
|
||||
break;
|
||||
#ifdef __powerpc64__
|
||||
case 8:
|
||||
val = byterev_8(val);
|
||||
break;
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
/* unaligned or little-endian, do this in pieces */
|
||||
for (; nb > 0; nb -= c) {
|
||||
#ifdef __LITTLE_ENDIAN__
|
||||
c = 1;
|
||||
#endif
|
||||
#ifdef __BIG_ENDIAN__
|
||||
c = max_align(ea);
|
||||
#endif
|
||||
if (c > nb)
|
||||
c = max_align(nb);
|
||||
err = write_mem_aligned(val >> (nb - c) * 8, ea, c);
|
||||
|
|
Loading…
Add table
Reference in a new issue