Commit 46fcc7f0 authored by Michael Weiser, committed by Niels Möller

arm: Fix memxor for non-armv6+ big-endian systems



The ARM assembly adjustments for big-endian systems introduced armv6+-only
instructions (rev) into the generic arm memxor code. Replace those with an
actual conversion of the leftover byte store routines for big-endian
systems. This also provides a slight optimisation, since the additional
instruction is removed, and increases the symmetry between the little- and
big-endian implementations.
Signed-off-by: Michael Weiser <michael.weiser@gmx.de>
parent dedba6ff
@@ -138,24 +138,25 @@ PROLOGUE(nettle_memxor)
 	adds N, #8
 	beq .Lmemxor_odd_done
-	C We have TNC/8 left-over bytes in r4, high end
+	C We have TNC/8 left-over bytes in r4, high end on LE and low end on
+	C BE, excess bits to be discarded by alignment adjustment at the other
 	S0ADJ r4, CNT
+	C now byte-aligned at low end on LE and high end on BE
 	ldr r3, [DST]
 	eor r3, r4
-	C memxor_leftover does an LSB store
-	C so we need to reverse if actually BE
-IF_BE(< rev r3, r3>)
 	pop {r4,r5,r6}
 	C Store bytes, one by one.
 .Lmemxor_leftover:
+	C bring uppermost byte down for saving while preserving lower ones
+IF_BE(< ror r3, #24>)
 	strb r3, [DST], #+1
 	subs N, #1
 	beq .Lmemxor_done
 	subs TNC, #8
-	lsr r3, #8
+	C bring down next byte, no need to preserve
+IF_LE(< lsr r3, #8>)
 	bne .Lmemxor_leftover
 	b .Lmemxor_bytes
 .Lmemxor_odd_done:
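As a reading aid, here is a rough C model of the leftover store loop above, with invented names that are not nettle's. The old code byte-reversed the word with rev (an ARMv6+ instruction) so that a single LSB-store loop could be shared; the new code instead walks the word from the correct end on each endianness, so no rev is needed:

#include <stdint.h>
#include <stddef.h>

/* Illustrative sketch only; names are not nettle's.  w is the word the asm
   already holds in r3 (destination word XORed with the source data), with
   the n valid leftover bytes at the low end on little endian and at the
   high end on big endian, as the new comments in the hunk describe. */
static void
leftover_store_le(uint8_t *dst, uint32_t w, size_t n)
{
  while (n-- > 0)
    {
      *dst++ = (uint8_t) w;       /* strb r3, [DST], #+1: store the low byte */
      w >>= 8;                    /* IF_LE: lsr r3, #8 brings the next byte down */
    }
}

static void
leftover_store_be(uint8_t *dst, uint32_t w, size_t n)
{
  while (n-- > 0)
    {
      w = (w << 8) | (w >> 24);   /* IF_BE: ror r3, #24 brings the top byte down */
      *dst++ = (uint8_t) w;       /* strb still stores the low byte */
    }
}

On big endian the rotate happens before each store because the first byte in memory order is the uppermost byte of the register; on little endian it is already the lowest byte, so the shift simply follows the store.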
@@ -159,21 +159,23 @@ PROLOGUE(nettle_memxor3)
 	adds N, #8
 	beq .Lmemxor3_done
-	C Leftover bytes in r4, low end
+	C Leftover bytes in r4, low end on LE and high end on BE before
+	C preparatory alignment correction
 	ldr r5, [AP, #-4]
 	eor r4, r5, r4, S1ADJ ATNC
-	C leftover does an LSB store
-	C so we need to reverse if actually BE
-IF_BE(< rev r4, r4>)
+	C now byte-aligned in high end on LE and low end on BE because we're
+	C working downwards in saving the very first bytes of the buffer
 .Lmemxor3_au_leftover:
 	C Store a byte at a time
-	ror r4, #24
+	C bring uppermost byte down for saving while preserving lower ones
+IF_LE(< ror r4, #24>)
 	strb r4, [DST, #-1]!
 	subs N, #1
 	beq .Lmemxor3_done
 	subs ACNT, #8
+	C bring down next byte, no need to preserve
+IF_BE(< lsr r4, #8>)
 	sub AP, #1
 	bne .Lmemxor3_au_leftover
 	b .Lmemxor3_bytes
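The memxor3 loops write the destination downwards (strb with pre-decrement), so the endianness roles swap relative to memxor: little endian now rotates the uppermost byte down before each store, and big endian shifts right afterwards. A minimal C sketch with invented names, not nettle's, with the sub AP, #1 bookkeeping for the source pointer omitted:

#include <stdint.h>
#include <stddef.h>

/* Illustrative sketch only; w plays the role of r4 (the combined word),
   dst_end the role of DST, and the n leftover bytes are stored downwards,
   ending just below dst_end. */
static void
leftover_store_down_le(uint8_t *dst_end, uint32_t w, size_t n)
{
  while (n-- > 0)
    {
      w = (w << 8) | (w >> 24);   /* IF_LE: ror r4, #24 brings the top byte down */
      *--dst_end = (uint8_t) w;   /* strb r4, [DST, #-1]! */
    }
}

static void
leftover_store_down_be(uint8_t *dst_end, uint32_t w, size_t n)
{
  while (n-- > 0)
    {
      *--dst_end = (uint8_t) w;   /* strb r4, [DST, #-1]!: store the low byte */
      w >>= 8;                    /* IF_BE: lsr r4, #8 brings the next byte down */
    }
}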
@@ -273,18 +275,21 @@ IF_BE(< rev r4, r4>)
 	adds N, #8
 	beq .Lmemxor3_done
-	C leftover does an LSB store
-	C so we need to reverse if actually BE
-IF_BE(< rev r4, r4>)
-	C Leftover bytes in a4, low end
-	ror r4, ACNT
+	C Leftover bytes in r4, low end on LE and high end on BE before
+	C preparatory alignment correction
+IF_LE(< ror r4, ACNT>)
+IF_BE(< ror r4, ATNC>)
+	C now byte-aligned in high end on LE and low end on BE because we're
+	C working downwards in saving the very first bytes of the buffer
 .Lmemxor3_uu_leftover:
-	ror r4, #24
+	C bring uppermost byte down for saving while preserving lower ones
+IF_LE(< ror r4, #24>)
 	strb r4, [DST, #-1]!
 	subs N, #1
 	beq .Lmemxor3_done
 	subs ACNT, #8
+	C bring down next byte, no need to preserve
+IF_BE(< lsr r4, #8>)
 	bne .Lmemxor3_uu_leftover
 	b .Lmemxor3_bytes
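The uu case differs from the au case above only in the preparatory rotation: the old code always rotated by ACNT, which only puts the bytes where the little-endian downward store loop expects them; the corrected code rotates by the complement ATNC on big endian instead, as the new comments describe, and then reuses the same store loop. A hedged C sketch of just that alignment step, with invented names and assuming 0 < acnt < 32:

#include <stdint.h>

/* Rotate right by k bits; k is assumed to be strictly between 0 and 32,
   since a shift by 32 would be undefined behaviour in C. */
static uint32_t
ror32(uint32_t w, unsigned k)
{
  return (w >> k) | (w << (32 - k));
}

/* Corrected preparatory alignment for the uu leftover bytes: little endian
   keeps the old rotation by ACNT, big endian rotates by the complement ATNC,
   so the valid bytes end up where the downward store loop expects them
   (high end on LE, low end on BE, per the new comments). */
static uint32_t
align_uu_leftover(uint32_t w, unsigned acnt, unsigned atnc, int big_endian)
{
  return ror32(w, big_endian ? atnc : acnt);   /* IF_LE: ror ACNT, IF_BE: ror ATNC */
}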