Brian Smith / nettle

Commit 05964538
Authored Apr 23, 2013 by Martin Storsjö
Committed by Niels Möller, Apr 23, 2013
Use movdqu instead of movdqa for saving xmm registers
The stack is not guaranteed to be 16-byte aligned on win64.
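Background for the message above: the Windows x64 calling convention treats %xmm6 through %xmm15 as callee-saved, which is why W64_ENTRY spills them to the stack, and movdqa requires a 16-byte-aligned memory operand (raising a general-protection fault otherwise), while movdqu accepts any alignment. A minimal sketch, not taken from the commit, of the two forms on a possibly unaligned stack (the $24 mirrors eval(8 + 16*($2 - 6)) with $2 = 7):

	sub	$24, %rsp		# reserve spill space; %rsp alignment is not guaranteed here on win64
	movdqa	%xmm6, 0(%rsp)		# old form: faults if %rsp is not 16-byte aligned
	movdqu	%xmm6, 0(%rsp)		# new form: stores correctly at any alignment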
parent 508a312a

Changes: 2
ChangeLog
2013-04-23  Niels Möller  <nisse@lysator.liu.se>

	From Martin Storsjö:
	* x86_64/machine.m4 (W64_ENTRY): Use movdqu instead of movdqa for
	saving xmm registers, since the stack is not guaranteed to be
	16-byte aligned on win64.

	* Makefile.in: Consistently use EXEEXT_FOR_BUILD.

2013-04-21  Niels Möller  <nisse@lysator.liu.se>
...
...
x86_64/machine.m4
...
...
@@ -71,34 +71,34 @@ define(<W64_ENTRY>, <
   ifelse(W64_ABI,yes,[
     ifelse(eval($2 > 6), 1, [
       sub	[$]eval(8 + 16*($2 - 6)), %rsp
-      movdqa	%xmm6, 0(%rsp)
+      movdqu	%xmm6, 0(%rsp)
     ])
     ifelse(eval($2 > 7), 1, [
-      movdqa	%xmm7, 16(%rsp)
+      movdqu	%xmm7, 16(%rsp)
     ])
     ifelse(eval($2 > 8), 1, [
-      movdqa	%xmm8, 32(%rsp)
+      movdqu	%xmm8, 32(%rsp)
     ])
     ifelse(eval($2 > 9), 1, [
-      movdqa	%xmm9, 48(%rsp)
+      movdqu	%xmm9, 48(%rsp)
     ])
     ifelse(eval($2 > 10), 1, [
-      movdqa	%xmm10, 64(%rsp)
+      movdqu	%xmm10, 64(%rsp)
     ])
     ifelse(eval($2 > 11), 1, [
-      movdqa	%xmm11, 80(%rsp)
+      movdqu	%xmm11, 80(%rsp)
     ])
     ifelse(eval($2 > 12), 1, [
-      movdqa	%xmm12, 96(%rsp)
+      movdqu	%xmm12, 96(%rsp)
     ])
     ifelse(eval($2 > 13), 1, [
-      movdqa	%xmm13, 112(%rsp)
+      movdqu	%xmm13, 112(%rsp)
     ])
     ifelse(eval($2 > 14), 1, [
-      movdqa	%xmm14, 128(%rsp)
+      movdqu	%xmm14, 128(%rsp)
     ])
     ifelse(eval($2 > 15), 1, [
-      movdqa	%xmm15, 144(%rsp)
+      movdqu	%xmm15, 144(%rsp)
     ])
     ifelse(eval($1 >= 1), 1, [
       push	%rdi
...
...
@@ -133,34 +133,34 @@ define(<W64_EXIT>, <
       pop	%rdi
     ])
     ifelse(eval($2 > 15), 1, [
-      movdqa	144(%rsp), %xmm15
+      movdqu	144(%rsp), %xmm15
     ])
     ifelse(eval($2 > 14), 1, [
-      movdqa	128(%rsp), %xmm14
+      movdqu	128(%rsp), %xmm14
     ])
     ifelse(eval($2 > 13), 1, [
-      movdqa	112(%rsp), %xmm13
+      movdqu	112(%rsp), %xmm13
     ])
     ifelse(eval($2 > 12), 1, [
-      movdqa	96(%rsp), %xmm12
+      movdqu	96(%rsp), %xmm12
     ])
     ifelse(eval($2 > 11), 1, [
-      movdqa	80(%rsp), %xmm11
+      movdqu	80(%rsp), %xmm11
     ])
     ifelse(eval($2 > 10), 1, [
-      movdqa	64(%rsp), %xmm10
+      movdqu	64(%rsp), %xmm10
     ])
     ifelse(eval($2 > 9), 1, [
-      movdqa	48(%rsp), %xmm9
+      movdqu	48(%rsp), %xmm9
     ])
     ifelse(eval($2 > 8), 1, [
-      movdqa	32(%rsp), %xmm8
+      movdqu	32(%rsp), %xmm8
     ])
     ifelse(eval($2 > 7), 1, [
-      movdqa	16(%rsp), %xmm7
+      movdqu	16(%rsp), %xmm7
     ])
     ifelse(eval($2 > 6), 1, [
-      movdqa	0(%rsp), %xmm6
+      movdqu	0(%rsp), %xmm6
       add	[$]eval(8 + 16*($2 - 6)), %rsp
     ])
   ])
...
...
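For orientation, W64_ENTRY and W64_EXIT bracket the Windows-specific entry and exit code of nettle's x86_64 assembly routines. Judging from the hunks above, the second macro argument is the number of xmm registers the routine uses, and registers from %xmm6 up are saved because the Windows x64 ABI marks them callee-saved. A hedged usage sketch with a hypothetical routine name (nettle_example_fn), assuming nettle's usual PROLOGUE/EPILOGUE wrappers and its C comment convention:

PROLOGUE(nettle_example_fn)
	W64_ENTRY(2, 8)	C two integer arguments, eight xmm registers in use:
			C spills %xmm6 and %xmm7 with the movdqu stores patched above
	C ... routine body clobbering %xmm0-%xmm7 ...
	W64_EXIT(2, 8)	C reloads %xmm7 and %xmm6, then releases the stack space
	ret
EPILOGUE(nettle_example_fn)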