path: root/runtime/x86_64/i64_utof.S
author     Xavier Leroy <xavier.leroy@inria.fr>    2016-10-01 17:38:24 +0200
committer  Xavier Leroy <xavier.leroy@inria.fr>    2016-10-01 17:38:24 +0200
commit     a14b9578ee5297d954103e05d7b2d322816ddd8f (patch)
tree       93b7c2b6bd7de8a4dedaf399088257e0660959b8 /runtime/x86_64/i64_utof.S
parent     3bef0962079cf971673b4267b0142bd5fe092509 (diff)
Support for 64-bit architectures: x86 in 64-bit mode
This commit enriches the IA32 port so that it supports x86 processors in
64-bit mode as well as in 32-bit mode, depending on the value of
Archi.ptr64, which itself is set from the configuration. To activate
x86-64 support, configure with "x86_64-linux".

Main steps:
- Enrich Op.v and Asm.v with 64-bit operations.
- SelectLong: in 64-bit mode, use 64-bit operations directly; in 32-bit
  mode, fall back on the old implementation based on pairs of 32-bit
  integers.
- Conventions1: support the x86-64 ABI in addition to the 32-bit ABI.
- Add support for the new 64-bit operations everywhere.
- runtime/x86_64: implementation of the supporting library appropriate
  for x86 in 64-bit mode.

To do:
- More optimizations are possible on 64-bit integer arithmetic operations.
- Could add new chunks to load, say, an unsigned byte into a 64-bit long
  (currently we load it as a 32-bit int and then zero-extend, as sketched
  below).
- Struct passing currently implements the wrong ABI.
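A minimal C sketch of the byte-load pattern mentioned in the second to-do
item, with a hypothetical helper name; this is an illustration only, not
code from the commit:

    /* Hypothetical illustration: loading an unsigned byte into a 64-bit
       long via a 32-bit intermediate, as the compiler currently does. */
    unsigned long long load_u8_as_u64(const unsigned char *p)
    {
        unsigned int tmp = *p;              /* byte load, zero-extended to 32 bits */
        return (unsigned long long) tmp;    /* zero-extend 32 -> 64 bits */
    }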
Diffstat (limited to 'runtime/x86_64/i64_utof.S')
-rw-r--r--    runtime/x86_64/i64_utof.S    56
1 file changed, 56 insertions, 0 deletions
diff --git a/runtime/x86_64/i64_utof.S b/runtime/x86_64/i64_utof.S
new file mode 100644
index 00000000..d0935341
--- /dev/null
+++ b/runtime/x86_64/i64_utof.S
@@ -0,0 +1,56 @@
+// *****************************************************************
+//
+// The Compcert verified compiler
+//
+// Xavier Leroy, INRIA Paris
+//
+// Copyright (c) 2016 Institut National de Recherche en Informatique et
+// en Automatique.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// * Neither the name of the <organization> nor the
+// names of its contributors may be used to endorse or promote products
+// derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT
+// HOLDER> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// *********************************************************************
+
+// Helper functions for 64-bit integer arithmetic. x86_64 version.
+
+#include "sysdeps.h"
+
+// Conversion unsigned long -> single-precision float
+
+FUNCTION(__i64_utof)
+ testq %rdi, %rdi
+ js 1f
+ pxor %xmm0, %xmm0 // if < 2^63,
+ cvtsi2ssq %rdi, %xmm0 // convert as if signed
+ ret
+1: // if >= 2^63, use round-to-odd trick
+ movq %rdi, %rax
+ shrq %rax
+ andq $1, %rdi
+ orq %rdi, %rax // (arg >> 1) | (arg & 1)
+ pxor %xmm0, %xmm0
+ cvtsi2ssq %rax, %xmm0 // convert as if signed
+ addss %xmm0, %xmm0 // multiply result by 2.0
+ ret
+ENDFUNCTION(__i64_utof)
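For reference, a hedged C sketch of the round-to-odd conversion performed
by __i64_utof above; the function name is hypothetical and the code is an
illustration under the stated assumptions, not part of the commit:

    #include <stdint.h>

    /* Hypothetical illustration of the round-to-odd trick used by
       __i64_utof: convert an unsigned 64-bit integer to float using
       only the hardware's signed int64 -> float conversion. */
    float u64_to_float_sketch(uint64_t x)
    {
        if ((x >> 63) == 0) {
            /* x < 2^63: the signed conversion already gives the right result */
            return (float) (int64_t) x;
        } else {
            /* x >= 2^63: halve the value but fold the discarded low bit
               back in ("round to odd") so the final rounding to float is
               unchanged, convert as signed, then double the result. */
            uint64_t half = (x >> 1) | (x & 1);
            return 2.0f * (float) (int64_t) half;
        }
    }

The final doubling is exact: multiplying a binary float by 2 only adjusts
the exponent, and no overflow can occur since the result is at most about
2^64, far below the largest representable float.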