diff --git a/Makefile.am b/Makefile.am
index 08db83401987180884c9efc60026b70c62597db9..cfcd7679f385c80374458cacddba43dda519f14b 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -14,6 +14,8 @@ TOOLS = \
 		lackey \
 		none
 
+TOOLS += verrou
+
 EXP_TOOLS = \
 		exp-bbv
 
diff --git a/README b/README
index 511027ae9bd14fe327e5206596dc9a0c3b6ad1f7..0f32e73ea8f8beb596ddddcf82fb0b7d78584b61 100644
--- a/README
+++ b/README
@@ -1,4 +1,26 @@
 
+Specific instructions for Verrou
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You are currently looking at a version of Valgrind which has been patched to
+include the Verrou tool. Please refer to the specific instructions in the file:
+
+    verrou/README.md
+
+or the verrou github page:
+
+    https://github.com/edf-hpc/verrou
+
+
+For the very impatient, you should probably run the following commands:
+
+    ./autogen.sh
+    ./configure --enable-only64bit --enable-verrou-fma --prefix=PREFIX
+    make
+    make install
+    source PREFIX/env.sh
+
+
 Release notes for Valgrind
 ~~~~~~~~~~~~~~~~~~~~~~~~~~
 If you are building a binary package of Valgrind for distribution,
diff --git a/configure.ac b/configure.ac
index b86e802204ee4eb503c6ccc4d3fc679aaa20bafc..f67bafdf10ecb29fcfeb5b02c11883bb29be56cf 100755
--- a/configure.ac
+++ b/configure.ac
@@ -8,7 +8,7 @@
 ##------------------------------------------------------------##
 
 # Process this file with autoconf to produce a configure script.
-AC_INIT([Valgrind],[3.16.1],[valgrind-users@lists.sourceforge.net])
+AC_INIT([Valgrind],[3.16.1+verrou-dev],[valgrind-users@lists.sourceforge.net])
 AC_CONFIG_SRCDIR(coregrind/m_main.c)
 AC_CONFIG_HEADERS([config.h])
 AM_INIT_AUTOMAKE([foreign dist-bzip2 subdir-objects])
@@ -4766,6 +4766,10 @@ AM_CONDITIONAL([HAVE_SHARED_POINTER_ANNOTATION],
                [test x$ac_have_shared_pointer_annotation = xyes])
 
 
+# Verrou
+m4_include(verrou/configure.ac)
+
+
 #----------------------------------------------------------------------------
 # Ok.  We're done checking.
 #----------------------------------------------------------------------------
@@ -4788,6 +4792,8 @@ AC_CONFIG_FILES([
    auxprogs/Makefile
    mpi/Makefile
    coregrind/Makefile 
+   verrou/Makefile
+   verrou/tests/Makefile
    memcheck/Makefile
    memcheck/tests/Makefile
    memcheck/tests/common/Makefile
diff --git a/docs/lib/vg_basic.css b/docs/lib/vg_basic.css
index 49367fed27d7a6a9298d9a8a467d6cf964c96c07..6dc3e5f7e59fc903756863cbb44beacc3be33266 100644
--- a/docs/lib/vg_basic.css
+++ b/docs/lib/vg_basic.css
@@ -65,3 +65,39 @@ blockquote {
  border:      solid 1px #ffde84; 
 }
 
+div.note,
+div.warning {
+    margin-left: 0 !important;
+    margin-right: 0 !important;
+    border-radius: 4px;
+    padding: 1em;
+}
+
+div.note h3,
+div.warning h3 {
+    font-size: 100%;
+    font-weight: bold;
+    margin-top: 0;
+}
+
+div.note p,
+div.warning p {
+    margin: 0;
+}
+
+div.note {
+    background-color: #d9edf7;
+    border: 1px solid #bce8f1;
+}
+div.note, div.note h3 {
+    color: #31708f;
+}
+
+div.warning {
+    background-color: #fcf8e3;
+    border: 1px solid #faebcc;
+}
+
+div.warning, div.warning h3 {
+    color: #8a6d3b;
+}
diff --git a/docs/xml/manpages-index.xml b/docs/xml/manpages-index.xml
index 17a81c99f0921d857fb1092c947ea7637ca35b1a..2086f49eb1b7b84c8a2df3abc57bd100442c9e83 100644
--- a/docs/xml/manpages-index.xml
+++ b/docs/xml/manpages-index.xml
@@ -78,6 +78,13 @@
     parse="xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
 </chapter>
 
+<!-- verrou_dd -->
+<chapter>
+<title>verrou_dd</title>
+<xi:include href="../../verrou/docs/verrou_dd-manpage.xml"
+    parse="xml" xmlns:xi="http://www.w3.org/2001/XInclude" />
+</chapter>
+
 </book>
 </set>
 
diff --git a/docs/xml/manual.xml b/docs/xml/manual.xml
index 518be2964e04d57ea897dc4afe2cd3ea8721a2fe..5929b16a5c9697bfc46fc06d1dbab65c38493d84 100644
--- a/docs/xml/manual.xml
+++ b/docs/xml/manual.xml
@@ -40,6 +40,8 @@
       xmlns:xi="http://www.w3.org/2001/XInclude" />
   <xi:include href="../../lackey/docs/lk-manual.xml" parse="xml"  
       xmlns:xi="http://www.w3.org/2001/XInclude" />
+  <xi:include href="../../verrou/docs/vr-manual.xml" parse="xml"
+      xmlns:xi="http://www.w3.org/2001/XInclude" />
   <xi:include href="../../none/docs/nl-manual.xml" parse="xml"  
       xmlns:xi="http://www.w3.org/2001/XInclude" />
   <xi:include href="../../exp-bbv/docs/bbv-manual.xml" parse="xml"  
diff --git a/docs/xml/valgrind-manpage.xml b/docs/xml/valgrind-manpage.xml
index 3c893c680e471ccc57d352495712340028c8740c..d36eadec37be6a355d2e59fce297537574f133ab 100644
--- a/docs/xml/valgrind-manpage.xml
+++ b/docs/xml/valgrind-manpage.xml
@@ -221,6 +221,8 @@ system: <filename>&vg-docs-path;</filename>, or online:
 
 </refsect1>
 
+<xi:include href="../../verrou/docs/valgrind-manpage.xml"
+            xmlns:xi="http://www.w3.org/2001/XInclude" />
 
 <refsect1 id="see_also">
 <title>See Also</title>
diff --git a/docs/xml/vg-entities.xml b/docs/xml/vg-entities.xml
index 7d04ee795b1a0b3eafb6fc5144545d7ed8e63d14..a105f0b50503bca790e912a1e560b4b2b4cdac05 100644
--- a/docs/xml/vg-entities.xml
+++ b/docs/xml/vg-entities.xml
@@ -6,7 +6,7 @@
 
 <!-- valgrind release + version stuff -->
 <!ENTITY rel-type    "Release">
-<!ENTITY rel-version "3.16.1">
+<!ENTITY rel-version "3.16.1+verrou-dev">
 <!ENTITY rel-date    "22 June 2020">
 
 <!-- where the docs are installed -->
diff --git a/verrou/.gitignore b/verrou/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..5c67a3ee516571b3d1f6c89f1ff8d6b7d321122a
--- /dev/null
+++ b/verrou/.gitignore
@@ -0,0 +1,11 @@
+Makefile
+Makefile.in
+*amd64-linux*
+*.o
+*~
+.deps
+env.sh
+tests/*.std*.out
+tests/*.std*.diff
+tests/clreq
+tests/sum
diff --git a/verrou/.travis.yml b/verrou/.travis.yml
new file mode 100644
index 0000000000000000000000000000000000000000..afb055bf39ce6676c106fbe64d9e3dace24b4df0
--- /dev/null
+++ b/verrou/.travis.yml
@@ -0,0 +1,21 @@
+language: cpp
+dist: trusty
+sudo: required
+env:
+  - VALGRIND_VERSION=VALGRIND_3_16_BRANCH
+  - VALGRIND_VERSION=master
+matrix:
+  allow_failures:
+    - env: VALGRIND_VERSION=master
+before_install:
+  - sudo apt-get -qq update
+  - sudo apt-get install -y libc6-dbg
+install:
+  - make -f travis.mk download-valgrind
+  - make -f travis.mk patch-valgrind || make -f travis.mk patch-error
+  - make -f travis.mk configure
+  - make -f travis.mk build
+script:
+  - make -f travis.mk check-install
+  - make -f travis.mk check || make -f travis.mk check-error
+  - make -f travis.mk unit-test
diff --git a/verrou/CHANGELOG.md b/verrou/CHANGELOG.md
new file mode 100644
index 0000000000000000000000000000000000000000..3b241a0beccf15f829dff58a91890b524068b658
--- /dev/null
+++ b/verrou/CHANGELOG.md
@@ -0,0 +1,165 @@
+# Change Log
+
+
+## [UNRELEASED]
+
+This version is based on Valgrind-3.15.0.
+
+### Added
+
+
+### Changed
+
+---
+
+## v2.2.0 - 2020-01-08
+
+This version is based on Valgrind-3.15.0.
+
+### Added
+
+- [EXPERIMENTAL] MCA back-ends
+  - use with valgrind option `--backend=mcaquad`
+  - deactivate support for MCA back-ends with configure switch `--enable-verrou-quad=no`
+
+- [EXPERIMENTAL] code coverage generation (`--trace=FILENAME`)
+
+- Generate the list of cancellations (`--cc-gen-source`)
+
+### Changed
+
+- Two scripts `verrou_dd_line` and `verrou_dd_sym` replace
+  `verrou_dd`.
+
+- Bug fix related to the random generator.
+
+- Bug fix: use the PID to generate the search space name.  This allows
+  using Delta-Debugging techniques with MPI programs.
+
+- Bug fix: correctly handle unnamed objects and source file lines.
+
+- Bug fix: allow gdb integration. (fixes gh-24)
+
+---
+
+## v2.1.0 - 2018-11-09
+
+This version is based on Valgrind-3.14.0. (fixes gh-19)
+
+### Added
+
+- Preliminary filtering before Delta-Debugging: only functions performing
+  floating-point operations are considered in the search for
+  instabilities.
+
+- Multiple variants of the Delta-Debugging algorithm: (fixes gh-14, gh-22)
+  - srDDmin: variant of rDDmin, specifically tuned to accommodate stochastic
+    tests
+  - drDDmin: variant of rDDmin where a preliminary binary search is performed in
+    order to further reduce the search space.
+
+- New reduced-precision backend (`--rounding-mode=float`). This back-end
+  emulates the use of single-precision arithmetic for all double-precision
+  variables. (fixes gh-11)
+
+### Changed
+
+- Python3 port of `verrou_dd`.
+
+
+---
+
+## v2.0.0 - 2018-06-19
+
+This version is based on Valgrind-3.13.0.
+
+### Added
+
+- Generation of Valgrind errors for NaN values. This can be useful to debug
+  programs in conjunction with vgdb. (fixes gh-4)
+  
+- Instrumentation of all FP binary instructions, as obtained by any combination of:
+  - an operation:     ADD / SUB / MUL / DIV
+  - a vector variant: LLO / SSE / AVX2
+  - a precision:      single / double
+  
+- Instrumentation of cast instructions (double -> float).
+
+- Preparation for the common interflop backend interface.
+
+- (Experimental) Parallelism in `verrou_dd`. The number of concurrent threads
+  can be set using the `VERROU_DD_NUM_THREADS` environment variable. (related
+  to gh-7)
+  
+- (Experimental) New DDmin algorithm for `verrou_dd`. Instead of computing a
+  maximal set of stable symbols/lines using the DDmax algorithm (and outputting
+  the complement), the DDmin algorithm computes the union of minimal sets of
+  unstable symbols/lines. It can be activated by setting `VERROU_DD_ALGO=rddmin`
+  in the environment.
+
+
+### Changed
+
+- C++ source files are now compiled using the C++11 standard.
+
+- Fix the LLO instrumentation bug. This solves problems which sometimes happened
+  when LLO and real vector instructions were mixed. Known examples of such buggy
+  situations include OpenBLAS or binaries compiled with the Intel compiler.
+
+  The new `--vr-unsafe-llo-optim` option allows keeping the old fast but buggy
+  LLO instrumentation.
+
+- Removal of useless warnings.
+
+- Bug fixes in next_after and next_prev.
+
+- More robust rounding mode (upward, downward, toward_zero) with subnormals.
+
+- Improvements to tests and unit tests (with UCB references).
+
+- Usability improvements to verrou_dd: argument scripts no longer need to be
+  given as absolute paths, and error messages are now more readable. (related
+  to gh-7)
+
+- Increase the max. size of symbol names in exclude files. New limit is set to
+  4096 characters. (fixes gh-6)
+
+
+---
+
+## v1.1.0 - 2017-06-19
+
+This version is based on Valgrind-3.13.0.
+
+### Added
+
+- Command-line option `--vr-seed` allows setting the pRNG seed in order to
+  reproduce results in `random` and `average` rounding modes.
+
+
+---
+
+## v1.0.0 - 2017-05-19
+
+This version is based on Valgrind-3.12.0.
+
+### Added
+
+- Continuous integration using the Travis system.
+- Improve Delta-Debugging customization through environment variables.
+
+### Changed
+
+- There is no need anymore for an external, statically compiled libc/libm.
+
+
+---
+
+## v0.9.0 - 2017-03-31
+
+This is the first released version of Verrou. It is based on Valgrind-3.10.1,
+which supports the following system configurations:
+
+- `gcc`, versions 3.x to 5.x
+- `clang`, versions 2.9 to 4.x
+- `glibc`, versions 2.2 to 2.20
diff --git a/verrou/COPYING b/verrou/COPYING
new file mode 100644
index 0000000000000000000000000000000000000000..e90dfed1a31ed1c0c22befce22c6f37f9cf5f2bb
--- /dev/null
+++ b/verrou/COPYING
@@ -0,0 +1,340 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+     59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year  name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
diff --git a/verrou/Interlibmath/README.md b/verrou/Interlibmath/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a3176b9edd927e75b4aad968dc717c90ecf91dba
--- /dev/null
+++ b/verrou/Interlibmath/README.md
@@ -0,0 +1,48 @@
+# Interlibmath
+Interlibmath is a library that intercepts libm calls (with LD_PRELOAD) and replaces them with equivalent calls using the specified rounding mode. It helps you look for floating-point round-off errors introduced in programs by the libm.
+
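+As an illustration of this mechanism only (a hypothetical minimal sketch, not the
+actual Interlibmath code), a preloaded shared library can define a libm symbol
+itself and forward to the real implementation obtained with `dlsym(RTLD_NEXT, ...)`:
+
+    #define _GNU_SOURCE
+    #include <dlfcn.h>
+
+    // Interpose cos(): the preloaded definition takes precedence over libm's,
+    // and RTLD_NEXT gives access to the original symbol.
+    extern "C" double cos(double x) {
+      static double (*real_cos)(double) =
+          (double (*)(double)) dlsym(RTLD_NEXT, "cos");
+      // A real backend would perturb the rounding of the result here.
+      return real_cos(x);
+    }
+
+Such a sketch would be built with something like
+`g++ -shared -fPIC -o sketch.so sketch.cxx -ldl` and activated through LD_PRELOAD.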
+
+## Disclaimer
+1. Not yet sufficiently tested.
+2. The interface is not yet stabilized.
+3. Many libm functions are still missing.
+4. Performance is not yet optimized.
+5. It relies on libquadmath to provide round-to-nearest results.
+
+
+## Compilation
+Interlibmath is not yet included in the verrou/valgrind build process.
+Running `make` in this directory produces the file `interlibmath.so`.
+
+## How to use standalone
+
+    VERROU_LIBM_ROUNDING_MODE=random LD_PRELOAD='PATH'/interlibmath.so ./myProg
+
+or
+
+    VERROU_ROUNDING_MODE=random LD_PRELOAD='PATH'/interlibmath.so ./myProg
+
+If both VERROU_LIBM_ROUNDING_MODE and VERROU_ROUNDING_MODE are set, interlibmath takes VERROU_LIBM_ROUNDING_MODE into account.
+
+These variables can be set to the following values: random, average, nearest, upward, downward, toward_zero, farthest, float, native.
+
+The meaning is the same as for Verrou.
+The difference between native and nearest is that native returns the value obtained by the native libm implementation, whereas nearest returns the nearest cast of the result obtained with libquadmath.
+
+
+
+## How to use with Verrou
+
+    VERROU_ROUNDING_MODE=random LD_PRELOAD='PATH'/interlibmath.so valgrind --tool=verrou --exclude=libm.ex ./myProg
+
+Remarks:
+- libm.ex should contain both the libm and libquadmath.
+- libm functions and ./myProg operations are then instrumented with random rounding.
+
+To set different rounding modes for the libm (random) and for myProg (upward):
+
+    VERROU_LIBM_ROUNDING_MODE=random VERROU_ROUNDING_MODE=upward LD_PRELOAD='PATH'/interlibmath.so valgrind --tool=verrou --exclude=libm.ex ./myProg
+
+or
+
+    VERROU_LIBM_ROUNDING_MODE=random LD_PRELOAD='PATH'/interlibmath.so valgrind --tool=verrou --exclude=libm.ex --rounding-mode=upward ./myProg
+
+
+## Remarks
+
+If your favorite libm function does not appear in the counters:
+   1. interlibmath does not implement it: contact the developers (it is easy to add if the function takes only one argument);
+   2. the function may have been inlined by the compiler: adjust your compiler options.
diff --git a/verrou/Interlibmath/interlibmath.cxx b/verrou/Interlibmath/interlibmath.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..0815eac52a263b6ef741363634d20b74085492d4
--- /dev/null
+++ b/verrou/Interlibmath/interlibmath.cxx
@@ -0,0 +1,595 @@
+
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <cstdlib>
+
+#include <float.h>
+#include <quadmath.h>
+#include "../backend_verrou/interflop_verrou.h"
+#include "../backend_verrou/vr_rand.h"
+#include "../backend_verrou/vr_roundingOp.hxx"
+#include <iostream>
+#include <unistd.h>
+#include <sys/time.h>
+#include <dlfcn.h>
+
+#ifdef CLIENTREQUEST
+#include "valgrind/verrou.h"
+#else
+#define VERROU_START_INSTRUMENTATION
+#define VERROU_STOP_INSTRUMENTATION
+#endif
+
+
+vr_RoundingMode ROUNDINGMODE;
+void (*vr_cancellationHandler)(int)=NULL;
+void (*vr_panicHandler)(const char*)=NULL;
+void (*vr_nanHandler)()=NULL;
+
+unsigned int my_pid;
+
+
+class myLibMathFunction1{
+public:
+  myLibMathFunction1(std::string name):name_(name){
+    load_real_sym((void**)&(real_name_float) , name +std::string("f"));
+    load_real_sym((void**)&(real_name_double) , name);
+    load_real_sym((void**)&(real_name_long_double) , name +std::string("l"));
+  }
+
+  inline double apply(double a)const{
+    return real_name_double(a);
+  }
+
+  inline long double apply(long double a)const{
+    return real_name_long_double(a);
+  }
+
+  inline float apply(float a)const{
+    return real_name_float(a);
+  }
+
+  const std::string& name()const{
+    return name_;
+  }
+
+private:
+  void load_real_sym(void**fctPtr, std::string name ){
+    (*fctPtr) =dlsym(RTLD_NEXT, name.c_str());
+    if(*fctPtr==NULL){
+      std::cerr << "Problem with function "<< name<<std::endl;
+    }
+  }
+
+  // Attributes
+  float (*real_name_float)(float) ;
+  double (*real_name_double)(double) ;
+  long double (*real_name_long_double)(long double) ;
+  std::string name_;
+};
+
+class myLibMathFunction2{
+public:
+  myLibMathFunction2(std::string name):name_(name){
+    load_real_sym((void**)&(real_name_float) , name +std::string("f"));
+    load_real_sym((void**)&(real_name_double) , name);
+    load_real_sym((void**)&(real_name_long_double) , name +std::string("l"));
+  }
+
+  inline double apply(double a, double b)const{
+    return real_name_double(a,b);
+  }
+
+  inline long double apply(long double a, long double b)const{
+    return real_name_long_double(a,b);
+  }
+
+  inline float apply(float a, float b)const{
+    return real_name_float(a,b);
+  }
+
+  const std::string& name()const{
+    return name_;
+  }
+
+private:
+  void load_real_sym(void**fctPtr, std::string name ){
+    (*fctPtr) =dlsym(RTLD_NEXT, name.c_str());
+    if(*fctPtr==NULL){
+      std::cerr << "Problem with function "<< name<<std::endl;
+    }
+  }
+
+  // Attributes
+  float (*real_name_float)(float,float) ;
+  double (*real_name_double)(double,double) ;
+  long double (*real_name_long_double)(long double, long double) ;
+  std::string name_;
+};
+
+
+
+//shell LIST1="acos acosh asin asinh atan atanh cbrt erf exp exp2 expm1 log log10 log1p log2 tgamma lgamma sin sinh cos cosh sqrt tan tanh j0 j1 y0 y1"
+enum Function1Name {
+  //shell comand to generate:  for i in $LIST1 ; do  echo "enum$i,"; done;
+  enumacos,
+  enumacosh,
+  enumasin,
+  enumasinh,
+  enumatan,
+  enumatanh,
+  enumcbrt,
+  enumerf,
+  enumexp,
+  enumexp2,
+  enumexpm1,
+  enumlog,
+  enumlog10,
+  enumlog1p,
+  enumlog2,
+  enumtgamma,
+  enumlgamma,
+  enumsin,
+  enumsinh,
+  enumcos,
+  enumcosh,
+  enumsqrt,
+  enumtan,
+  enumtanh,
+  enumj0,
+  enumj1,
+  enumy0,
+  enumy1,
+  //fin shell
+  enum_libm_function1_name_size};
+
+myLibMathFunction1 function1NameTab[enum_libm_function1_name_size]={
+  //shell command to generate  for i in $LIST1 ; do  echo "myLibMathFunction1(\"$i\"),"; done;
+  myLibMathFunction1("acos"),
+  myLibMathFunction1("acosh"),
+  myLibMathFunction1("asin"),
+  myLibMathFunction1("asinh"),
+  myLibMathFunction1("atan"),
+  myLibMathFunction1("atanh"),
+  myLibMathFunction1("cbrt"),
+  myLibMathFunction1("erf"),
+  myLibMathFunction1("exp"),
+  myLibMathFunction1("exp2"),
+  myLibMathFunction1("expm1"),
+  myLibMathFunction1("log"),
+  myLibMathFunction1("log10"),
+  myLibMathFunction1("log1p"),
+  myLibMathFunction1("log2"),
+  myLibMathFunction1("tgamma"),
+  myLibMathFunction1("lgamma"),
+  myLibMathFunction1("sin"),
+  myLibMathFunction1("sinh"),
+  myLibMathFunction1("cos"),
+  myLibMathFunction1("cosh"),
+  myLibMathFunction1("sqrt"),
+  myLibMathFunction1("tan"),
+  myLibMathFunction1("tanh"),
+  myLibMathFunction1("j0"),
+  myLibMathFunction1("j1"),
+  myLibMathFunction1("y0"),
+  myLibMathFunction1("y1"),
+};
+enum Function2Name {
+  enumatan2,
+  enumfmod,
+  enumhypot,
+  enumpow,
+  enumfdim,
+  enumremainder,
+  enum_libm_function2_name_size};
+
+myLibMathFunction2 function2NameTab[enum_libm_function2_name_size]={
+  myLibMathFunction2("atan2"),
+  myLibMathFunction2("fmod"),
+  myLibMathFunction2("hypot"),
+  myLibMathFunction2("pow"),
+  myLibMathFunction2("fdim"),
+  myLibMathFunction2("remainder"),
+};
+
+
+
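+// Call counters, indexed by [function][precision: 0=float, 1=double, 2=long double]
+// [0: instrumented rounding, 1: native libm call].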
+unsigned int libMathCounter1[enum_libm_function1_name_size][3][2];
+unsigned int libMathCounter2[enum_libm_function2_name_size][3][2];
+
+void initLibMathCounter(){
+  for(int i=0; i< enum_libm_function1_name_size;i++){
+    for(int j=0; j< 3; j++){
+      libMathCounter1[i][j][0]=0;
+      libMathCounter1[i][j][1]=0;
+    }
+  }
+  for(int i=0; i< enum_libm_function2_name_size;i++){
+    for(int j=0; j< 3; j++){
+      libMathCounter2[i][j][0]=0;
+      libMathCounter2[i][j][1]=0;
+    }
+  }
+}
+
+template<class>
+struct realTypeIndex;
+template<>
+struct realTypeIndex<float>{
+  static const int index=0;
+};
+template<>
+struct realTypeIndex<double>{
+  static const int index=1;
+};
+template<>
+struct realTypeIndex<long double>{
+  static const int index=2;
+};
+
+
+template<class REALTYPE, int ENUM_LIBM, int INST>
+inline void incCounter1(){
+  libMathCounter1[ENUM_LIBM][realTypeIndex<REALTYPE>::index][INST]++;
+}
+
+template<class REALTYPE, int ENUM_LIBM, int INST>
+inline void incCounter2(){
+  libMathCounter2[ENUM_LIBM][realTypeIndex<REALTYPE>::index][INST]++;
+}
+
+
+
+unsigned int getCounter(int nbParam, int index,  int type, int isInst){
+  if(nbParam==1){
+    return libMathCounter1[index][type][isInst];
+  }
+  if(nbParam==2){
+    return libMathCounter2[index][type][isInst];
+  }
+  return 0;
+};
+
+const char*  verrou_rounding_mode_name_redefined (enum vr_RoundingMode mode) {
+  switch (mode) {
+  case VR_NEAREST:
+    return "NEAREST";
+  case VR_UPWARD:
+    return "UPWARD";
+  case VR_DOWNWARD:
+    return "DOWNWARD";
+  case VR_ZERO:
+    return "TOWARD_ZERO";
+  case VR_RANDOM:
+    return "RANDOM";
+  case VR_AVERAGE:
+    return "AVERAGE";
+  case VR_FARTHEST:
+    return "FARTHEST";
+  case VR_FLOAT:
+    return "FLOAT";
+  case VR_NATIVE:
+    return "NATIVE";
+  }
+
+  return "undefined";
+}
+
+
+void printCounter(){
+  std::cerr  << "=="<<my_pid<<"== "<< "ROUNDINGMODE: "<< verrou_rounding_mode_name_redefined (ROUNDINGMODE)<<std::endl;
+  std::cerr << "=="<<my_pid<<"== " << "Interlibm counter " <<std::endl;
+  std::cerr << "=="<<my_pid<<"== " << "\t\t Total \tInstrumented" <<std::endl;
+
+  for(int nbParam=1; nbParam <=2; nbParam++){
+    int paramSize= (int)enum_libm_function1_name_size;
+    if(nbParam==2){
+      paramSize=(int)enum_libm_function2_name_size;
+    }
+
+    for(int i=0; i< paramSize;i++){
+      std::cerr << "=="<<my_pid<<"== ";
+      std::cerr<<  "---------------------------------------------------"<<std::endl;
+      std::cerr << "=="<<my_pid<<"== ";
+      if(nbParam==1){
+	std::cerr<< function1NameTab[i].name();
+      }
+      if(nbParam==2){
+	std::cerr<< function2NameTab[i].name();
+      }
+
+      int total=0;
+      int totalInst=0;
+      for(int j=0;j<3;j++){
+	total+=getCounter(nbParam,i,j,0)+getCounter(nbParam,i,j,1);
+	totalInst+=getCounter(nbParam,i,j,0);
+      }
+
+      std::cerr<< "\t\t" <<  total << "\t" << totalInst<<std::endl;
+      if(total!=0){
+	std::cerr << "=="<<my_pid<<"== ";
+	std::cerr<< " `-" " flt ";
+	std::cerr<< "\t" <<  getCounter(nbParam,i,0,0)+getCounter(nbParam,i,0,1)  << "\t" << getCounter(nbParam,i,0,0)<<std::endl;
+
+	std::cerr << "=="<<my_pid<<"== ";
+	std::cerr<< " `-" " dbl ";
+	std::cerr<< "\t" <<  getCounter(nbParam,i,1,0)+getCounter(nbParam,i,1,1)  << "\t" << getCounter(nbParam,i,1,0)<<std::endl;
+
+	std::cerr << "=="<<my_pid<<"== ";
+	std::cerr<< " `-" " lgd ";
+	std::cerr<< "\t" <<  getCounter(nbParam,i,2,0)+getCounter(nbParam,i,2,1)  << "\t" << getCounter(nbParam,i,2,0)<<std::endl;
+      }
+    }
+  }
+}
+
+
+template<class LIBMQ, typename REALTYPE >
+class libMathFunction1{
+public:
+  typedef REALTYPE RealType;
+  typedef vr_packArg<RealType,1> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "libmath ?";}
+#endif
+
+  static inline RealType nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    __float128 ref=LIBMQ::apply((__float128)a);
+    return (RealType)ref;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& z) {
+    const RealType & a(p.arg1);
+    __float128 ref=LIBMQ::apply((__float128)a);
+    const __float128 error128=  ref -(__float128)z ;
+    return (RealType)error128;
+  };
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    return error(p,c) ;
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+
+  static inline void check(const PackArgs& p, const RealType& d){
+  };
+
+};
+
+template<class LIBMQ, typename REALTYPE >
+class libMathFunction2{
+public:
+  typedef REALTYPE RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "libmath ?";}
+#endif
+
+  static inline RealType nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+
+    __float128 ref=LIBMQ::apply((__float128)a, (__float128)b);
+    return (RealType)ref;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& z) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+
+    __float128 ref=LIBMQ::apply((__float128)a,(__float128)b);
+    const __float128 error128=  ref -(__float128)z ;
+    return (RealType)error128;
+  };
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    return error(p,c) ;
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+
+  static inline void check(const PackArgs& p, const RealType& d){
+  };
+
+};
+
+
+// template<class REALTYPE>
+// REALTYPE MAGIC(constraint_m1p1)(const REALTYPE& x ){
+//   if(x>1) return 1.;
+//   if(x<-1) return -1.;
+//   return x;
+// }
+
+
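+// Defines the C entry points FCT, FCTf and FCTl for a one-argument libm function
+// (the two-argument variant below is analogous): the double and float versions
+// either call the native libm (VR_NATIVE) or are recomputed through the selected
+// rounding operation using the libquadmath reference; the long double version
+// always falls back to the native implementation.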
+#define DEFINE_INTERP_LIBM1_C_IMPL(FCT)					\
+  struct libmq##FCT{							\
+    static __float128 apply(__float128 a){return FCT##q(a);}		\
+  };									\
+  extern "C"{								\
+  double FCT (double a){						\
+    if(ROUNDINGMODE==VR_NATIVE){					\
+      incCounter1<double, enum##FCT ,1>();				\
+      return function1NameTab[enum##FCT].apply(a);			\
+    }else{								\
+      incCounter1<double, enum##FCT ,0>();				\
+      typedef OpWithSelectedRoundingMode<libMathFunction1<libmq##FCT,double> > Op; \
+      double res;							\
+      VERROU_STOP_INSTRUMENTATION;                                      \
+      Op::apply(Op::PackArgs(a) ,&res,NULL);				\
+      VERROU_START_INSTRUMENTATION;                                     \
+      return res;							\
+    }									\
+  }									\
+									\
+  float FCT##f (float a){						\
+    if(ROUNDINGMODE==VR_NATIVE){					\
+      incCounter1<float, enum##FCT ,1>();				\
+      return function1NameTab[enum##FCT].apply(a);			\
+    }else{								\
+      incCounter1<float, enum##FCT,0>();			       	\
+      VERROU_STOP_INSTRUMENTATION;                                      \
+typedef OpWithSelectedRoundingMode<libMathFunction1<libmq##FCT,float> > Op; \
+      float res;							\
+      Op::apply(Op::PackArgs(a) ,&res,NULL);				\
+      VERROU_START_INSTRUMENTATION;                                     \
+      return res;							\
+    }									\
+  }									\
+									\
+  long double FCT##l (long double a){					\
+    incCounter1<long double, enum##FCT,1>();				\
+    return function1NameTab[enum##FCT].apply(a);			\
+  }									\
+};
+
+#define DEFINE_INTERP_LIBM2_C_IMPL(FCT)					\
+  struct libmq##FCT{							\
+    static __float128 apply(__float128 a,__float128 b){return FCT##q(a,b);} \
+  };									\
+  extern "C"{								\
+    double FCT (double a, double b){					\
+      if(ROUNDINGMODE==VR_NATIVE){					\
+      incCounter2<double, enum##FCT ,1>();				\
+      return function2NameTab[enum##FCT].apply(a,b);       		\
+   }else{							        \
+      incCounter2<double, enum##FCT ,0>();				\
+      typedef OpWithSelectedRoundingMode<libMathFunction2<libmq##FCT,double> > Op; \
+      VERROU_STOP_INSTRUMENTATION;                                      \
+      double res;							\
+      Op::apply(Op::PackArgs(a,b) ,&res,NULL);				\
+      VERROU_START_INSTRUMENTATION;                                     \
+    return res;								\
+    }									\
+  }									\
+									\
+    float FCT##f (float a, float b){						\
+    if(ROUNDINGMODE==VR_NATIVE){					\
+      incCounter2<float, enum##FCT ,1>();				\
+      return function2NameTab[enum##FCT].apply(a,b);			\
+    }else{								\
+      incCounter2<float, enum##FCT,0>();					\
+      typedef OpWithSelectedRoundingMode<libMathFunction2<libmq##FCT,float> > Op; \
+      float res;							\
+      VERROU_STOP_INSTRUMENTATION;                                      \
+      Op::apply(Op::PackArgs(a,b) ,&res,NULL);				\
+      VERROU_START_INSTRUMENTATION;                                     \
+      return res;								\
+    }									\
+  }									\
+									\
+    long double FCT##l (long double a, long double b){				\
+    incCounter2<long double, enum##FCT,1>();				\
+    return function2NameTab[enum##FCT].apply(a,b);			\
+    }									\
+  };
+
+//shell for i in $LIST1 ; do  echo " DEFINE_INTERP_LIBM1_C_IMPL($i);"; done;
+ DEFINE_INTERP_LIBM1_C_IMPL(acos);
+ DEFINE_INTERP_LIBM1_C_IMPL(acosh);
+ DEFINE_INTERP_LIBM1_C_IMPL(asin);
+ DEFINE_INTERP_LIBM1_C_IMPL(asinh);
+ DEFINE_INTERP_LIBM1_C_IMPL(atan);
+ DEFINE_INTERP_LIBM1_C_IMPL(atanh);
+ DEFINE_INTERP_LIBM1_C_IMPL(cbrt);
+ DEFINE_INTERP_LIBM1_C_IMPL(erf);
+ DEFINE_INTERP_LIBM1_C_IMPL(exp);
+// DEFINE_INTERP_LIBM1_C_IMPL(exp2);
+ DEFINE_INTERP_LIBM1_C_IMPL(expm1);
+ DEFINE_INTERP_LIBM1_C_IMPL(log);
+ DEFINE_INTERP_LIBM1_C_IMPL(log10);
+ DEFINE_INTERP_LIBM1_C_IMPL(log1p);
+ DEFINE_INTERP_LIBM1_C_IMPL(log2);
+ DEFINE_INTERP_LIBM1_C_IMPL(tgamma);
+ DEFINE_INTERP_LIBM1_C_IMPL(lgamma);
+ DEFINE_INTERP_LIBM1_C_IMPL(sin);
+ DEFINE_INTERP_LIBM1_C_IMPL(sinh);
+ DEFINE_INTERP_LIBM1_C_IMPL(cos);
+ DEFINE_INTERP_LIBM1_C_IMPL(cosh);
+ DEFINE_INTERP_LIBM1_C_IMPL(sqrt);
+ DEFINE_INTERP_LIBM1_C_IMPL(tan);
+ DEFINE_INTERP_LIBM1_C_IMPL(tanh);
+ DEFINE_INTERP_LIBM1_C_IMPL(j0);
+ DEFINE_INTERP_LIBM1_C_IMPL(j1);
+ DEFINE_INTERP_LIBM1_C_IMPL(y0);
+ DEFINE_INTERP_LIBM1_C_IMPL(y1);
+
+
+DEFINE_INTERP_LIBM2_C_IMPL(atan2);
+DEFINE_INTERP_LIBM2_C_IMPL(fmod);
+DEFINE_INTERP_LIBM2_C_IMPL(hypot);
+DEFINE_INTERP_LIBM2_C_IMPL(pow);
+DEFINE_INTERP_LIBM2_C_IMPL(fdim);
+DEFINE_INTERP_LIBM2_C_IMPL(remainder);
+
+#undef DEFINE_INTERP_LIBM1_C_IMPL
+#undef DEFINE_INTERP_LIBM2_C_IMPL
+
+
+
+void __attribute__((constructor)) init_interlibmath(){
+  struct timeval now;
+  gettimeofday(&now, NULL);
+  my_pid = getpid();
+  unsigned int vr_seed=  now.tv_usec + my_pid;
+  vr_rand_setSeed(&vr_rand, vr_seed);
+
+  ROUNDINGMODE=VR_NATIVE; //Default value
+
+  char* vrm=std::getenv("VERROU_LIBM_ROUNDING_MODE");
+  if(vrm==NULL){
+    vrm=std::getenv("VERROU_ROUNDING_MODE");
+  }
+
+  if(vrm!=NULL){
+    std::string envString(vrm);
+    if(envString==std::string("random")){
+      ROUNDINGMODE=VR_RANDOM;
+    }
+    if(envString==std::string("average")){
+      ROUNDINGMODE=VR_AVERAGE;
+    }
+    if(envString==std::string("nearest")){
+      ROUNDINGMODE=VR_NEAREST;
+    }
+    if(envString==std::string("upward")){
+      ROUNDINGMODE=VR_UPWARD;
+    }
+    if(envString==std::string("downward")){
+      ROUNDINGMODE=VR_DOWNWARD;
+    }
+    if(envString==std::string("toward_zero")){
+      ROUNDINGMODE=VR_ZERO;
+    }
+    if(envString==std::string("farthest")){
+      ROUNDINGMODE=VR_FARTHEST;
+    }
+    if(envString==std::string("float")){
+      ROUNDINGMODE=VR_FLOAT;
+    }
+    if(envString==std::string("native")){
+      ROUNDINGMODE=VR_NATIVE;
+    }
+  }
+
+  initLibMathCounter();
+}
+
+
+void __attribute__((destructor)) finalyze_interlibmath(){
+  printCounter();
+};
diff --git a/verrou/Interlibmath/runExemple.sh b/verrou/Interlibmath/runExemple.sh
new file mode 100755
index 0000000000000000000000000000000000000000..31a7ba39342e117675e795532dc6cd0d9bb6fcb0
--- /dev/null
+++ b/verrou/Interlibmath/runExemple.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+echo "random python "
+VERROU_LIBM_ROUNDING_MODE=random LD_PRELOAD="./interlibmath.so" ./testCos.py  1.1
+
+echo "average python"
+VERROU_ROUNDING_MODE=average LD_PRELOAD="./interlibmath.so" ./testCos.py  1.1
+
+echo "random testCos binary"
+
+VERROU_LIBM_ROUNDING_MODE=random LD_PRELOAD="./interlibmath.so" ./testCos  1.1
+
+echo "native testCos binary"
+VERROU_LIBM_ROUNDING_MODE=native LD_PRELOAD="./interlibmath.so" ./testCos  1.1
diff --git a/verrou/Interlibmath/testCos.cxx b/verrou/Interlibmath/testCos.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..29090c5764b58169446eb0b0bc0447e790c68920
--- /dev/null
+++ b/verrou/Interlibmath/testCos.cxx
@@ -0,0 +1,37 @@
+
+#include<iostream> 
+#include<math.h> 
+
+int main(int argc, char** argv){
+
+  double a(0.1);
+  float af(a);
+  long double al= 1./10.;
+
+  for(int i=0; i<4; i++){
+    std::cout << "diff cos: " <<  cos(a) -cos(a) << std::endl;
+    std::cout << "diff cosf: " <<  cosf(af) -cosf(af) << std::endl;
+  }
+
+  for(int i=0; i<4; i++){
+    std::cout << "diff sin: " <<  sin(a) -sin(a) << std::endl;
+    std::cout << "diff sinf: " <<  sinf(af) -sinf(af) << std::endl;
+  }
+
+  for(int i=0; i<4; i++){
+    std::cout << "diff erf: " <<  erf(a) -erf(a) << std::endl;
+    std::cout << "diff erff: " <<  erff(af) -erff(af) << std::endl;
+  }
+
+  std::cout << "sqrt: "<<sqrt(a)<<std::endl;
+  //  std::cout << "sqrtf: "<<sqrtf(af)<<std::endl;
+  std::cout << "sqrtl: "<<sqrtl(al)<<std::endl;
+  for(int i=0; i<6; i++){
+    std::cout << "diff sqrt: " <<  sqrt(a) -sqrt(a) << std::endl;
+    std::cout << "diff sqrtf: " <<  sqrtf(af) -sqrtf(af) << std::endl;
+    std::cout << "diff sqrtl: " <<  sqrtl(al) -sqrtl(al) << std::endl;
+  }
+
+  std::cout << "atan2: "<<atan2(a,a+0.1)<<std::endl;
+  std::cout << "hypotf: "<<hypotf(a,a+0.1)<<std::endl;
+};
diff --git a/verrou/Interlibmath/testCos.py b/verrou/Interlibmath/testCos.py
new file mode 100755
index 0000000000000000000000000000000000000000..9539df9861b6a7cfe8ad0d3589357143e3fe433b
--- /dev/null
+++ b/verrou/Interlibmath/testCos.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python3
+
+import math
+import sys
+
+x=float(sys.argv[1])
+
+for i in range(4):
+    print("cos diff: ", math.cos(x)-math.cos(x))
+
diff --git a/verrou/Makefile.am b/verrou/Makefile.am
new file mode 100644
index 0000000000000000000000000000000000000000..bf711deca8e9fc72cbdeb4c47d5395de687c2fc7
--- /dev/null
+++ b/verrou/Makefile.am
@@ -0,0 +1,117 @@
+include $(top_srcdir)/Makefile.tool.am
+
+EXTRA_DIST = \
+	docs/vr-manual.xml \
+	docs/verrou_dd-manpage.xml
+
+pkginclude_HEADERS = verrou.h
+
+#----------------------------------------------------------------------------
+# verrou-<platform>
+#----------------------------------------------------------------------------
+
+PYTHON_REP=pyTools
+bin_SCRIPTS = ${PYTHON_REP}/verrou_dd_line ${PYTHON_REP}/verrou_dd_sym ${PYTHON_REP}/cmpCov.py
+pkgpython_PYTHON = ${PYTHON_REP}/DD.py ${PYTHON_REP}/dd_config.py ${PYTHON_REP}/DD_stoch.py ${PYTHON_REP}/DD_exec_stat.py
+
+install-exec-local:
+	mkdir -p ${pkgpythondir}
+	touch ${pkgpythondir}/__init__.py
+	install -t ${prefix} -m 644 env.sh
+
+noinst_PROGRAMS  = verrou-@VGCONF_ARCH_PRI@-@VGCONF_OS@
+if VGCONF_HAVE_PLATFORM_SEC
+noinst_PROGRAMS += verrou-@VGCONF_ARCH_SEC@-@VGCONF_OS@
+endif
+
+VERROU_SOURCES_COMMON = \
+	vr_main.c	\
+	vr_exclude.c	\
+	vr_error.c	\
+	vr_clreq.c	\
+	vr_clo.c	\
+	vr_include_trace.c \
+	backend_verrou/interflop_verrou.cxx\
+	backend_checkcancellation/interflop_checkcancellation.cxx
+
+
+
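+# C++ flags: start from the per-platform C flags and strip the C-only warning
+# flags and -std=gnu99, which are not valid for C++.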
+VERROU_FLAG_CXX = $(subst \
+	-Wmissing-prototypes,,$(subst \
+	-Wstrict-prototypes,,$(subst \
+	-Wmissing-parameter-type,,$(subst \
+	-Wold-style-declaration,,$(subst \
+	-std=gnu99,,$(AM_CFLAGS_@VGCONF_PLATFORM_PRI_CAPS@))))))
+
+VERROU_FLAG_C =
+
+# Disable exceptions and RTTI to avoid problems with `__gxx_personality_v0'
+# Enable c++11 for std::uint64_t
+VERROU_FLAG_CXX += -fno-exceptions -fno-rtti -std=c++11
+
+# Reuse the fma test normally used by the configure check
+if HAVE_FMA_INTRIN
+VERROU_FLAG_CXX += -march=native -mfma -DUSE_VERROU_FMA
+VERROU_FLAG_C   += -march=native -DUSE_VERROU_FMA
+endif
+
+if USE_QUAD
+VERROU_SOURCES_COMMON+= \
+	backend_mcaquad/interflop_mcaquad.c\
+	backend_mcaquad/common/tinymt64.c\
+	backend_mcaquad/common/fmaqApprox.c
+VERROU_FLAG_CXX +=  -DUSE_VERROU_QUAD
+VERROU_FLAG_C   +=  -DUSE_VERROU_QUAD
+endif
+
+
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_SOURCES      = \
+	$(VERROU_SOURCES_COMMON)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_CPPFLAGS     = \
+	$(AM_CPPFLAGS_@VGCONF_PLATFORM_PRI_CAPS@)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_CFLAGS       = \
+	$(AM_CFLAGS_@VGCONF_PLATFORM_PRI_CAPS@)\
+	$(VERROU_FLAG_C)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_CXXFLAGS     = \
+	$(VERROU_FLAG_CXX)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_DEPENDENCIES = \
+	$(TOOL_DEPENDENCIES_@VGCONF_PLATFORM_PRI_CAPS@)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_LDADD        = \
+	$(TOOL_LDADD_@VGCONF_PLATFORM_PRI_CAPS@)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_LDFLAGS      = \
+	$(TOOL_LDFLAGS_@VGCONF_PLATFORM_PRI_CAPS@)
+verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_LINK = \
+	$(top_builddir)/coregrind/link_tool_exe_@VGCONF_OS@ \
+	@VALT_LOAD_ADDRESS_PRI@ \
+	$(LINK) \
+	$(verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_CFLAGS) \
+	$(verrou_@VGCONF_ARCH_PRI@_@VGCONF_OS@_LDFLAGS)
+
+
+
+
+
+if VGCONF_HAVE_PLATFORM_SEC
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_SOURCES      = \
+	$(VERROU_SOURCES_COMMON)
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_CPPFLAGS     = \
+	$(AM_CPPFLAGS_@VGCONF_PLATFORM_SEC_CAPS@)
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_CFLAGS       = \
+	$(AM_CFLAGS_@VGCONF_PLATFORM_SEC_CAPS@)
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_CXXFLAGS     = \
+	$(AM_CFLAGS_@VGCONF_PLATFORM_SEC_CAPS@)     \
+	-fno-exceptions -fno-rtti
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_DEPENDENCIES = \
+	$(TOOL_DEPENDENCIES_@VGCONF_PLATFORM_SEC_CAPS@)
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_LDADD        = \
+	$(TOOL_LDADD_@VGCONF_PLATFORM_SEC_CAPS@)    \
+	-lm -lc
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_LDFLAGS      = \
+	$(TOOL_LDFLAGS_@VGCONF_PLATFORM_SEC_CAPS@)
+verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_LINK = \
+	$(top_builddir)/coregrind/link_tool_exe_@VGCONF_OS@ \
+	@VALT_LOAD_ADDRESS_SEC@ \
+	$(LINK) \
+	$(verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_CFLAGS) \
+	$(verrou_@VGCONF_ARCH_SEC@_@VGCONF_OS@_LDFLAGS)
+endif
diff --git a/verrou/README.md b/verrou/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2b8974a08aa28d889bacc3f4e2237868fd0877df
--- /dev/null
+++ b/verrou/README.md
@@ -0,0 +1,167 @@
+# Verrou
+
+[![Build Status](https://travis-ci.org/edf-hpc/verrou.svg?branch=master)](https://travis-ci.org/edf-hpc/verrou) 
+[![Documentation](https://img.shields.io/badge/docs-latest-blue.svg)](http://edf-hpc.github.io/verrou/vr-manual.html)
+
+Verrou helps you look for floating-point round-off errors in programs. It
+implements various forms of arithmetic, including:
+
+- all IEEE-754 standard rounding modes;
+
+- two variants of stochastic floating-point arithmetic based on random rounding:
+  all floating-point operations are perturbed by randomly switching rounding
+  modes. These can be seen as an asynchronous variant of the CESTAC method, or a
+  subset of Monte Carlo Arithmetic, performing only output randomization through
+  random rounding (a minimal sketch of this idea is shown after this list);
+
+- an emulation of single-precision rounding, in order to test the effect of
+  reduced precision without any need to change the source code.
+
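+To give a concrete idea of the random rounding mentioned above, here is a minimal
+C++ sketch (illustration only -- this is *not* Verrou's implementation, which
+instruments compiled binaries without touching the source code). It randomizes a
+single addition using the TwoSum error-free transformation:
+
+    #include <cmath>
+    #include <random>
+
+    // Return, with probability 1/2 each, one of the two representable
+    // neighbours of the exact sum a + b.
+    double add_random_rounding(double a, double b, std::mt19937_64 &gen) {
+      const double s   = a + b;                      // round-to-nearest result
+      const double bp  = s - a;                      // TwoSum: recover the exact
+      const double err = (a - (s - bp)) + (b - bp);  // rounding error of s
+      if (err == 0.0) return s;                      // sum was exact, keep it
+      const double other = std::nextafter(s, err > 0.0 ? INFINITY : -INFINITY);
+      std::bernoulli_distribution coin(0.5);
+      return coin(gen) ? s : other;                  // pick one of the two roundings
+    }
+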
+Verrou also comes with a `verrou_dd` utility, which simplifies the Verrou-based
+debugging process by implementing several variants of the Delta-Debugging
+algorithm. This makes it easy to locate the parts of the analyzed source code
+that are likely to be responsible for floating-point instabilities.
+
+The documentation for Verrou is available as a dedicated [chapter in the
+Valgrind manual](http://edf-hpc.github.io/verrou/vr-manual.html).
+
+
+## Installation
+
+### Get the sources
+
+The preferred way to get Verrou sources is to download the latest *stable*
+version: [v2.2.0](https://github.com/edf-hpc/verrou/releases/latest).
+Older versions are available on the [releases](https://github.com/edf-hpc/verrou/releases)
+page. After downloading one of the released versions, skip to the "Configure
+and build" section below.
+
+<p>&nbsp;</p>
+
+In order to build the *development* version of Verrou, it is necessary to first
+download a specific Valgrind version and patch it. Fetch Valgrind's sources:
+
+    git clone --branch=24f63fd435e7bdab5058f2ab52953eb53a768010 --single-branch git://sourceware.org/git/valgrind.git valgrind-3.16.1+verrou-dev
+
+Add verrou's sources to it:
+
+    cd valgrind-3.16.1+verrou-dev
+    git clone --branch=master --single-branch https://github.com/edf-hpc/verrou.git verrou
+
+    patch -p1 <verrou/valgrind.diff
+
+
+### Configure and build
+
+First, install all required dependencies (the names of the relevant Debian
+packages are given in parentheses as examples):
+
+- C & C++ compilers (`build-essential`),
+- autoconf & automake (`automake`),
+- Python 3 (`python3`),
+- C standard library with debugging symbols (`libc6-dbg`).
+
+<p>&nbsp;</p>
+
+Configure valgrind:
+
+    ./autogen.sh
+    ./configure --enable-only64bit --enable-verrou-fma --prefix=PREFIX
+
+As in the command above, it is recommended to use the `--enable-verrou-fma` flag
+if your system supports FMA (Fused Multiply-Add) instructions. Depending on your
+system, it may be necessary to set `CFLAGS` so that your compiler enables the
+use of FMA:
+
+    ./configure --enable-only64bit --enable-verrou-fma --prefix=PREFIX CFLAGS="-mfma"
+
+Systems that don't support FMA instructions can drop the `--enable-verrou-fma`
+configure switch, but be aware that this causes some tests to fail:
+
+    ./configure --enable-only64bit --prefix=PREFIX
+
+<p>&nbsp;</p>
+
+Build and install:
+
+    make
+    make install
+
+
+### Load the environment
+
+In order to actually use Verrou, you must load the correct environment. This can
+be done using:
+
+    source PREFIX/env.sh
+
+
+### Test (optional)
+
+#### General tests
+
+You can test the whole platform:
+
+    make check
+    perl tests/vg_regtest --all
+    
+or only verrou:
+
+    make -C tests check
+    make -C verrou check
+    perl tests/vg_regtest verrou
+    
+    
+#### Specific tests
+
+These tests are more closely related to the arithmetic part of Verrou:
+
+    make -C verrou/unitTest
+
+
+## Documentation
+
+The documentation for Verrou is available as a
+[chapter in the Valgrind manual](http://edf-hpc.github.io/verrou/vr-manual.html).
+
+<p>&nbsp;</p>
+
+You can also re-build it:
+
+    make -C docs html-docs man-pages
+
+and browse it locally:
+
+    iceweasel docs/html/vr-manual.html
+
+
+Beware: this requires several tools which are not necessarily checked for by
+`configure`, including (but not necessarily limited to):
+
+  - xsltproc
+  - docbook-xsl
+
+
+## Bibliography & References
+
+The following papers explain the internals of Verrou in more detail, as well as
+some of its applications. If you use Verrou in a research work, please consider
+citing one of these references:
+
+1. François Févotte and Bruno Lathuilière. Debugging and optimization of HPC
+   programs with the Verrou tool. In *International Workshop on Software
+   Correctness for HPC Applications (Correctness)*, Denver, CO, USA,
+   Nov. 2019. [DOI: 10.1109/Correctness49594.2019.00006](http://dx.doi.org/10.1109/Correctness49594.2019.00006)
+1. Hadrien Grasland, François Févotte, Bruno Lathuilière, and David
+   Chamont. Floating-point profiling of ACTS using Verrou. *EPJ Web Conf.*, 214, 2019.
+   [DOI: 10.1051/epjconf/201921405025](http://dx.doi.org/10.1051/epjconf/201921405025)
+1. François Févotte and Bruno Lathuilière. Studying the numerical quality of an
+   industrial computing code: A case study on code_aster. In *10th International
+   Workshop on Numerical Software Verification (NSV)*, pages 61--80, Heidelberg,
+   Germany,
+   July 2017. [DOI: 10.1007/978-3-319-63501-9_5](http://dx.doi.org/10.1007/978-3-319-63501-9_5)
+1. François Févotte and Bruno Lathuilière. VERROU: a CESTAC evaluation without
+   recompilation. In *International Symposium on Scientific Computing, Computer
+   Arithmetics and Verified Numerics (SCAN)*, Uppsala, Sweden, September 2016.
+
+(These references are also available in [bibtex format](verrou.bib))
diff --git a/verrou/backend_checkcancellation/.dirstamp b/verrou/backend_checkcancellation/.dirstamp
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/verrou/backend_checkcancellation/interflop_checkcancellation.cxx b/verrou/backend_checkcancellation/interflop_checkcancellation.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..dd83e624adc1f7eab2cca67a612c78b2317e6e41
--- /dev/null
+++ b/verrou/backend_checkcancellation/interflop_checkcancellation.cxx
@@ -0,0 +1,151 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                                 vr_fpOps.cxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "interflop_checkcancellation.h"
+#include "vr_fpRepr.hxx"
+#include <stddef.h>
+
+
+checkcancellation_conf_t checkcancellation_conf;
+
+template <typename REAL>
+void ifcc_checkCancellation (const REAL & a, const REAL & b, const REAL & r);
+
+
+// * Global variables & parameters
+
+void (*ifcc_cancellationHandler)(int)=NULL;
+void (*ifcc_panicHandler)(const char*)=NULL;
+
+void checkcancellation_set_cancellation_handler(void (*cancellationHandler)(int)){
+  ifcc_cancellationHandler=cancellationHandler;
+}
+
+void checkcancellation_set_panic_handler(void (*panicHandler)(const char*)){
+  ifcc_panicHandler=panicHandler;
+}
+
+template<typename REAL>
+int ifcc_threshold(const REAL& a);
+
+template<>
+int ifcc_threshold(const float& a){
+  return checkcancellation_conf.threshold_float;
+}
+template<>
+int ifcc_threshold(const double& a){
+  return checkcancellation_conf.threshold_double;
+}
+
+
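+// Cancellation heuristic: compare the exponent of the result with the largest
+// exponent of the two operands.  If the difference (i.e. the number of leading
+// bits lost in the operation) reaches the configured threshold, the
+// cancellation handler is called with that bit count.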
+template <typename REAL>
+inline
+void ifcc_checkCancellation (const REAL & a, const REAL & b, const REAL & r) {
+
+  const int ea = exponentField (a);
+  const int eb = exponentField (b);
+  const int er = exponentField (r);
+
+  const int emax = ea>eb ? ea : eb;
+  const int cancelled = emax - er;
+
+  if (cancelled >= ifcc_threshold(a)) {
+    ifcc_cancellationHandler(cancelled);
+  }
+}
+
+
+
+// * C interface
+void IFCC_FCTNAME(configure)(checkcancellation_conf_t mode, void* context) {
+  checkcancellation_conf=mode;
+}
+
+void IFCC_FCTNAME(finalyze)(void* context){
+}
+
+const char* IFCC_FCTNAME(get_backend_name)() {
+  return "checkcancellation";
+}
+
+const char* IFCC_FCTNAME(get_backend_version)() {
+  return "1.x-dev";
+}
+
+void IFCC_FCTNAME(add_double) (double a, double b, double* res,void* context) {
+  ifcc_checkCancellation(a,b,*res);
+}
+
+void IFCC_FCTNAME(add_float) (float a, float b, float* res,void* context) {
+  ifcc_checkCancellation(a,b,*res);
+}
+
+void IFCC_FCTNAME(sub_double) (double a, double b, double* res,void* context) {
+  ifcc_checkCancellation(a,b,*res);
+}
+
+void IFCC_FCTNAME(sub_float) (float a, float b, float* res,void* context) {
+  ifcc_checkCancellation(a,b,*res);
+}
+
+
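+// For fused multiply-add operations, the same check is applied to the pair
+// (a*b, c); a*b is evaluated here in working precision, which is enough to
+// estimate its exponent for the cancellation test.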
+void IFCC_FCTNAME(madd_double) (double a, double b, double c, double* res, void* context){
+  ifcc_checkCancellation(a*b,c,*res);
+}
+
+void IFCC_FCTNAME(madd_float) (float a, float b, float c, float* res, void* context){
+  ifcc_checkCancellation(a*b,c,*res);
+}
+
+
+
+
+struct interflop_backend_interface_t IFCC_FCTNAME(init)(void ** context){
+  struct interflop_backend_interface_t config;
+
+  config.interflop_add_float = & IFCC_FCTNAME(add_float);
+  config.interflop_sub_float = & IFCC_FCTNAME(sub_float);
+  config.interflop_mul_float = NULL;
+  config.interflop_div_float = NULL;
+
+  config.interflop_add_double = & IFCC_FCTNAME(add_double);
+  config.interflop_sub_double = & IFCC_FCTNAME(sub_double);
+  config.interflop_mul_double = NULL;
+  config.interflop_div_double = NULL;
+
+  config.interflop_cast_double_to_float=NULL;
+
+  config.interflop_madd_float = & IFCC_FCTNAME(madd_float);
+  config.interflop_madd_double =& IFCC_FCTNAME(madd_double);
+
+  return config;
+}
diff --git a/verrou/backend_checkcancellation/interflop_checkcancellation.h b/verrou/backend_checkcancellation/interflop_checkcancellation.h
new file mode 100644
index 0000000000000000000000000000000000000000..0aa96a891cd67be4116cf12e143b58905396a861
--- /dev/null
+++ b/verrou/backend_checkcancellation/interflop_checkcancellation.h
@@ -0,0 +1,85 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                                   vr_fpops.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __INTERFLOP_CHECKCANCELLATION_H
+#define __INTERFLOP_CHECKCANCELLATION_H
+
+//#define DEBUG_PRINT_OP
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#define IFCC_FCTNAME(FCT) interflop_checkcancellation_##FCT
+
+#include "../interflop_backend_interface.h"
+
+   struct checkcancellation_conf {
+     unsigned int threshold_float;
+     unsigned int threshold_double;
+  };
+
+  typedef struct checkcancellation_conf checkcancellation_conf_t;
+
+
+  void IFCC_FCTNAME(configure)(checkcancellation_conf_t mode,void* context);
+  void IFCC_FCTNAME(finalyze)(void* context);
+
+  const char* IFCC_FCTNAME(get_backend_name)(void);
+  const char* IFCC_FCTNAME(get_backend_version)(void);
+
+  void checkcancellation_set_cancellation_handler(void (*)(int));
+
+  extern void (*vr_panicHandler)(const char*);
+  void checkcancellation_set_panic_handler(void (*)(const char*));
+
+  struct interflop_backend_interface_t IFCC_FCTNAME(init)(void ** context);
+   
+  void IFCC_FCTNAME(add_double) (double a, double b, double* res, void* context);    
+  void IFCC_FCTNAME(add_float)  (float a,  float b,  float*  res, void* context);
+  void IFCC_FCTNAME(sub_double) (double a, double b, double* res, void* context);
+  void IFCC_FCTNAME(sub_float)  (float a,  float b,  float*  res, void* context);
+   //void IFCC_FCTNAME(mul_double) (double a, double b, double* res, void* context);
+   //void IFCC_FCTNAME(mul_float)  (float a,  float b,  float*  res, void* context);
+   //void IFCC_FCTNAME(div_double) (double a, double b, double* res, void* context);
+   //void IFCC_FCTNAME(div_float)  (float a,  float b,  float*  res, void* context);
+
+   //void IFCC_FCTNAME(cast_double_to_float) (double a, float* b, void* context);
+
+  void IFCC_FCTNAME(madd_double)(double a, double b, double c, double* res, void* context);
+  void IFCC_FCTNAME(madd_float) (float a,  float b,  float c,  float*  res, void* context);
+
+  
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* ndef __INTERFLOP_CHECKCANCELLATION_H */
diff --git a/verrou/backend_checkcancellation/test_main.cxx b/verrou/backend_checkcancellation/test_main.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..dd8e83e9bd5965c790872e3083291afa2462cf77
--- /dev/null
+++ b/verrou/backend_checkcancellation/test_main.cxx
@@ -0,0 +1,43 @@
+#include "interflop_checkcancellation.h"
+#include <stdio.h>
+#include <iostream>
+#include <iomanip>
+
+
+
+void printCancellation(int range){
+  std::cout << "Cancellation deteted : "<<range << std::endl;
+}
+
+int main(int argc, char** argv){
+
+  void* context;
+  struct interflop_backend_interface_t ifcheckcancellation=interflop_checkcancellation_init(&context);
+
+  //  interflop_checkcancellation_configure(VR_NEAREST, context);
+  checkcancellation_conf conf;
+  conf.threshold_float=15;
+  conf.threshold_double=40;
+  interflop_checkcancellation_configure(conf, context);
+
+  checkcancellation_set_cancellation_handler(&printCancellation);
+
+  double a=0.1000000000001;
+  double b=0.10;
+  double c=a-b;
+
+  float af=0.101;
+  float bf=0.10;
+  float cf=af-bf;
+
+
+  interflop_checkcancellation_sub_double(a,b,&c,context);
+  interflop_checkcancellation_sub_float(af,bf,&cf,context);
+
+  std::cout << std::setprecision(16);
+  std::cout << "c: "<<c << std::endl;
+
+  interflop_checkcancellation_finalyze(context);
+
+  return 0;
+}
diff --git a/verrou/backend_checkcancellation/vr_fpRepr.hxx b/verrou/backend_checkcancellation/vr_fpRepr.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..1a695d1d3d5445ae3de02dd5a1d8e52def8e7578
--- /dev/null
+++ b/verrou/backend_checkcancellation/vr_fpRepr.hxx
@@ -0,0 +1,210 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Utilities for easier manipulation of floating-point values.  ---*/
+/*---                                                vr_fpRepr.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+//#include <string>
+//#include <sstream>
+#include <math.h>
+#include <cfloat>
+#include <stdint.h>
+#include <limits>
+#ifdef VALGRIND_DEBUG_VERROU
+extern "C" {
+#include "pub_tool_libcprint.h"
+#include "pub_tool_libcassert.h"
+}
+#endif
+
+//#include "interflop_verrou.h"
+
+
+// * Real types storage
+
+// ** Internal functions
+
+// IEEE754-like binary floating point number representation:
+//
+// Real:     corresponding C type (float/double)
+// BitField: type of the corresponding bit field
+// SIGN:     number of sign bits
+// EXP:      number of exponent bits
+// MANT:     number of mantissa (aka fraction) bits
+template <typename Real, typename BitField, int SIGN, int EXP, int MANT>
+class FPRepr {
+
+public:
+  typedef Real RealType;
+
+  // Integer value of the exponent field of the given real
+  //
+  // Warning: this value is shifted. The real exponent of x is:
+  //    exponentField(x) - exponentShift()
+  //
+  // x: floating point value
+  static inline int exponentField (const Real & x) {
+    const BitField *xx = (const BitField*)(&x);
+    return bitrange<MANT, EXP> (xx);
+  }
+
+  // Smallest floating point increment for a given value.
+  //
+  // x: floating point value around which to compute the ulp
+  static Real ulp (const Real & x) {
+    const int exponent = exponentField(x);
+
+    Real ret = 0;
+    BitField & ulp = *((BitField*)&ret);
+    int exponentULP = exponent-MANT;
+
+    if (exponentULP < 0) {
+      // ULP is a subnormal number:
+      //    exp  = 0
+      //    mant = 1 in the last place
+      exponentULP = 0;
+      ulp += 1;
+    }
+
+    ulp += ((BitField)exponentULP) << MANT;
+
+    return ret;
+  }
+
+  static int sign(const Real& x){
+    const BitField *xx = (BitField*)(&x);
+    const int sign = bitrange<MANT+EXP, SIGN> (xx);
+    return sign;
+  }
+
+
+  static inline void pp (const Real & x) {
+    //    std::ostringstream oss;
+
+    const BitField *xx = (BitField*)(&x);
+    const int sign = bitrange<MANT+EXP, SIGN> (xx);
+    const int expField = exponentField(x);
+    BitField mantissa = bitrange<0, MANT> (xx);
+    int exponent = expField-exponentShift();
+
+    if (expField == 0) {
+      // Subnormal floating-point number
+      exponent += 1;
+    } else {
+      // Normal floating-point number
+      mantissa += ((BitField)1<<MANT);
+    }
+
+    //    oss << (sign==0?" ":"-") << mantissa << " * 2**" << exponent;
+#ifdef VALGRIND_DEBUG_VERROU
+    VG_(printf)( (sign==0?" ":"-"));
+    VG_(printf)("%lu",mantissa);
+    VG_(printf)(" * 2**%d  ", exponent);
+#endif
+  //    return oss.str();
+  }
+
+
+
+
+  static inline int storedBits () {
+    return MANT;
+  }
+
+private:
+  static inline int exponentShift () {
+    return (1 << (EXP-1)) - 1 + MANT;
+  }
+
+  // Return a range in a bit field.
+  //
+  // BitField: type of the bit field
+  // begin:    index of the first interesting bit
+  // size:     number of desired bits
+  // x:        pointer to the bit field
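+  //
+  // Example (illustration only): for a double (BitField = uint64_t), the call
+  // bitrange<MANT, EXP> extracts the 11-bit exponent field stored in bits 52..62.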
+  template <int BEGIN, int SIZE>
+  static inline BitField bitrange (BitField const*const x) {
+    BitField ret = *x;
+
+    const int leftShift = 8*sizeof(BitField)-BEGIN-SIZE;
+    if (leftShift > 0)
+      ret = ret << leftShift;
+
+    const int rightShift = BEGIN + leftShift;
+    if (rightShift > 0)
+      ret = ret >> rightShift;
+
+    return ret;
+  }
+
+
+};
+
+
+// ** Interface for simple & double precision FP numbers
+
+template <typename Real> struct FPType;
+
+template <> struct FPType<float> {
+  typedef FPRepr<float, uint32_t, 1,  8, 23>  Repr;
+};
+
+template <> struct FPType<double> {
+  typedef FPRepr<double, uint64_t, 1, 11, 52>  Repr;
+};
+
+// Smallest floating point increment for IEEE754 binary formats
+template <typename Real> Real ulp (const Real & x) {
+  return FPType<Real>::Repr::ulp (x);
+}
+
+// Pretty-print representation
+/*
+template <typename Real> std::string ppReal (const Real & x) {
+  return FPType<Real>::Repr::pp (x);
+  }*/
+
+// Exponent field
+template <typename Real> int exponentField (const Real & x) {
+  return FPType<Real>::Repr::exponentField (x);
+}
+
+// Number of significant bits
+template <typename Real> int storedBits (const Real & x) {
+  return FPType<Real>::Repr::storedBits();
+}
+
+// sign
+template <typename Real> int sign (const Real & x) {
+  return FPType<Real>::Repr::sign(x);
+}
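+
+// Example values (illustration only, not used by Verrou): for a double x = 1.0,
+// exponentField(x) == 1023 (the raw biased exponent field) and
+// ulp(x) == 2^-52 == DBL_EPSILON (about 2.22e-16).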
+
+
+
diff --git a/verrou/backend_mcaquad/common/fmaqApprox.c b/verrou/backend_mcaquad/common/fmaqApprox.c
new file mode 100644
index 0000000000000000000000000000000000000000..55c6ce6323885d2755ecd4d46e23a6daad41ee00
--- /dev/null
+++ b/verrou/backend_mcaquad/common/fmaqApprox.c
@@ -0,0 +1,315 @@
+/*
+   This file comes from the gcc libquadmath sources:
+wget https://raw.githubusercontent.com/gcc-mirror/gcc/529ebc2a706c4223dc8068a32a195b6c400d1f2d/libquadmath/math/fmaq.c
+
+Modifications:
+renamed fmaq -> fmaqApprox, since we do not necessarily have access to fenv;
+fenv and related operations are disabled
+
+*/
+/* Compute x * y + z as ternary operation.
+   Copyright (C) 2010-2018 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+   Contributed by Jakub Jelinek <jakub@redhat.com>, 2010.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, see
+   <http://www.gnu.org/licenses/>.  */
+
+#include "fmaqApprox.h"
+#undef HAVE_FENV_H
+#include "quadmath-imp.h"
+
+/* This implementation uses rounding to odd to avoid problems with
+   double rounding.  See a paper by Boldo and Melquiond:
+   http://www.lri.fr/~melquion/doc/08-tc.pdf  */
+
+__float128
+fmaqApprox (__float128 x, __float128 y, __float128 z)
+{
+  ieee854_float128 u, v, w;
+  int adjust = 0;
+  u.value = x;
+  v.value = y;
+  w.value = z;
+  if (__builtin_expect (u.ieee.exponent + v.ieee.exponent
+			>= 0x7fff + IEEE854_FLOAT128_BIAS
+			   - FLT128_MANT_DIG, 0)
+      || __builtin_expect (u.ieee.exponent >= 0x7fff - FLT128_MANT_DIG, 0)
+      || __builtin_expect (v.ieee.exponent >= 0x7fff - FLT128_MANT_DIG, 0)
+      || __builtin_expect (w.ieee.exponent >= 0x7fff - FLT128_MANT_DIG, 0)
+      || __builtin_expect (u.ieee.exponent + v.ieee.exponent
+			   <= IEEE854_FLOAT128_BIAS + FLT128_MANT_DIG, 0))
+    {
+      /* If z is Inf, but x and y are finite, the result should be
+	 z rather than NaN.  */
+      if (w.ieee.exponent == 0x7fff
+	  && u.ieee.exponent != 0x7fff
+          && v.ieee.exponent != 0x7fff)
+	return (z + x) + y;
+      /* If z is zero and x are y are nonzero, compute the result
+	 as x * y to avoid the wrong sign of a zero result if x * y
+	 underflows to 0.  */
+      if (z == 0 && x != 0 && y != 0)
+	return x * y;
+      /* If x or y or z is Inf/NaN, or if x * y is zero, compute as
+	 x * y + z.  */
+      if (u.ieee.exponent == 0x7fff
+	  || v.ieee.exponent == 0x7fff
+	  || w.ieee.exponent == 0x7fff
+	  || x == 0
+	  || y == 0)
+	return x * y + z;
+      /* If fma will certainly overflow, compute as x * y.  */
+      if (u.ieee.exponent + v.ieee.exponent
+	  > 0x7fff + IEEE854_FLOAT128_BIAS)
+	return x * y;
+      /* If x * y is less than 1/4 of FLT128_TRUE_MIN, neither the
+	 result nor whether there is underflow depends on its exact
+	 value, only on its sign.  */
+      if (u.ieee.exponent + v.ieee.exponent
+	  < IEEE854_FLOAT128_BIAS - FLT128_MANT_DIG - 2)
+	{
+	  int neg = u.ieee.negative ^ v.ieee.negative;
+	  __float128 tiny = neg ? -0x1p-16494Q : 0x1p-16494Q;
+	  if (w.ieee.exponent >= 3)
+	    return tiny + z;
+	  /* Scaling up, adding TINY and scaling down produces the
+	     correct result, because in round-to-nearest mode adding
+	     TINY has no effect and in other modes double rounding is
+	     harmless.  But it may not produce required underflow
+	     exceptions.  */
+	  v.value = z * 0x1p114Q + tiny;
+	  if (TININESS_AFTER_ROUNDING
+	      ? v.ieee.exponent < 115
+	      : (w.ieee.exponent == 0
+		 || (w.ieee.exponent == 1
+		     && w.ieee.negative != neg
+		     && w.ieee.mantissa3 == 0
+		     && w.ieee.mantissa2 == 0
+		     && w.ieee.mantissa1 == 0
+		     && w.ieee.mantissa0 == 0)))
+	    {
+	      __float128 force_underflow = x * y;
+	      math_force_eval (force_underflow);
+	    }
+	  return v.value * 0x1p-114Q;
+	}
+      if (u.ieee.exponent + v.ieee.exponent
+	  >= 0x7fff + IEEE854_FLOAT128_BIAS - FLT128_MANT_DIG)
+	{
+	  /* Compute 1p-113 times smaller result and multiply
+	     at the end.  */
+	  if (u.ieee.exponent > v.ieee.exponent)
+	    u.ieee.exponent -= FLT128_MANT_DIG;
+	  else
+	    v.ieee.exponent -= FLT128_MANT_DIG;
+	  /* If x + y exponent is very large and z exponent is very small,
+	     it doesn't matter if we don't adjust it.  */
+	  if (w.ieee.exponent > FLT128_MANT_DIG)
+	    w.ieee.exponent -= FLT128_MANT_DIG;
+	  adjust = 1;
+	}
+      else if (w.ieee.exponent >= 0x7fff - FLT128_MANT_DIG)
+	{
+	  /* Similarly.
+	     If z exponent is very large and x and y exponents are
+	     very small, adjust them up to avoid spurious underflows,
+	     rather than down.  */
+	  if (u.ieee.exponent + v.ieee.exponent
+	      <= IEEE854_FLOAT128_BIAS + 2 * FLT128_MANT_DIG)
+	    {
+	      if (u.ieee.exponent > v.ieee.exponent)
+		u.ieee.exponent += 2 * FLT128_MANT_DIG + 2;
+	      else
+		v.ieee.exponent += 2 * FLT128_MANT_DIG + 2;
+	    }
+	  else if (u.ieee.exponent > v.ieee.exponent)
+	    {
+	      if (u.ieee.exponent > FLT128_MANT_DIG)
+		u.ieee.exponent -= FLT128_MANT_DIG;
+	    }
+	  else if (v.ieee.exponent > FLT128_MANT_DIG)
+	    v.ieee.exponent -= FLT128_MANT_DIG;
+	  w.ieee.exponent -= FLT128_MANT_DIG;
+	  adjust = 1;
+	}
+      else if (u.ieee.exponent >= 0x7fff - FLT128_MANT_DIG)
+	{
+	  u.ieee.exponent -= FLT128_MANT_DIG;
+	  if (v.ieee.exponent)
+	    v.ieee.exponent += FLT128_MANT_DIG;
+	  else
+	    v.value *= 0x1p113Q;
+	}
+      else if (v.ieee.exponent >= 0x7fff - FLT128_MANT_DIG)
+	{
+	  v.ieee.exponent -= FLT128_MANT_DIG;
+	  if (u.ieee.exponent)
+	    u.ieee.exponent += FLT128_MANT_DIG;
+	  else
+	    u.value *= 0x1p113Q;
+	}
+      else /* if (u.ieee.exponent + v.ieee.exponent
+		  <= IEEE854_FLOAT128_BIAS + FLT128_MANT_DIG) */
+	{
+	  if (u.ieee.exponent > v.ieee.exponent)
+	    u.ieee.exponent += 2 * FLT128_MANT_DIG + 2;
+	  else
+	    v.ieee.exponent += 2 * FLT128_MANT_DIG + 2;
+	  if (w.ieee.exponent <= 4 * FLT128_MANT_DIG + 6)
+	    {
+	      if (w.ieee.exponent)
+		w.ieee.exponent += 2 * FLT128_MANT_DIG + 2;
+	      else
+		w.value *= 0x1p228Q;
+	      adjust = -1;
+	    }
+	  /* Otherwise x * y should just affect inexact
+	     and nothing else.  */
+	}
+      x = u.value;
+      y = v.value;
+      z = w.value;
+    }
+
+  /* Ensure correct sign of exact 0 + 0.  */
+  if (__glibc_unlikely ((x == 0 || y == 0) && z == 0))
+    {
+       x = math_opt_barrier (x);
+      return x * y + z;
+    }
+
+  fenv_t env;
+  feholdexcept (&env);
+  fesetround (FE_TONEAREST);
+
+  /* Multiplication m1 + m2 = x * y using Dekker's algorithm.  */
+#define C ((1LL << (FLT128_MANT_DIG + 1) / 2) + 1)
+  __float128 x1 = x * C;
+  __float128 y1 = y * C;
+  __float128 m1 = x * y;
+  x1 = (x - x1) + x1;
+  y1 = (y - y1) + y1;
+  __float128 x2 = x - x1;
+  __float128 y2 = y - y1;
+  __float128 m2 = (((x1 * y1 - m1) + x1 * y2) + x2 * y1) + x2 * y2;
+
+  /* Addition a1 + a2 = z + m1 using Knuth's algorithm.  */
+  __float128 a1 = z + m1;
+  __float128 t1 = a1 - z;
+  __float128 t2 = a1 - t1;
+  t1 = m1 - t1;
+  t2 = z - t2;
+  __float128 a2 = t1 + t2;
+  /* Ensure the arithmetic is not scheduled after feclearexcept call.  */
+  math_force_eval (m2);
+  math_force_eval (a2);
+  feclearexcept (FE_INEXACT);
+
+  /* If the result is an exact zero, ensure it has the correct sign.  */
+  if (a1 == 0 && m2 == 0)
+    {
+      feupdateenv (&env);
+      /* Ensure that round-to-nearest value of z + m1 is not reused.  */
+      z = math_opt_barrier (z);
+      return z + m1;
+    }
+
+  fesetround (FE_TOWARDZERO);
+  /* Perform m2 + a2 addition with round to odd.  */
+  u.value = a2 + m2;
+
+  if (__glibc_likely (adjust == 0))
+    {
+      if ((u.ieee.mantissa3 & 1) == 0 && u.ieee.exponent != 0x7fff)
+	u.ieee.mantissa3 |= fetestexcept (FE_INEXACT) != 0;
+      feupdateenv (&env);
+      /* Result is a1 + u.value.  */
+      return a1 + u.value;
+    }
+  else if (__glibc_likely (adjust > 0))
+    {
+      if ((u.ieee.mantissa3 & 1) == 0 && u.ieee.exponent != 0x7fff)
+	u.ieee.mantissa3 |= fetestexcept (FE_INEXACT) != 0;
+      feupdateenv (&env);
+      /* Result is a1 + u.value, scaled up.  */
+      return (a1 + u.value) * 0x1p113Q;
+    }
+  else
+    {
+      if ((u.ieee.mantissa3 & 1) == 0)
+	u.ieee.mantissa3 |= fetestexcept (FE_INEXACT) != 0;
+      v.value = a1 + u.value;
+      /* Ensure the addition is not scheduled after fetestexcept call.  */
+      math_force_eval (v.value);
+      int j = fetestexcept (FE_INEXACT) != 0;
+
+      feupdateenv (&env);
+      /* Ensure the following computations are performed in default rounding
+	 mode instead of just reusing the round to zero computation.  */
+      __asm __volatile__ ("" : "=m" (u) : "m" (u));
+      /* If a1 + u.value is exact, the only rounding happens during
+	 scaling down.  */
+      if (j == 0)
+	return v.value * 0x1p-228Q;
+      /* If result rounded to zero is not subnormal, no double
+	 rounding will occur.  */
+      if (v.ieee.exponent > 228)
+	return (a1 + u.value) * 0x1p-228Q;
+      /* If v.value * 0x1p-228L with round to zero is a subnormal above
+	 or equal to FLT128_MIN / 2, then v.value * 0x1p-228L shifts mantissa
+	 down just by 1 bit, which means v.ieee.mantissa3 |= j would
+	 change the round bit, not sticky or guard bit.
+	 v.value * 0x1p-228L never normalizes by shifting up,
+	 so round bit plus sticky bit should be already enough
+	 for proper rounding.  */
+      if (v.ieee.exponent == 228)
+	{
+	  /* If the exponent would be in the normal range when
+	     rounding to normal precision with unbounded exponent
+	     range, the exact result is known and spurious underflows
+	     must be avoided on systems detecting tininess after
+	     rounding.  */
+	  if (TININESS_AFTER_ROUNDING)
+	    {
+	      w.value = a1 + u.value;
+	      if (w.ieee.exponent == 229)
+		return w.value * 0x1p-228Q;
+	    }
+	  /* v.ieee.mantissa3 & 2 is LSB bit of the result before rounding,
+	     v.ieee.mantissa3 & 1 is the round bit and j is our sticky
+	     bit.  */
+	  w.value = 0;
+	  w.ieee.mantissa3 = ((v.ieee.mantissa3 & 3) << 1) | j;
+	  w.ieee.negative = v.ieee.negative;
+	  v.ieee.mantissa3 &= ~3U;
+	  v.value *= 0x1p-228Q;
+	  w.value *= 0x1p-2Q;
+	  return v.value + w.value;
+	}
+      v.ieee.mantissa3 |= j;
+      return v.value * 0x1p-228Q;
+    }
+}
+
+
+double
+fmaApprox (double x, double y, double z){
+   __float128 x128=x;
+   __float128 y128=y;
+   __float128 z128=z;
+
+   __float128 res=fmaqApprox(x128, y128, z128);
+   return (double) res;
+}
diff --git a/verrou/backend_mcaquad/common/fmaqApprox.h b/verrou/backend_mcaquad/common/fmaqApprox.h
new file mode 100644
index 0000000000000000000000000000000000000000..d87beb2695da00b8a52a36415706fa40f4c71ffa
--- /dev/null
+++ b/verrou/backend_mcaquad/common/fmaqApprox.h
@@ -0,0 +1,7 @@
+#pragma once
+
+__float128
+fmaqApprox (__float128 x, __float128 y, __float128 z);
+
+double
+fmaApprox (double x, double y, double z);
diff --git a/verrou/backend_mcaquad/common/mca_const.h b/verrou/backend_mcaquad/common/mca_const.h
new file mode 100644
index 0000000000000000000000000000000000000000..8dfd21b2e455f982a889655f00710a9308d1554f
--- /dev/null
+++ b/verrou/backend_mcaquad/common/mca_const.h
@@ -0,0 +1,72 @@
+//Round to nearest using a cast
+//Works for standard types (e.g. double to float) if the IEEE rounding mode is set to nearest
+//WARNING: for quad to double, the observed behavior is always round toward zero
+#define NEAREST_FLOAT(x)	((float) (x))
+#define	NEAREST_DOUBLE(x)	((double) (x))
+
+
+//Quad precision exponent encoding size
+#define QUAD_EXP_SIZE      15
+//Quad precision pseudo mantissa encoding size 
+#define QUAD_PMAN_SIZE  112
+//Quad precision pseudo mantissa encoding size in the word containing the 64 msb
+#define QUAD_HX_PMAN_SIZE  48
+//Quad precision pseudo mantissa encoding size in the word containing the 64 lsb
+#define QUAD_LX_PMAN_SIZE  64
+//Quad precision mantissa size
+#define QUAD_PREC          113
+//Quad precision exponent complement
+#define QUAD_EXP_COMP      16383
+//Quad precision max exponent
+#define QUAD_EXP_MAX       16383
+//Quad precision min exponent
+#define QUAD_EXP_MIN       16382
+//Quad precision mask to remove the sign bit
+#define QUAD_HX_ERASE_SIGN 0x7fffffffffffffffULL
+//Quad precision 64 msb to encode plus infinity
+#define QINF_hx            0x7fff000000000000ULL
+//Quad precision 64 msb to encode minus infinity
+#define QMINF_hx            0x7fff000000000000ULL
+//Quad precision 64 lsb to encode plus infinity
+#define QINF_lx            0x0000000000000000ULL
+//Quad precision 64 lsb to encode minus infinity
+#define QMINF_lx            0x0000000000000000ULL
+//Quad precision pseudo mantissa msb set to one
+#define QUAD_HX_PMAN_MSB   0x0000800000000000ULL
+
+//Double precision exponent encoding size
+#define DOUBLE_EXP_SIZE    11
+//Double precision pseudo-mantissa encoding size
+#define DOUBLE_PMAN_SIZE   52
+//Double precision mantissa size
+#define DOUBLE_PREC        53
+//Double precision exponent complement
+#define DOUBLE_EXP_COMP    1023
+//Double precision max exponent
+#define DOUBLE_EXP_MAX     1023
+//Double precision min exponent
+#define DOUBLE_EXP_MIN     1022
+//Double precision plus infinity encoding
+#define DOUBLE_PLUS_INF    0x7FF0000000000000ULL
+//Double precision pseudo mantissa msb set to one
+#define DOUBLE_PMAN_MSB    0x0008000000000000ULL
+//Double precision mask to erase sign bit
+#define DOUBLE_ERASE_SIGN  0x7fffffffffffffffULL
+//Double precision mask to extract sign bit
+#define DOUBLE_GET_SIGN    0x8000000000000000ULL
+//Double precision mask to extract the pseudo mantissa
+#define DOUBLE_GET_PMAN    0x000fffffffffffffULL
+
+//Single precision exponent encoding size
+#define FLOAT_EXP_SIZE     8
+//Single precision pseudo mantissa encoding size
+#define FLOAT_PMAN_SIZE    23
+//Single precision mantissa size
+#define FLOAT_PREC         24
+
+//Sign encoding size
+#define SIGN_SIZE          1
+//64bit word with msb set to 1
+#define WORD64_MSB         0x8000000000000000ULL
+
+
diff --git a/verrou/backend_mcaquad/common/quadmath-imp.h b/verrou/backend_mcaquad/common/quadmath-imp.h
new file mode 100644
index 0000000000000000000000000000000000000000..7eff8a5391f5fdf46cc9d2adee23c77bb947bac9
--- /dev/null
+++ b/verrou/backend_mcaquad/common/quadmath-imp.h
@@ -0,0 +1,354 @@
+/*
+This file comes from the gcc libquadmath sources:
+wget https://raw.githubusercontent.com/gcc-mirror/gcc/529ebc2a706c4223dc8068a32a195b6c400d1f2d/libquadmath/quadmath-imp.h
+
+Modification:
+- commented out the config.h include
+
+*/
+
+/* GCC Quad-Precision Math Library
+   Copyright (C) 2010, 2011 Free Software Foundation, Inc.
+   Written by Francois-Xavier Coudert  <fxcoudert@gcc.gnu.org>
+
+This file is part of the libquadmath library.
+Libquadmath is free software; you can redistribute it and/or
+modify it under the terms of the GNU Library General Public
+License as published by the Free Software Foundation; either
+version 2 of the License, or (at your option) any later version.
+
+Libquadmath is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Library General Public License for more details.
+
+You should have received a copy of the GNU Library General Public
+License along with libquadmath; see the file COPYING.LIB.  If
+not, write to the Free Software Foundation, Inc., 51 Franklin Street - Fifth Floor,
+Boston, MA 02110-1301, USA.  */
+
+#ifndef QUADMATH_IMP_H
+#define QUADMATH_IMP_H
+
+#include <errno.h>
+#include <limits.h>
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include "quadmath.h"
+//#include "config.h"
+#ifdef HAVE_FENV_H
+# include <fenv.h>
+#endif
+
+
+/* Under IEEE 754, an architecture may determine tininess of
+   floating-point results either "before rounding" or "after
+   rounding", but must do so in the same way for all operations
+   returning binary results.  Define TININESS_AFTER_ROUNDING to 1 for
+   "after rounding" architectures, 0 for "before rounding"
+   architectures.  */
+
+#define TININESS_AFTER_ROUNDING   1
+
+#define HIGH_ORDER_BIT_IS_SET_FOR_SNAN 0
+
+#define FIX_FLT128_LONG_CONVERT_OVERFLOW 0
+#define FIX_FLT128_LLONG_CONVERT_OVERFLOW 0
+
+/* Prototypes for internal functions.  */
+extern int32_t __quadmath_rem_pio2q (__float128, __float128 *);
+extern void __quadmath_kernel_sincosq (__float128, __float128, __float128 *,
+				       __float128 *, int);
+extern __float128 __quadmath_kernel_sinq (__float128, __float128, int);
+extern __float128 __quadmath_kernel_cosq (__float128, __float128);
+extern __float128 __quadmath_kernel_tanq (__float128, __float128, int);
+extern __float128 __quadmath_gamma_productq (__float128, __float128, int,
+					     __float128 *);
+extern __float128 __quadmath_gammaq_r (__float128, int *);
+extern __float128 __quadmath_lgamma_negq (__float128, int *);
+extern __float128 __quadmath_lgamma_productq (__float128, __float128,
+					      __float128, int);
+extern __float128 __quadmath_lgammaq_r (__float128, int *);
+extern __float128 __quadmath_x2y2m1q (__float128 x, __float128 y);
+extern __complex128 __quadmath_kernel_casinhq (__complex128, int);
+
+static inline void
+mul_splitq (__float128 *hi, __float128 *lo, __float128 x, __float128 y)
+{
+  /* Fast built-in fused multiply-add.  */
+  *hi = x * y;
+  *lo = fmaq (x, y, -*hi);
+}
+
+
+
+
+/* Frankly, if you have __float128, you have 64-bit integers, right?  */
+#ifndef UINT64_C
+# error "No way!"
+#endif
+
+
+/* Main union type we use to manipulate the floating-point type.  */
+typedef union
+{
+  __float128 value;
+
+  struct
+#ifdef __MINGW32__
+  /* On mingw targets the ms-bitfields option is active by default.
+     Therefore enforce gnu-bitfield style.  */
+  __attribute__ ((gcc_struct))
+#endif
+  {
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    unsigned negative:1;
+    unsigned exponent:15;
+    unsigned mantissa0:16;
+    unsigned mantissa1:32;
+    unsigned mantissa2:32;
+    unsigned mantissa3:32;
+#else
+    unsigned mantissa3:32;
+    unsigned mantissa2:32;
+    unsigned mantissa1:32;
+    unsigned mantissa0:16;
+    unsigned exponent:15;
+    unsigned negative:1;
+#endif
+  } ieee;
+
+  struct
+  {
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    uint64_t high;
+    uint64_t low;
+#else
+    uint64_t low;
+    uint64_t high;
+#endif
+  } words64;
+
+  struct
+  {
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    uint32_t w0;
+    uint32_t w1;
+    uint32_t w2;
+    uint32_t w3;
+#else
+    uint32_t w3;
+    uint32_t w2;
+    uint32_t w1;
+    uint32_t w0;
+#endif
+  } words32;
+
+  struct
+#ifdef __MINGW32__
+  /* Make sure we are using gnu-style bitfield handling.  */
+  __attribute__ ((gcc_struct))
+#endif
+  {
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    unsigned negative:1;
+    unsigned exponent:15;
+    unsigned quiet_nan:1;
+    unsigned mantissa0:15;
+    unsigned mantissa1:32;
+    unsigned mantissa2:32;
+    unsigned mantissa3:32;
+#else
+    unsigned mantissa3:32;
+    unsigned mantissa2:32;
+    unsigned mantissa1:32;
+    unsigned mantissa0:15;
+    unsigned quiet_nan:1;
+    unsigned exponent:15;
+    unsigned negative:1;
+#endif
+  } ieee_nan;
+
+} ieee854_float128;
+
+
+/* Get two 64 bit ints from a long double.  */
+#define GET_FLT128_WORDS64(ix0,ix1,d)  \
+do {                                   \
+  ieee854_float128 u;                  \
+  u.value = (d);                       \
+  (ix0) = u.words64.high;              \
+  (ix1) = u.words64.low;               \
+} while (0)
+
+/* Set a long double from two 64 bit ints.  */
+#define SET_FLT128_WORDS64(d,ix0,ix1)  \
+do {                                   \
+  ieee854_float128 u;                  \
+  u.words64.high = (ix0);              \
+  u.words64.low = (ix1);               \
+  (d) = u.value;                       \
+} while (0)
+
+/* Get the more significant 64 bits of a long double mantissa.  */
+#define GET_FLT128_MSW64(v,d)          \
+do {                                   \
+  ieee854_float128 u;                  \
+  u.value = (d);                       \
+  (v) = u.words64.high;                \
+} while (0)
+
+/* Set the more significant 64 bits of a long double mantissa from an int.  */
+#define SET_FLT128_MSW64(d,v)          \
+do {                                   \
+  ieee854_float128 u;                  \
+  u.value = (d);                       \
+  u.words64.high = (v);                \
+  (d) = u.value;                       \
+} while (0)
+
+/* Get the least significant 64 bits of a long double mantissa.  */
+#define GET_FLT128_LSW64(v,d)          \
+do {                                   \
+  ieee854_float128 u;                  \
+  u.value = (d);                       \
+  (v) = u.words64.low;                 \
+} while (0)
+
+
+#define IEEE854_FLOAT128_BIAS 0x3fff
+
+#define QUADFP_NAN		0
+#define QUADFP_INFINITE		1
+#define QUADFP_ZERO		2
+#define QUADFP_SUBNORMAL	3
+#define QUADFP_NORMAL		4
+#define fpclassifyq(x) \
+  __builtin_fpclassify (QUADFP_NAN, QUADFP_INFINITE, QUADFP_NORMAL, \
+			QUADFP_SUBNORMAL, QUADFP_ZERO, x)
+
+#ifndef math_opt_barrier
+# define math_opt_barrier(x) \
+({ __typeof (x) __x = (x); __asm ("" : "+m" (__x)); __x; })
+# define math_force_eval(x) \
+({ __typeof (x) __x = (x); __asm __volatile__ ("" : : "m" (__x)); })
+#endif
+
+/* math_narrow_eval reduces its floating-point argument to the range
+   and precision of its semantic type.  (The original evaluation may
+   still occur with excess range and precision, so the result may be
+   affected by double rounding.)  */
+#define math_narrow_eval(x) (x)
+
+/* If X (which is not a NaN) is subnormal, force an underflow
+   exception.  */
+#define math_check_force_underflow(x)				\
+  do								\
+    {								\
+      __float128 force_underflow_tmp = (x);			\
+      if (fabsq (force_underflow_tmp) < FLT128_MIN)		\
+	{							\
+	  __float128 force_underflow_tmp2			\
+	    = force_underflow_tmp * force_underflow_tmp;	\
+	  math_force_eval (force_underflow_tmp2);		\
+	}							\
+    }								\
+  while (0)
+/* Likewise, but X is also known to be nonnegative.  */
+#define math_check_force_underflow_nonneg(x)			\
+  do								\
+    {								\
+      __float128 force_underflow_tmp = (x);			\
+      if (force_underflow_tmp < FLT128_MIN)			\
+	{							\
+	  __float128 force_underflow_tmp2			\
+	    = force_underflow_tmp * force_underflow_tmp;	\
+	  math_force_eval (force_underflow_tmp2);		\
+	}							\
+    }								\
+  while (0)
+
+/* Likewise, for both real and imaginary parts of a complex
+   result.  */
+#define math_check_force_underflow_complex(x)				\
+  do									\
+    {									\
+      __typeof (x) force_underflow_complex_tmp = (x);			\
+      math_check_force_underflow (__real__ force_underflow_complex_tmp); \
+      math_check_force_underflow (__imag__ force_underflow_complex_tmp); \
+    }									\
+  while (0)
+
+#ifndef HAVE_FENV_H
+# define feraiseexcept(arg) ((void) 0)
+typedef int fenv_t;
+# define feholdexcept(arg) ((void) 0)
+# define fesetround(arg) ((void) 0)
+# define feupdateenv(arg) ((void) (arg))
+# define fesetenv(arg) ((void) (arg))
+# define fetestexcept(arg) 0
+# define feclearexcept(arg) ((void) 0)
+#else
+# ifndef HAVE_FEHOLDEXCEPT
+#  define feholdexcept(arg) ((void) 0)
+# endif
+# ifndef HAVE_FESETROUND
+#  define fesetround(arg) ((void) 0)
+# endif
+# ifndef HAVE_FEUPDATEENV
+#  define feupdateenv(arg) ((void) (arg))
+# endif
+# ifndef HAVE_FESETENV
+#  define fesetenv(arg) ((void) (arg))
+# endif
+# ifndef HAVE_FETESTEXCEPT
+#  define fetestexcept(arg) 0
+# endif
+#endif
+
+#ifndef __glibc_likely
+# define __glibc_likely(cond)	__builtin_expect ((cond), 1)
+#endif
+
+#ifndef __glibc_unlikely
+# define __glibc_unlikely(cond)	__builtin_expect ((cond), 0)
+#endif
+
+#if defined HAVE_FENV_H && defined HAVE_FESETROUND && defined HAVE_FEUPDATEENV
+struct rm_ctx
+{
+  fenv_t env;
+  bool updated_status;
+};
+
+# define SET_RESTORE_ROUNDF128(RM)					\
+  struct rm_ctx ctx __attribute__((cleanup (libc_feresetround_ctx)));	\
+  libc_feholdsetround_ctx (&ctx, (RM))
+
+static inline __attribute__ ((always_inline)) void
+libc_feholdsetround_ctx (struct rm_ctx *ctx, int round)
+{
+  ctx->updated_status = false;
+
+  /* Update rounding mode only if different.  */
+  if (__glibc_unlikely (round != fegetround ()))
+    {
+      ctx->updated_status = true;
+      fegetenv (&ctx->env);
+      fesetround (round);
+    }
+}
+
+static inline __attribute__ ((always_inline)) void
+libc_feresetround_ctx (struct rm_ctx *ctx)
+{
+  /* Restore the rounding mode if updated.  */
+  if (__glibc_unlikely (ctx->updated_status))
+    feupdateenv (&ctx->env);
+}
+#else
+# define SET_RESTORE_ROUNDF128(RM) ((void) 0)
+#endif
+
+#endif
diff --git a/verrou/backend_mcaquad/common/tinymt64.c b/verrou/backend_mcaquad/common/tinymt64.c
new file mode 100644
index 0000000000000000000000000000000000000000..36d2a87353da4e1fb3558b43ca0acda0675801ac
--- /dev/null
+++ b/verrou/backend_mcaquad/common/tinymt64.c
@@ -0,0 +1,130 @@
+/**
+ * @file tinymt64.c
+ *
+ * @brief 64-bit Tiny Mersenne Twister only 127 bit internal state
+ *
+ * @author Mutsuo Saito (Hiroshima University)
+ * @author Makoto Matsumoto (The University of Tokyo)
+ *
+ * Copyright (C) 2011 Mutsuo Saito, Makoto Matsumoto,
+ * Hiroshima University and The University of Tokyo.
+ * All rights reserved.
+ *
+ * The 3-clause BSD License is applied to this software, see
+ * LICENSE.txt
+ */
+#include "tinymt64.h"
+
+#define MIN_LOOP 8
+
+/**
+ * This function represents a function used in the initialization
+ * by init_by_array
+ * @param[in] x 64-bit integer
+ * @return 64-bit integer
+ */
+static uint64_t ini_func1(uint64_t x) {
+    return (x ^ (x >> 59)) * UINT64_C(2173292883993);
+}
+
+/**
+ * This function represents a function used in the initialization
+ * by init_by_array
+ * @param[in] x 64-bit integer
+ * @return 64-bit integer
+ */
+static uint64_t ini_func2(uint64_t x) {
+    return (x ^ (x >> 59)) * UINT64_C(58885565329898161);
+}
+
+/**
+ * This function certificate the period of 2^127-1.
+ * @param random tinymt state vector.
+ */
+static void period_certification(tinymt64_t * random) {
+    if ((random->status[0] & TINYMT64_MASK) == 0 &&
+	random->status[1] == 0) {
+	random->status[0] = 'T';
+	random->status[1] = 'M';
+    }
+}
+
+/**
+ * This function initializes the internal state array with a 64-bit
+ * unsigned integer seed.
+ * @param random tinymt state vector.
+ * @param seed a 64-bit unsigned integer used as a seed.
+ */
+void tinymt64_init(tinymt64_t * random, uint64_t seed) {
+    int i;
+    random->status[0] = seed ^ ((uint64_t)random->mat1 << 32);
+    random->status[1] = random->mat2 ^ random->tmat;
+    for (i = 1; i < MIN_LOOP; i++) {
+	random->status[i & 1] ^= i + UINT64_C(6364136223846793005)
+	    * (random->status[(i - 1) & 1]
+	       ^ (random->status[(i - 1) & 1] >> 62));
+    }
+    period_certification(random);
+}
+
+/**
+ * This function initializes the internal state array,
+ * with an array of 64-bit unsigned integers used as seeds
+ * @param random tinymt state vector.
+ * @param init_key the array of 64-bit integers, used as a seed.
+ * @param key_length the length of init_key.
+ */
+void tinymt64_init_by_array(tinymt64_t * random, const uint64_t init_key[],
+			    int key_length) {
+    const int lag = 1;
+    const int mid = 1;
+    const int size = 4;
+    int i, j;
+    int count;
+    uint64_t r;
+    uint64_t st[4];
+
+    st[0] = 0;
+    st[1] = random->mat1;
+    st[2] = random->mat2;
+    st[3] = random->tmat;
+    if (key_length + 1 > MIN_LOOP) {
+	count = key_length + 1;
+    } else {
+	count = MIN_LOOP;
+    }
+    r = ini_func1(st[0] ^ st[mid % size]
+		  ^ st[(size - 1) % size]);
+    st[mid % size] += r;
+    r += key_length;
+    st[(mid + lag) % size] += r;
+    st[0] = r;
+    count--;
+    for (i = 1, j = 0; (j < count) && (j < key_length); j++) {
+	r = ini_func1(st[i] ^ st[(i + mid) % size] ^ st[(i + size - 1) % size]);
+	st[(i + mid) % size] += r;
+	r += init_key[j] + i;
+	st[(i + mid + lag) % size] += r;
+	st[i] = r;
+	i = (i + 1) % size;
+    }
+    for (; j < count; j++) {
+	r = ini_func1(st[i] ^ st[(i + mid) % size] ^ st[(i + size - 1) % size]);
+	st[(i + mid) % size] += r;
+	r += i;
+	st[(i + mid + lag) % size] += r;
+	st[i] = r;
+	i = (i + 1) % size;
+    }
+    for (j = 0; j < size; j++) {
+	r = ini_func2(st[i] + st[(i + mid) % size] + st[(i + size - 1) % size]);
+	st[(i + mid) % size] ^= r;
+	r -= i;
+	st[(i + mid + lag) % size] ^= r;
+	st[i] = r;
+	i = (i + 1) % size;
+    }
+    random->status[0] = st[0] ^ st[1];
+    random->status[1] = st[2] ^ st[3];
+    period_certification(random);
+}
diff --git a/verrou/backend_mcaquad/common/tinymt64.h b/verrou/backend_mcaquad/common/tinymt64.h
new file mode 100644
index 0000000000000000000000000000000000000000..52e980dbb8ba239dba5366ec76a76511d53e50f0
--- /dev/null
+++ b/verrou/backend_mcaquad/common/tinymt64.h
@@ -0,0 +1,218 @@
+#ifndef TINYMT64_H
+#define TINYMT64_H
+/**
+ * @file tinymt64.h
+ *
+ * @brief Tiny Mersenne Twister only 127 bit internal state
+ *
+ * @author Mutsuo Saito (Hiroshima University)
+ * @author Makoto Matsumoto (The University of Tokyo)
+ *
+ * Copyright (C) 2011 Mutsuo Saito, Makoto Matsumoto,
+ * Hiroshima University and The University of Tokyo.
+ * All rights reserved.
+ *
+ * The 3-clause BSD License is applied to this software, see
+ * LICENSE.txt
+ */
+
+#include <stdint.h>
+#include <inttypes.h>
+
+#define TINYMT64_MEXP 127
+#define TINYMT64_SH0 12
+#define TINYMT64_SH1 11
+#define TINYMT64_SH8 8
+#define TINYMT64_MASK UINT64_C(0x7fffffffffffffff)
+#define TINYMT64_MUL (1.0 / 9007199254740992.0)
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+/*
+ * tinymt64 internal state vector and parameters
+ */
+struct TINYMT64_T {
+    uint64_t status[2];
+    uint32_t mat1;
+    uint32_t mat2;
+    uint64_t tmat;
+};
+
+typedef struct TINYMT64_T tinymt64_t;
+
+void tinymt64_init(tinymt64_t * random, uint64_t seed);
+void tinymt64_init_by_array(tinymt64_t * random, const uint64_t init_key[],
+			    int key_length);
+
+#if defined(__GNUC__)
+/**
+ * This function always returns 127
+ * @param random not used
+ * @return always 127
+ */
+inline static int tinymt64_get_mexp(
+    tinymt64_t * random  __attribute__((unused))) {
+    return TINYMT64_MEXP;
+}
+#else
+inline static int tinymt64_get_mexp(tinymt64_t * random) {
+    return TINYMT64_MEXP;
+}
+#endif
+
+/**
+ * This function changes internal state of tinymt64.
+ * Users should not call this function directly.
+ * @param random tinymt internal status
+ */
+inline static void tinymt64_next_state(tinymt64_t * random) {
+    uint64_t x;
+
+    random->status[0] &= TINYMT64_MASK;
+    x = random->status[0] ^ random->status[1];
+    x ^= x << TINYMT64_SH0;
+    x ^= x >> 32;
+    x ^= x << 32;
+    x ^= x << TINYMT64_SH1;
+    random->status[0] = random->status[1];
+    random->status[1] = x;
+    random->status[0] ^= -((int64_t)(x & 1)) & random->mat1;
+    random->status[1] ^= -((int64_t)(x & 1)) & (((uint64_t)random->mat2) << 32);
+}
+
+/**
+ * This function outputs 64-bit unsigned integer from internal state.
+ * Users should not call this function directly.
+ * @param random tinymt internal status
+ * @return 64-bit unsigned pseudorandom number
+ */
+inline static uint64_t tinymt64_temper(tinymt64_t * random) {
+    uint64_t x;
+#if defined(LINEARITY_CHECK)
+    x = random->status[0] ^ random->status[1];
+#else
+    x = random->status[0] + random->status[1];
+#endif
+    x ^= random->status[0] >> TINYMT64_SH8;
+    x ^= -((int64_t)(x & 1)) & random->tmat;
+    return x;
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * Users should not call this function directly.
+ * @param random tinymt internal status
+ * @return floating point number r (1.0 <= r < 2.0)
+ */
+inline static double tinymt64_temper_conv(tinymt64_t * random) {
+    uint64_t x;
+    union {
+	uint64_t u;
+	double d;
+    } conv;
+#if defined(LINEARITY_CHECK)
+    x = random->status[0] ^ random->status[1];
+#else
+    x = random->status[0] + random->status[1];
+#endif
+    x ^= random->status[0] >> TINYMT64_SH8;
+    conv.u = ((x ^ (-((int64_t)(x & 1)) & random->tmat)) >> 12)
+	| UINT64_C(0x3ff0000000000000);
+    return conv.d;
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * Users should not call this function directly.
+ * @param random tinymt internal status
+ * @return floating point number r (1.0 < r < 2.0)
+ */
+inline static double tinymt64_temper_conv_open(tinymt64_t * random) {
+    uint64_t x;
+    union {
+	uint64_t u;
+	double d;
+    } conv;
+#if defined(LINEARITY_CHECK)
+    x = random->status[0] ^ random->status[1];
+#else
+    x = random->status[0] + random->status[1];
+#endif
+    x ^= random->status[0] >> TINYMT64_SH8;
+    conv.u = ((x ^ (-((int64_t)(x & 1)) & random->tmat)) >> 12)
+	| UINT64_C(0x3ff0000000000001);
+    return conv.d;
+}
+
+/**
+ * This function outputs 64-bit unsigned integer from internal state.
+ * @param random tinymt internal status
+ * @return 64-bit unsigned integer r (0 <= r < 2^64)
+ */
+inline static uint64_t tinymt64_generate_uint64(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return tinymt64_temper(random);
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * This function is implemented using multiplying by (1 / 2^53).
+ * @param random tinymt internal status
+ * @return floating point number r (0.0 <= r < 1.0)
+ */
+inline static double tinymt64_generate_double(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return (tinymt64_temper(random) >> 11) * TINYMT64_MUL;
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * This function is implemented using union trick.
+ * @param random tinymt internal status
+ * @return floating point number r (0.0 <= r < 1.0)
+ */
+inline static double tinymt64_generate_double01(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return tinymt64_temper_conv(random) - 1.0;
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * This function is implemented using union trick.
+ * @param random tinymt internal status
+ * @return floating point number r (1.0 <= r < 2.0)
+ */
+inline static double tinymt64_generate_double12(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return tinymt64_temper_conv(random);
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * This function is implemented using union trick.
+ * @param random tinymt internal status
+ * @return floating point number r (0.0 < r <= 1.0)
+ */
+inline static double tinymt64_generate_doubleOC(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return 2.0 - tinymt64_temper_conv(random);
+}
+
+/**
+ * This function outputs floating point number from internal state.
+ * This function is implemented using union trick.
+ * @param random tinymt internal status
+ * @return floating point number r (0.0 < r < 1.0)
+ */
+inline static double tinymt64_generate_doubleOO(tinymt64_t * random) {
+    tinymt64_next_state(random);
+    return tinymt64_temper_conv_open(random) - 1.0;
+}
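+
+/*
+ * Note on the generate_double* variants above (informative): the suffix
+ * encodes the interval of the result, as stated in each docstring
+ * (01 = [0,1), 12 = [1,2), OC = (0,1], OO = (0,1)).  The mcaquad backend
+ * draws its noise through _mca_rand() in mcalib.c, which uses
+ * tinymt64_generate_doubleOO() to obtain a value in the open interval (0,1).
+ */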
+
+#if defined(__cplusplus)
+}
+#endif
+
+#endif
diff --git a/verrou/backend_mcaquad/interflop_mcaquad.c b/verrou/backend_mcaquad/interflop_mcaquad.c
new file mode 100644
index 0000000000000000000000000000000000000000..1ccd892be3e34088fac971e463f05c4c4b50a68c
--- /dev/null
+++ b/verrou/backend_mcaquad/interflop_mcaquad.c
@@ -0,0 +1,166 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                        interflop_mcaquad.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "interflop_mcaquad.h"
+#include <stddef.h>
+
+
+
+const char*  mcaquad_mode_name (unsigned int mode) {
+  switch (mode) {
+  case  MCAMODE_IEEE:
+    return "IEEE";
+  case  MCAMODE_MCA:
+    return "MCA";
+  case MCAMODE_PB:
+    return "PB";
+  case MCAMODE_RR:
+    return "RR";
+  }
+  return "undefined";
+}
+
+// * Global variables & parameters
+//mcaquad_conf_t mcaquad_conf;
+//unsigned int mcaquad_seed;
+
+void (*mcaquad_panicHandler)(const char*)=NULL;
+
+void mcaquad_set_panic_handler(void (*panicHandler)(const char*)){
+  mcaquad_panicHandler=panicHandler;
+}
+
+void (*mcaquad_debug_print_op)(int,const char*, const double*, const double*)=NULL;
+void mcaquad_set_debug_print_op(void (*printOpHandler)(int nbArg,const char*name, const double* args,const double* res)){
+  mcaquad_debug_print_op=printOpHandler;
+};
+
+
+#include "mcalib.c"
+
+// * C interface
+void IFMQ_FCTNAME(configure)(mcaquad_conf_t mode,void* context) {  
+  _set_mca_mode(mode.mode);
+  _set_mca_precision(mode.precision_double, mode.precision_float);
+}
+
+void IFMQ_FCTNAME(finalyze)(void* context){
+}
+
+const char* IFMQ_FCTNAME(get_backend_name)() {
+  return "mcaquad";
+}
+
+const char* IFMQ_FCTNAME(get_backend_version)() {
+  return "1.x-dev";
+}
+
+
+static uint64_t mcaquadrandom_seed;
+
+void mcaquad_set_seed (unsigned int seed) {
+  uint64_t seed64=(uint64_t) seed;
+  _mca_set_seed(&seed64,1);
+  mcaquadrandom_seed = tinymt64_generate_uint64(&random_state);
+}
+
+void mcaquad_set_random_seed () {
+  _mca_set_seed(&mcaquadrandom_seed,1);
+}
+
+void IFMQ_FCTNAME(add_double) (double a, double b, double* res,void* context) {
+  *res=_mca_dbin(a, b, MCA_ADD);
+}
+
+void IFMQ_FCTNAME(add_float) (float a, float b, float* res,void* context) {
+  *res=_mca_sbin(a, b, MCA_ADD);
+}
+
+void IFMQ_FCTNAME(sub_double) (double a, double b, double* res,void* context) {
+  *res=_mca_dbin(a, b, MCA_SUB);
+}
+
+void IFMQ_FCTNAME(sub_float) (float a, float b, float* res,void* context) {
+  *res=_mca_sbin(a, b, MCA_SUB);
+}
+
+void IFMQ_FCTNAME(mul_double) (double a, double b, double* res,void* context) {
+  *res=_mca_dbin(a, b, MCA_MUL);
+}
+
+void IFMQ_FCTNAME(mul_float) (float a, float b, float* res,void* context) {
+  *res=_mca_sbin(a, b, MCA_MUL);
+}
+
+void IFMQ_FCTNAME(div_double) (double a, double b, double* res,void* context) {
+  *res=_mca_dbin(a, b, MCA_DIV);
+}
+
+void IFMQ_FCTNAME(div_float) (float a, float b, float* res,void* context) {
+  *res=_mca_sbin(a, b, MCA_DIV);
+}
+
+void IFMQ_FCTNAME(cast_double_to_float) (double a, float* res, void* context){
+   *res=_mca_dtosbin(a);
+}
+
+ void IFMQ_FCTNAME(madd_double) (double a, double b, double c, double* res, void* context){
+    *res=_mca_dbin_fma(a,b,c);
+ }
+
+void IFMQ_FCTNAME(madd_float) (float a, float b, float c, float* res, void* context){
+   *res=_mca_sbin_fma(a,b,c);
+ }
+
+
+
+
+struct interflop_backend_interface_t IFMQ_FCTNAME(init)(void ** context){
+  struct interflop_backend_interface_t config;
+
+  config.interflop_add_float = & IFMQ_FCTNAME(add_float);
+  config.interflop_sub_float = & IFMQ_FCTNAME(sub_float);
+  config.interflop_mul_float = & IFMQ_FCTNAME(mul_float);
+  config.interflop_div_float = & IFMQ_FCTNAME(div_float);
+
+  config.interflop_add_double = & IFMQ_FCTNAME(add_double);
+  config.interflop_sub_double = & IFMQ_FCTNAME(sub_double);
+  config.interflop_mul_double = & IFMQ_FCTNAME(mul_double);
+  config.interflop_div_double = & IFMQ_FCTNAME(div_double);
+
+  config.interflop_cast_double_to_float= & IFMQ_FCTNAME(cast_double_to_float);
+
+  config.interflop_madd_float = & IFMQ_FCTNAME(madd_float);
+  config.interflop_madd_double = & IFMQ_FCTNAME(madd_double);
+
+  return config;
+}
diff --git a/verrou/backend_mcaquad/interflop_mcaquad.h b/verrou/backend_mcaquad/interflop_mcaquad.h
new file mode 100644
index 0000000000000000000000000000000000000000..6acfa51912cadcc6eb836362dcd0793824beda70
--- /dev/null
+++ b/verrou/backend_mcaquad/interflop_mcaquad.h
@@ -0,0 +1,108 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                         interflop_mcaquad.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __INTERFLOP_MCAQUAD_H
+#define __INTERFLOP_MCAQUAD_H
+
+//#define DEBUG_PRINT_OP
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#define IFMQ_FCTNAME(FCT) interflop_mcaquad_##FCT
+
+#include "../interflop_backend_interface.h"
+
+
+   /* Taken from vfcwrapper.h */
+   /* define the available MCA modes of operation */
+#define MCAMODE_IEEE 0
+#define MCAMODE_MCA  1
+#define MCAMODE_PB   2
+#define MCAMODE_RR   3
+   /* End of code taken from vfcwrapper.h */
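+   /* Mode names, following the usual Monte Carlo Arithmetic terminology
+      (informative): MCA = full Monte Carlo Arithmetic, PB = Precision
+      Bounding (inbound errors only), RR = Random Rounding (outbound errors
+      only); see mcalib.c for how each mode is applied. */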
+
+  const char*  mcaquad_mode_name (unsigned int mode);
+   
+  struct mcaquad_conf {
+     unsigned int precision_float;
+     unsigned int precision_double;
+     int mode;
+  };
+   
+  typedef struct mcaquad_conf mcaquad_conf_t;
+
+  void IFMQ_FCTNAME(configure)(mcaquad_conf_t mode,void* context);
+  void IFMQ_FCTNAME(finalyze)(void* context);
+
+  const char* IFMQ_FCTNAME(get_backend_name)(void);
+  const char* IFMQ_FCTNAME(get_backend_version)(void);
+
+
+//  const char* verrou_rounding_mode_name (enum vr_RoundingMode mode);
+
+   //void verrou_begin_instr(void);
+   //void verrou_end_instr(void);
+
+  void mcaquad_set_seed (unsigned int seed);
+  void mcaquad_set_random_seed (void);
+
+   //void verrou_set_cancellation_handler(void (*)(int));
+
+  extern void (*mcaquad_panicHandler)(const char*);
+  void mcaquad_set_panic_handler(void (*)(const char*));
+
+  extern void (*mcaquad_debug_print_op)(int,const char*, const double* args, const double* res);
+  void mcaquad_set_debug_print_op(void (*)(int nbArg, const char* name, const double* args, const double* res));
+
+  struct interflop_backend_interface_t IFMQ_FCTNAME(init)(void ** context);
+
+  void IFMQ_FCTNAME(add_double) (double a, double b, double* res, void* context);    
+  void IFMQ_FCTNAME(add_float)  (float a,  float b,  float*  res, void* context);
+  void IFMQ_FCTNAME(sub_double) (double a, double b, double* res, void* context);
+  void IFMQ_FCTNAME(sub_float)  (float a,  float b,  float*  res, void* context);
+  void IFMQ_FCTNAME(mul_double) (double a, double b, double* res, void* context);
+  void IFMQ_FCTNAME(mul_float)  (float a,  float b,  float*  res, void* context);
+  void IFMQ_FCTNAME(div_double) (double a, double b, double* res, void* context);
+  void IFMQ_FCTNAME(div_float)  (float a,  float b,  float*  res, void* context);
+
+  void IFMQ_FCTNAME(cast_double_to_float) (double a, float* b, void* context);
+
+  void IFMQ_FCTNAME(madd_double)(double a, double b, double c, double* res, void* context);
+  void IFMQ_FCTNAME(madd_float) (float a,  float b,  float c,  float*  res, void* context);
+
+  
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* ndef __INTERFLOP_MCAQUAD_H */
diff --git a/verrou/backend_mcaquad/libmca-quad.h b/verrou/backend_mcaquad/libmca-quad.h
new file mode 100644
index 0000000000000000000000000000000000000000..130ac37c9d719d9c76fa7d73a0bd08c942317103
--- /dev/null
+++ b/verrou/backend_mcaquad/libmca-quad.h
@@ -0,0 +1,25 @@
+/********************************************************************************
+ *                                                                              *
+ *  This file is part of Verificarlo.                                           *
+ *                                                                              *
+ *  Copyright (c) 2015                                                          *
+ *     Universite de Versailles St-Quentin-en-Yvelines                          *
+ *     CMLA, Ecole Normale Superieure de Cachan                                 *
+ *                                                                              *
+ *  Verificarlo is free software: you can redistribute it and/or modify         *
+ *  it under the terms of the GNU General Public License as published by        *
+ *  the Free Software Foundation, either version 3 of the License, or           *
+ *  (at your option) any later version.                                         *
+ *                                                                              *
+ *  Verificarlo is distributed in the hope that it will be useful,              *
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of              *
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               *
+ *  GNU General Public License for more details.                                *
+ *                                                                              *
+ *  You should have received a copy of the GNU General Public License           *
+ *  along with Verificarlo.  If not, see <http://www.gnu.org/licenses/>.        *
+ *                                                                              *
+ ********************************************************************************/
+
+struct mca_interface_t;
+extern struct mca_interface_t quad_mca_interface;
diff --git a/verrou/backend_mcaquad/mcalib.c b/verrou/backend_mcaquad/mcalib.c
new file mode 100644
index 0000000000000000000000000000000000000000..d951e36ff77adafafd79625cfe22836f278248e9
--- /dev/null
+++ b/verrou/backend_mcaquad/mcalib.c
@@ -0,0 +1,500 @@
+/********************************************************************************
+ *                                                                              *
+ *  This file is part of Verificarlo.                                           *
+ *                                                                              *
+ *  Copyright (c) 2015                                                          *
+ *     Universite de Versailles St-Quentin-en-Yvelines                          *
+ *     CMLA, Ecole Normale Superieure de Cachan                                 *
+ *  Copyright (c) 2018                                                          *
+ *     Universite de Versailles St-Quentin-en-Yvelines                          *
+ *                                                                              *
+ *  Verificarlo is free software: you can redistribute it and/or modify         *
+ *  it under the terms of the GNU General Public License as published by        *
+ *  the Free Software Foundation, either version 3 of the License, or           *
+ *  (at your option) any later version.                                         *
+ *                                                                              *
+ *  Verificarlo is distributed in the hope that it will be useful,              *
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of              *
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               *
+ *  GNU General Public License for more details.                                *
+ *                                                                              *
+ *  You should have received a copy of the GNU General Public License           *
+ *  along with Verificarlo.  If not, see <http://www.gnu.org/licenses/>.        *
+ *                                                                              *
+ ********************************************************************************/
+
+// Changelog:
+//
+// 2015-05-20 replace random number generator with TinyMT64. This
+// provides a reentrant, independent generator of better quality than
+// the one provided in libc.
+//
+// 2015-10-11 New version based on the quad floating point type, replacing
+// MPFR as long as the required MCA precision is lower than half the quad
+// mantissa, i.e. 56 bits
+//
+// 2015-11-16 New version using double precision for single precision operation
+//
+// 2016-07-14 Support denormalized numbers
+//
+// 2017-04-25 Rewrite debug and validate the noise addition operation
+//
+
+//BL #include <math.h>
+#include <stdbool.h>
+//BL #include <stdint.h>
+//BL #include <stdio.h>
+//BL #include <stdlib.h>
+//BL #include <sys/time.h>
+//BL #include <unistd.h>
+
+
+
+#include "./common/mca_const.h"
+#include "./common/quadmath-imp.h"
+#include "./common/fmaqApprox.h"
+#include "./common/tinymt64.h"
+//#include "../vfcwrapper/vfcwrapper.h"
+  //#include "interflop_mcaquad.h"
+#include "libmca-quad.h"
+
+static int MCALIB_OP_TYPE = MCAMODE_IEEE;
+static int MCALIB_DOUBLE_T = 53;
+static int MCALIB_FLOAT_T = 24;
+
+// possible op values
+#define MCA_ADD 1
+#define MCA_SUB 2
+#define MCA_MUL 3
+#define MCA_DIV 4
+
+#define min(a, b) ((a) < (b) ? (a) : (b))
+
+static float _mca_sbin(float a, float b, int qop);
+
+static double _mca_dbin(double a, double b, int qop);
+
+/******************** MCA CONTROL FUNCTIONS *******************
+* The following functions are used to set virtual precision and
+* MCA mode of operation.
+***************************************************************/
+
+static int _set_mca_mode(int mode) {
+  if (mode < 0 || mode > 3)
+    return -1;
+
+  MCALIB_OP_TYPE = mode;
+  return 0;
+}
+
+static int _set_mca_precision(int precision_double, int precision_float) {
+  MCALIB_DOUBLE_T = precision_double;
+  MCALIB_FLOAT_T = precision_float;
+  return 0;
+}
+
+/******************** MCA RANDOM FUNCTIONS ********************
+* The following functions are used to calculate the random
+* perturbations used for MCA
+***************************************************************/
+
+/* random generator internal state */
+static tinymt64_t random_state;
+
+static double _mca_rand(void) {
+  /* Returns a random double in the (0,1) open interval */
+  return tinymt64_generate_doubleOO(&random_state);
+}
+
+static inline double pow2d(int exp) {
+  double res = 0;
+  //BL uint64_t *x = (uint64_t*)malloc(sizeof(uint64_t));
+  uint64_t x[1];
+  // specials
+  if (exp == 0)
+    return 1;
+
+  if (exp > 1023) { /*exceed max exponent*/
+    *x = DOUBLE_PLUS_INF;
+    res = *((double *)x);
+    return res;
+  }
+  if (exp < -1022) { /*subnormal*/
+    *x = ((uint64_t)DOUBLE_PMAN_MSB) >> -(exp + DOUBLE_EXP_MAX);
+    res = *((double *)x);
+    return res;
+  }
+
+  // normal case
+  // complement the exponent, shift it at the right place in the MSW
+  *x = (((uint64_t)exp) + DOUBLE_EXP_COMP) << DOUBLE_PMAN_SIZE;
+  res = *((double *)x);
+  return res;
+}
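+// Worked example for pow2d() (informative; assumes the usual IEEE-754 double
+// layout suggested by the constant names: DOUBLE_EXP_COMP = 1023 bias,
+// DOUBLE_PMAN_SIZE = 52 mantissa bits):
+//   pow2d(-1) builds (-1 + 1023) << 52 = 0x3FE0000000000000, i.e. 0.5;
+//   pow2d(10) builds (10 + 1023) << 52 = 0x4090000000000000, i.e. 1024.0.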
+
+static inline uint32_t rexpq(__float128 x) {
+  // no need to check special value in our cases since qnoise will deal with it
+  // do not reuse it outside this code!
+  uint64_t hx, ix;
+  uint32_t exp = 0;
+  GET_FLT128_MSW64(hx, x);
+  // remove sign bit, mantissa will be erased by the next shift
+  ix = hx & QUAD_HX_ERASE_SIGN;
+  // shift exponent to have LSB on position 0 and complement
+  exp += (ix >> QUAD_HX_PMAN_SIZE) - QUAD_EXP_COMP;
+  return exp;
+}
+
+static inline uint32_t rexpd(double x) {
+  // no need to check special value in our cases since pow2d will deal with it
+  // do not reuse it outside this code!
+  uint64_t hex, ix;
+  uint32_t exp = 0;
+  // change type to bit field
+  hex = *((uint64_t *)&x);
+  // remove sign bit, mantissa will be erased by the next shift
+  ix = hex & DOUBLE_ERASE_SIGN;
+  // shift exponent to have LSB on position 0 and complement
+  exp += (ix >> DOUBLE_PMAN_SIZE) - DOUBLE_EXP_COMP;
+  return exp;
+}
+
+static inline __float128 qnoise(int exp) {
+  double d_rand = (_mca_rand() - 0.5);
+  uint64_t u_rand = *((uint64_t *)&d_rand);
+  __float128 noise;
+  uint64_t hx, lx;
+  // specials
+  if (exp == 0)
+    return 1;
+
+  if (exp > QUAD_EXP_MAX) { /*exceed max exponent*/
+    SET_FLT128_WORDS64(noise, QINF_hx, QINF_lx);
+    return noise;
+  }
+  if (exp < -QUAD_EXP_MIN) { /*subnormal*/
+    // test for minus infinity
+    if (exp < -(QUAD_EXP_MIN + QUAD_PMAN_SIZE)) {
+      SET_FLT128_WORDS64(noise, QMINF_hx, QMINF_lx);
+      return noise;
+    }
+    // noise will be a subnormal
+    // build HX with sign of d_rand, exp
+    uint64_t u_hx = ((uint64_t)(-QUAD_EXP_MIN + QUAD_EXP_COMP))
+                    << QUAD_HX_PMAN_SIZE;
+    // add the sign bit
+    uint64_t sign = u_rand & DOUBLE_GET_SIGN;
+    u_hx = u_hx + sign;
+    // erase the sign bit from u_rand
+    u_rand = u_rand - sign;
+
+    if (-exp - QUAD_EXP_MIN < -QUAD_HX_PMAN_SIZE) {
+      // the higher part of the noise starts in HX of noise
+      // set the mantissa part: U_rand>> by -exp-QUAD_EXP_MIN
+      u_hx += u_rand >> (-exp - QUAD_EXP_MIN + QUAD_EXP_SIZE + 1 /*SIGN_SIZE*/);
+      // build LX with the remaining bits of the noise
+      // (-exp-QUAD_EXP_MIN-QUAD_HX_PMAN_SIZE) at the msb of LX
+      // remove the bit already used in hx and put the remaining at msb of LX
+      uint64_t u_lx = u_rand << (QUAD_HX_PMAN_SIZE + exp + QUAD_EXP_MIN);
+      SET_FLT128_WORDS64(noise, u_hx, u_lx);
+    } else { // the higher part of the noise starts in LX of noise
+      // the noise has already been implicitly shifted by QUAD_HX_PMAN_SIZE when
+      // starting in LX
+      uint64_t u_lx = u_rand >> (-exp - QUAD_EXP_MIN - QUAD_HX_PMAN_SIZE);
+      SET_FLT128_WORDS64(noise, u_hx, u_lx);
+    }
+//BL    int prec = 20;
+//BL    int width = 46;
+    // char buf[128];
+    // int len=quadmath_snprintf (buf, sizeof(buf), "%+-#*.20Qe", width, noise);
+    // if ((size_t) len < sizeof(buf))
+    // printf ("subnormal noise %s\n", buf);
+    return noise;
+  }
+  // normal case
+  // complement the exponent, shift it at the right place in the MSW
+  hx = (((uint64_t)exp + rexpd(d_rand)) + QUAD_EXP_COMP) << QUAD_HX_PMAN_SIZE;
+  // set sign = sign of d_rand
+  hx += u_rand & DOUBLE_GET_SIGN;
+  // extract u_rand (pseudo) mantissa and put the first 48 bits in hx...
+  uint64_t p_mantissa = u_rand & DOUBLE_GET_PMAN;
+  hx += (p_mantissa) >>
+        (DOUBLE_PMAN_SIZE - QUAD_HX_PMAN_SIZE); // 4=52 (double pmantissa) - 48
+  //...and the last 4 in lx at msb
+  // uint64_t
+  lx = (p_mantissa) << (SIGN_SIZE + DOUBLE_EXP_SIZE +
+                        QUAD_HX_PMAN_SIZE); // 60=1(s)+11(exp double)+48(hx)
+  SET_FLT128_WORDS64(noise, hx, lx);
+//BL  int prec = 20;
+//BL  int width = 46;
+  return noise;
+}
+
+static bool _is_representableq(__float128 *qa) {
+  /* Check if *qa is exactly representable
+   * in the current virtual precision */
+  uint64_t hx, lx;
+  GET_FLT128_WORDS64(hx, lx, *qa);
+
+  /* compute representable bits in hx and lx */
+  char bits_in_hx = min((MCALIB_DOUBLE_T - 1), QUAD_HX_PMAN_SIZE);
+  char bits_in_lx = (MCALIB_DOUBLE_T - 1) - bits_in_hx;
+
+  /* check bits in lx */
+  /* here we know that bits_in_lx < 64 */
+  bool representable = ((lx << bits_in_lx) == 0);
+
+  /* check bits in hx,
+   * the test always succeeds when bits_in_hx == QUAD_HX_PMAN_SIZE,
+   * cannot remove the test since << 64 is undefined in C. */
+  if (bits_in_hx < QUAD_HX_PMAN_SIZE) {
+    representable &= ((hx << (1 + QUAD_EXP_SIZE + bits_in_hx)) == 0);
+  }
+
+  return representable;
+}
+
+static bool _is_representabled(double *da) {
+  /* Check if *da is exactly representable
+   * in the current virtual precision */
+  uint64_t p_mantissa = (*((uint64_t *)da)) & DOUBLE_GET_PMAN;
+  /* here we know that (MCALIB_FLOAT_T-1) < 53 */
+  return ((p_mantissa << (MCALIB_FLOAT_T - 1)) == 0);
+}
+
+static void _mca_inexactq(__float128 *qa) {
+  if (MCALIB_OP_TYPE == MCAMODE_IEEE) {
+    return ;
+  }
+
+  /* In RR if the number is representable in current virtual precision,
+   * do not add any noise */
+  if (MCALIB_OP_TYPE == MCAMODE_RR && _is_representableq(qa)) {
+    return ;
+  }
+
+  int32_t e_a = 0;
+  e_a = rexpq(*qa);
+  int32_t e_n = e_a - (MCALIB_DOUBLE_T - 1);
+  __float128 noise = qnoise(e_n);
+  *qa = noise + *qa;
+}
+
+static void _mca_inexactd(double *da) {
+  if (MCALIB_OP_TYPE == MCAMODE_IEEE) {
+    return ;
+  }
+
+  /* In RR if the number is representable in current virtual precision,
+   * do not add any noise */
+  if (MCALIB_OP_TYPE == MCAMODE_RR && _is_representabled(da)) {
+    return ;
+  }
+
+  int32_t e_a = 0;
+  e_a = rexpd(*da);
+  int32_t e_n = e_a - (MCALIB_FLOAT_T - 1);
+  double d_rand = (_mca_rand() - 0.5);
+  *da = *da + pow2d(e_n) * d_rand;
+}
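+// Order of magnitude of the noise added above (informative): for a value
+// close to 1 (e_a = 0) and MCALIB_FLOAT_T = 24, e_n = -23, so the
+// perturbation is pow2d(-23) * U(-0.5, 0.5), i.e. at most half a unit in the
+// last place of a single-precision float.  This is how float operations
+// emulated in double (see _mca_sbin below) recover a float-like round-off.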
+
+//BL static void _mca_seed(void) {
+//BL   const int key_length = 3;
+//BL   uint64_t init_key[key_length];
+//BL   struct timeval t1;
+//BL   gettimeofday(&t1, NULL);
+//BL 
+//BL   /* Hopefully the following seed is good enough for Monte Carlo */
+//BL   init_key[0] = t1.tv_sec;
+//BL   init_key[1] = t1.tv_usec;
+//BL   init_key[2] = getpid();
+//BL 
+//BL   tinymt64_init_by_array(&random_state, init_key, key_length);
+//BL }
+
+static void _mca_set_seed(uint64_t* init_key, int key_length) {
+  tinymt64_init_by_array(&random_state, init_key, key_length);
+}
+
+/******************** MCA ARITHMETIC FUNCTIONS ********************
+* The following set of functions perform the MCA operation. Operands
+* are first converted to quad  format (GCC), inbound and outbound
+* perturbations are applied using the _mca_inexact function, and the
+* result converted to the original format for return
+*******************************************************************/
+
+// perform_bin_op: applies the binary operator (op) to (a) and (b)
+// and stores the result in (res)
+#define perform_bin_op(op, res, a, b)                                          \
+  switch (op) {                                                                \
+  case MCA_ADD:                                                                \
+    res = (a) + (b);                                                           \
+    break;                                                                     \
+  case MCA_MUL:                                                                \
+    res = (a) * (b);                                                           \
+    break;                                                                     \
+  case MCA_SUB:                                                                \
+    res = (a) - (b);                                                           \
+    break;                                                                     \
+  case MCA_DIV:                                                                \
+    res = (a) / (b);                                                           \
+    break;                                                                     \
+  default:                                                                     \
+     if(mcaquad_panicHandler!=NULL){ /*Modif BL*/                       \
+        mcaquad_panicHandler("invalid operator in mcaquad.\n");         \
+     }\
+  };
+
+static inline float _mca_sbin(float a, float b, const int dop) {
+  double da = (double)a;
+  double db = (double)b;
+
+  double res = 0;
+
+  if (MCALIB_OP_TYPE != MCAMODE_RR) {
+    _mca_inexactd(&da);
+    _mca_inexactd(&db);
+  }
+
+  perform_bin_op(dop, res, da, db);
+
+  if (MCALIB_OP_TYPE != MCAMODE_PB) {
+    _mca_inexactd(&res);
+  }
+
+  return ((float)res);
+}
+
+static inline double _mca_dbin(double a, double b, const int qop) {
+  __float128 qa = (__float128)a;
+  __float128 qb = (__float128)b;
+  __float128 res = 0;
+
+  if (MCALIB_OP_TYPE != MCAMODE_RR) {
+    _mca_inexactq(&qa);
+    _mca_inexactq(&qb);
+  }
+
+  perform_bin_op(qop, res, qa, qb);
+
+  if (MCALIB_OP_TYPE != MCAMODE_PB) {
+    _mca_inexactq(&res);
+  }
+
+  return NEAREST_DOUBLE(res);
+}
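+// Summary of the two guards in _mca_sbin/_mca_dbin (informative): inputs are
+// perturbed unless the mode is RR, and the result is perturbed unless the
+// mode is PB (_mca_inexact* returns early in IEEE mode).  Hence:
+//   MCAMODE_IEEE -> no perturbation        MCAMODE_MCA -> inputs and result
+//   MCAMODE_PB   -> inputs only            MCAMODE_RR  -> result only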
+
+static inline float _mca_dtosbin(double a){
+
+   float resf;
+   if (MCALIB_OP_TYPE != MCAMODE_RR) {
+      __float128 qa = (__float128)a;
+      _mca_inexactq(&qa);
+      resf=NEAREST_FLOAT(qa);
+   }else{
+      resf=(float)a;
+   }
+
+   if (MCALIB_OP_TYPE != MCAMODE_PB) {
+      double resd;
+      resd=(double)resf;
+      _mca_inexactd(&resd);
+      return NEAREST_FLOAT(resd);
+   }else{
+      return resf;
+   }
+}
+
+static inline double _mca_dbin_fma(double a, double b, double c) {
+  __float128 qa = (__float128)a;
+  __float128 qb = (__float128)b;
+  __float128 qc = (__float128)c;
+  __float128 res = 0;
+
+  if (MCALIB_OP_TYPE != MCAMODE_RR) {
+    _mca_inexactq(&qa);
+    _mca_inexactq(&qb);
+    _mca_inexactq(&qc);
+  }
+
+  res=fmaqApprox(qa,qb,qc); /* use the perturbed quad operands computed above */
+
+  if (MCALIB_OP_TYPE != MCAMODE_PB) {
+    _mca_inexactq(&res);
+  }
+
+  return NEAREST_DOUBLE(res);
+}
+
+static inline double _mca_sbin_fma(double a, double b, double c) {
+   double da = (double)a;
+   double db = (double)b;
+   double dc = (double)c;
+   double res = 0;
+
+  if (MCALIB_OP_TYPE != MCAMODE_RR) {
+    _mca_inexactd(&da);
+    _mca_inexactd(&db);
+    _mca_inexactd(&dc);
+  }
+
+  res=fmaApprox(da,db,dc); /* use the perturbed operands computed above */
+
+  if (MCALIB_OP_TYPE != MCAMODE_PB) {
+    _mca_inexactd(&res);
+  }
+
+  return ((float)res);
+}
+
+
+/************************* FPHOOKS FUNCTIONS *************************
+* These functions correspond to those inserted into the source code
+* during source to source compilation and are replacement to floating
+* point operators
+**********************************************************************/
+
+#define QUADMCAVERROU 
+#ifndef QUADMCAVERROU 
+static float _floatadd(float a, float b) { return _mca_sbin(a, b, MCA_ADD); }
+
+static float _floatsub(float a, float b) {
+  // return a - b
+  return _mca_sbin(a, b, MCA_SUB);
+}
+
+static float _floatmul(float a, float b) {
+  // return a * b
+  return _mca_sbin(a, b, MCA_MUL);
+}
+
+static float _floatdiv(float a, float b) {
+  // return a / b
+  return _mca_sbin(a, b, MCA_DIV);
+}
+
+static double _doubleadd(double a, double b) {
+  double tmp = _mca_dbin(a, b, MCA_ADD);
+  return tmp;
+}
+
+static double _doublesub(double a, double b) {
+  // return a - b
+  return _mca_dbin(a, b, MCA_SUB);
+}
+
+static double _doublemul(double a, double b) {
+  // return a * b
+  return _mca_dbin(a, b, MCA_MUL);
+}
+
+static double _doublediv(double a, double b) {
+  // return a / b
+  return _mca_dbin(a, b, MCA_DIV);
+}
+
+struct mca_interface_t quad_mca_interface = {
+    _floatadd,  _floatsub,     _floatmul,         _floatdiv,
+    _doubleadd, _doublesub,    _doublemul,        _doublediv,
+    _mca_seed,  _set_mca_mode, _set_mca_precision};
+#endif
diff --git a/verrou/backend_mcaquad/test_main.cxx b/verrou/backend_mcaquad/test_main.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..952da05ac0ef46a9ea2b9f5c660a25feb4d93c5a
--- /dev/null
+++ b/verrou/backend_mcaquad/test_main.cxx
@@ -0,0 +1,80 @@
+
+#define DEBUG_PRINT_OP
+
+#include "interflop_mcaquad.h"
+#include <stdio.h>
+#include <iostream>
+#include <iomanip>
+
+#include <sys/time.h>
+#include <unistd.h>
+
+void print_debug(int nbArg, const char * op,const  double* a, const double* res){
+
+  if(nbArg==1){
+    std::cout << op << " : "<< a[0] << "->"<< res[0] << std::endl;
+  }
+
+  if(nbArg==2){
+    std::cout << op << " : "<< a[0] << "," << a[1]<< "->"<< res[0] << std::endl;
+  }
+
+  if(nbArg==3){
+    std::cout << op << " : "<< a[0] << "," << a[1]<< "," << a[2]<< "->"<< res[0] << std::endl;
+  }
+
+} ;
+
+
+int main(int argc, char** argv){
+
+  void* context;
+  
+  struct interflop_backend_interface_t ifmcaquad=interflop_mcaquad_init(&context);
+
+  //  interflop_verrou_configure(VR_NEAREST, context);
+  mcaquad_conf_t conf;
+  conf.precision_float=32;
+  conf.precision_double=53;
+  conf.precision_double=20;
+  conf.mode=MCAMODE_MCA;
+  //MCAMODE_PB;
+  //MCAMODE_RR;
+  interflop_mcaquad_configure(conf, context);
+
+  struct timeval now;
+  gettimeofday(&now, NULL);
+  unsigned int pid = getpid();
+  unsigned int firstSeed = now.tv_usec + pid;
+
+  firstSeed=3;
+  mcaquad_set_seed(firstSeed);
+
+  //verrou_set_debug_print_op(&print_debug);
+  
+
+  double a=0.1;
+  double b=0.1;
+  double c1;
+  double c2;
+
+  interflop_mcaquad_add_double(a,b,&c1,context);
+  ifmcaquad.interflop_add_double(a,b,&c2,context);
+  
+  std::cout << std::setprecision(16);
+  std::cout << "c1: "<<c1 << std::endl; 
+  std::cout << "c2: "<<c2 << std::endl; 
+
+  ifmcaquad.interflop_madd_double(a,b,c1,&c2,context);
+
+  std::cout << "fma: "<<c2 << std::endl;
+
+  //  ifmcaquad.interflop_madd_double(a,b,c1,&c2,context)
+  
+  interflop_mcaquad_finalyze(context);
+  
+
+  return 0;
+}
+
+
diff --git a/verrou/backend_verrou/interflop_verrou.cxx b/verrou/backend_verrou/interflop_verrou.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..a2196ba6a7e2f620dd5f7e6673d4b1d5f842e621
--- /dev/null
+++ b/verrou/backend_verrou/interflop_verrou.cxx
@@ -0,0 +1,214 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                        interflop_verrou.cxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "interflop_verrou.h"
+#include "vr_nextUlp.hxx"
+#include "vr_isNan.hxx"
+#include "vr_fma.hxx"
+#include <stddef.h>
+//extern "C" {
+#include "vr_rand.h"
+//}
+
+
+
+
+#include "vr_roundingOp.hxx"
+#include "vr_op.hxx"
+
+
+// * Global variables & parameters
+int CHECK_C  = 0;
+vr_RoundingMode DEFAULTROUNDINGMODE;
+vr_RoundingMode ROUNDINGMODE;
+unsigned int vr_seed;
+void (*vr_panicHandler)(const char*)=NULL;
+void (*vr_nanHandler)()=NULL;
+
+
+
+void verrou_set_panic_handler(void (*panicHandler)(const char*)){
+  vr_panicHandler=panicHandler;
+}
+
+void verrou_set_nan_handler(void (*nanHandler)()){
+  vr_nanHandler=nanHandler;
+}
+
+
+void (*vr_debug_print_op)(int,const char*, const double*, const double*)=NULL;
+void verrou_set_debug_print_op(void (*printOpHandler)(int nbArg,const char*name, const double* args,const double* res)){
+  vr_debug_print_op=printOpHandler;
+};
+
+
+// * Operation implementation
+const char*  verrou_rounding_mode_name (enum vr_RoundingMode mode) {
+  switch (mode) {
+  case VR_NEAREST:
+    return "NEAREST";
+  case VR_UPWARD:
+    return "UPWARD";
+  case VR_DOWNWARD:
+    return "DOWNWARD";
+  case VR_ZERO:
+    return "TOWARD_ZERO";
+  case VR_RANDOM:
+    return "RANDOM";
+  case VR_AVERAGE:
+    return "AVERAGE";
+  case VR_FARTHEST:
+    return "FARTHEST";
+  case VR_FLOAT:
+    return "FLOAT";
+  case VR_NATIVE:
+    return "NATIVE";
+  }
+
+  return "undefined";
+}
+
+
+
+
+// * C interface
+void IFV_FCTNAME(configure)(vr_RoundingMode mode,void* context) {
+  DEFAULTROUNDINGMODE = mode;
+  ROUNDINGMODE=mode;
+}
+
+void IFV_FCTNAME(finalyze)(void* context){
+}
+
+const char* IFV_FCTNAME(get_backend_name)() {
+  return "verrou";
+}
+
+const char* IFV_FCTNAME(get_backend_version)() {
+  return "1.x-dev";
+}
+
+void verrou_begin_instr(){
+  ROUNDINGMODE=DEFAULTROUNDINGMODE;
+}
+
+void verrou_end_instr(){
+  ROUNDINGMODE= VR_NEAREST;
+}
+
+void verrou_set_seed (unsigned int seed) {
+  vr_seed = vr_rand_int (&vr_rand);
+  vr_rand_setSeed (&vr_rand, seed);
+}
+
+void verrou_set_random_seed () {
+  vr_rand_setSeed(&vr_rand, vr_seed);
+}
+
+void IFV_FCTNAME(add_double) (double a, double b, double* res,void* context) {
+  typedef OpWithSelectedRoundingMode<AddOp <double>  > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(add_float) (float a, float b, float* res,void* context) {
+  typedef OpWithSelectedRoundingMode<AddOp <float>  > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(sub_double) (double a, double b, double* res,void* context) {
+  typedef OpWithSelectedRoundingMode<SubOp <double> > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(sub_float) (float a, float b, float* res,void* context) {
+  typedef OpWithSelectedRoundingMode<SubOp <float>  > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(mul_double) (double a, double b, double* res,void* context) {
+  typedef OpWithSelectedRoundingMode<MulOp <double> > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(mul_float) (float a, float b, float* res,void* context) {
+  typedef OpWithSelectedRoundingMode<MulOp <float> > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(div_double) (double a, double b, double* res,void* context) {
+  typedef OpWithSelectedRoundingMode<DivOp <double> > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(div_float) (float a, float b, float* res,void* context) {
+  typedef OpWithSelectedRoundingMode<DivOp <float>  > Op;
+  Op::apply(Op::PackArgs(a,b),res,context);
+}
+
+void IFV_FCTNAME(cast_double_to_float) (double a, float* res, void* context){
+  typedef OpWithSelectedRoundingMode<CastOp<double,float>  > Op;
+  Op::apply(Op::PackArgs(a),res,context);
+}
+
+void IFV_FCTNAME(madd_double) (double a, double b, double c, double* res, void* context){
+  typedef OpWithSelectedRoundingMode<MAddOp <double> > Op;
+  Op::apply(Op::PackArgs(a,b,c), res,context);
+}
+
+void IFV_FCTNAME(madd_float) (float a, float b, float c, float* res, void* context){
+  typedef OpWithSelectedRoundingMode<MAddOp <float> > Op;
+  Op::apply(Op::PackArgs(a,b,c), res, context);
+}
+
+
+
+
+struct interflop_backend_interface_t IFV_FCTNAME(init)(void ** context){
+  struct interflop_backend_interface_t config;
+
+  config.interflop_add_float = & IFV_FCTNAME(add_float);
+  config.interflop_sub_float = & IFV_FCTNAME(sub_float);
+  config.interflop_mul_float = & IFV_FCTNAME(mul_float);
+  config.interflop_div_float = & IFV_FCTNAME(div_float);
+
+  config.interflop_add_double = & IFV_FCTNAME(add_double);
+  config.interflop_sub_double = & IFV_FCTNAME(sub_double);
+  config.interflop_mul_double = & IFV_FCTNAME(mul_double);
+  config.interflop_div_double = & IFV_FCTNAME(div_double);
+
+  config.interflop_cast_double_to_float=& IFV_FCTNAME(cast_double_to_float);
+
+  config.interflop_madd_float = & IFV_FCTNAME(madd_float);
+  config.interflop_madd_double =& IFV_FCTNAME(madd_double);
+
+  return config;
+}
diff --git a/verrou/backend_verrou/interflop_verrou.h b/verrou/backend_verrou/interflop_verrou.h
new file mode 100644
index 0000000000000000000000000000000000000000..015233e3f178fab9b2a661ad4c33313f1e74c200
--- /dev/null
+++ b/verrou/backend_verrou/interflop_verrou.h
@@ -0,0 +1,105 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for floating-point operations overloading.         ---*/
+/*---                                          interflop_verrou.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __INTERFLOP_VERROU_H
+#define __INTERFLOP_VERROU_H
+
+//#define DEBUG_PRINT_OP
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#define IFV_FCTNAME(FCT) interflop_verrou_##FCT
+
+#include "../interflop_backend_interface.h"
+
+
+  
+  enum vr_RoundingMode {
+    VR_NEAREST,
+    VR_UPWARD,
+    VR_DOWNWARD,
+    VR_ZERO,
+    VR_RANDOM, // Must be immediately after standard rounding modes
+    VR_AVERAGE,
+    VR_FARTHEST,
+    VR_FLOAT,
+    VR_NATIVE
+  };
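+
+  /* Short reminder of the intended semantics (informative; see the Verrou
+     documentation for the authoritative description): NEAREST, UPWARD,
+     DOWNWARD and ZERO are the four IEEE-754 rounding modes; RANDOM rounds
+     each result up or down with probability 1/2; AVERAGE rounds up or down
+     with probability proportional to the discarded error (stochastic
+     rounding); FARTHEST rounds to the farther of the two neighbouring
+     values; FLOAT performs double operations in single precision; NATIVE
+     keeps the FPU's current rounding mode. */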
+
+
+
+  void IFV_FCTNAME(configure)(enum vr_RoundingMode mode,void* context);
+  void IFV_FCTNAME(finalyze)(void* context);
+
+  const char* IFV_FCTNAME(get_backend_name)(void);
+  const char* IFV_FCTNAME(get_backend_version)(void);
+
+
+  const char* verrou_rounding_mode_name (enum vr_RoundingMode mode);
+
+  void verrou_begin_instr(void);
+  void verrou_end_instr(void);
+
+  void verrou_set_seed (unsigned int seed);
+  void verrou_set_random_seed (void);
+
+  extern void (*vr_panicHandler)(const char*);
+  void verrou_set_panic_handler(void (*)(const char*));
+  extern void (*vr_nanHandler)(void);
+  void verrou_set_nan_handler(void (*nanHandler)(void));
+
+  extern void (*vr_debug_print_op)(int,const char*, const double* args, const double* res);
+  void verrou_set_debug_print_op(void (*)(int nbArg, const char* name, const double* args, const double* res));
+
+  struct interflop_backend_interface_t IFV_FCTNAME(init)(void ** context);
+
+  void IFV_FCTNAME(add_double) (double a, double b, double* res, void* context);    
+  void IFV_FCTNAME(add_float)  (float a,  float b,  float*  res, void* context);
+  void IFV_FCTNAME(sub_double) (double a, double b, double* res, void* context);
+  void IFV_FCTNAME(sub_float)  (float a,  float b,  float*  res, void* context);
+  void IFV_FCTNAME(mul_double) (double a, double b, double* res, void* context);
+  void IFV_FCTNAME(mul_float)  (float a,  float b,  float*  res, void* context);
+  void IFV_FCTNAME(div_double) (double a, double b, double* res, void* context);
+  void IFV_FCTNAME(div_float)  (float a,  float b,  float*  res, void* context);
+
+  void IFV_FCTNAME(cast_double_to_float) (double a, float* b, void* context);
+
+  void IFV_FCTNAME(madd_double)(double a, double b, double c, double* res, void* context);
+  void IFV_FCTNAME(madd_float) (float a,  float b,  float c,  float*  res, void* context);
+
+  
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* ndef __INTERFLOP_VERROU_H */
diff --git a/verrou/backend_verrou/test_main.cxx b/verrou/backend_verrou/test_main.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..d0952ee52d252320a5931a8c1eede4fcaa5886a7
--- /dev/null
+++ b/verrou/backend_verrou/test_main.cxx
@@ -0,0 +1,56 @@
+
+#define DEBUG_PRINT_OP
+
+#include "interflop_verrou.h"
+#include <stdio.h>
+#include <iostream>
+#include <iomanip>
+
+
+
+void print_debug(int nbArg, const char * op,const  double* a, const double* res){
+
+  if(nbArg==1){
+    std::cout << op << " : "<< a[0] << "->"<< res[0] << std::endl;
+  }
+
+  if(nbArg==2){
+    std::cout << op << " : "<< a[0] << "," << a[1]<< "->"<< res[0] << std::endl;
+  }
+
+  if(nbArg==3){
+    std::cout << op << " : "<< a[0] << "," << a[1]<< "," << a[2]<< "->"<< res[0] << std::endl;
+  }
+
+} ;
+
+
+int main(int argc, char** argv){
+
+  void* context;
+  struct interflop_backend_interface_t ifverrou=interflop_verrou_init(&context);
+
+  //  interflop_verrou_configure(VR_NEAREST, context);
+  interflop_verrou_configure(VR_FLOAT, context);
+
+  verrou_set_debug_print_op(&print_debug);
+  
+
+  double a=0.1;
+  double b=0.1;
+  double c;
+  
+  interflop_verrou_add_double(a,b,&c,context);
+
+  std::cout << std::setprecision(16);
+  std::cout << "c: "<<c << std::endl; 
+
+
+  
+  interflop_verrou_finalyze(context);
+  
+
+  return 0;
+}
+
+
diff --git a/verrou/backend_verrou/vr_fma.hxx b/verrou/backend_verrou/vr_fma.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..8bc404af32a8c0e7682c7b02273a84c43d94206d
--- /dev/null
+++ b/verrou/backend_verrou/vr_fma.hxx
@@ -0,0 +1,68 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains low-level code calling FMA instructions.  ---*/
+/*---                                                 vr_fma.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+
+#ifdef    USE_VERROU_FMA
+#include  <immintrin.h>
+//#include  <fmaintrin.h>
+
+template<class REALTYPE>
+inline REALTYPE vr_fma(const REALTYPE&, const REALTYPE&, const REALTYPE&){
+  return 0./ 0.; // NaN, to make sure this generic version is never used
+}
+
+template<>
+inline double vr_fma<double>(const double& a, const double& b, const double& c){
+  double d;
+  __m128d ai, bi,ci,di;
+  ai = _mm_load_sd(&a);
+  bi = _mm_load_sd(&b);
+  ci = _mm_load_sd(&c);
+  di=_mm_fmadd_sd(ai,bi,ci);
+  d=_mm_cvtsd_f64(di);
+  return d;
+}
+
+
+template<>
+inline float vr_fma<float>(const float& a, const float& b, const float& c){
+  float d;
+  __m128 ai, bi,ci,di;
+  ai = _mm_load_ss(&a);
+  bi = _mm_load_ss(&b);
+  ci = _mm_load_ss(&c);
+  di=_mm_fmadd_ss(ai,bi,ci);
+  d=_mm_cvtss_f32(di);
+  return d;
+}
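+
+// The two specializations above rely on the FMA3 intrinsics _mm_fmadd_sd and
+// _mm_fmadd_ss from <immintrin.h>, so they are only compiled when
+// USE_VERROU_FMA is defined and the build targets FMA-capable hardware
+// (typically -mfma with GCC/Clang).  As an illustrative sanity check,
+// vr_fma<double>(2.0, 3.0, 1.0) == 7.0, computed with a single rounding.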
+#endif //USE_VERROU_FMA
diff --git a/verrou/backend_verrou/vr_isNan.hxx b/verrou/backend_verrou/vr_isNan.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..0ffd1ef3eaf73a9ad3c1e95299a8e45f7a0310ab
--- /dev/null
+++ b/verrou/backend_verrou/vr_isNan.hxx
@@ -0,0 +1,94 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Utilities for easier manipulation of floating-point values.  ---*/
+/*---                                                vr_isNan.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+
+#include <cfloat>
+#include <stdint.h>
+
+
+#include "interflop_verrou.h"
+
+
+template <class REALTYPE>
+inline bool isNan (const REALTYPE & x) {
+  vr_panicHandler("isNan called on an unknown type");
+  return false;
+}
+
+template <>
+inline bool isNan<double> (const double & x) {
+  static const uint64_t maskSpecial = 0x7ff0000000000000;
+  static const uint64_t maskInf     = 0x000fffffffffffff;
+  const uint64_t* X = reinterpret_cast<const uint64_t*>(&x);
+  if ((*X & maskSpecial) == maskSpecial) {
+    if ((*X & maskInf) != 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+template <>
+inline bool isNan<float> (const float & x) {
+  static const uint32_t maskSpecial = 0x7f800000;
+  static const uint32_t maskInf     = 0x007fffff;
+  const uint32_t* X = reinterpret_cast<const uint32_t*>(&x);
+  if ((*X & maskSpecial) == maskSpecial) {
+    if ((*X & maskInf) != 0) {
+      return true;
+    }
+  }
+  return false;
+}
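+
+// Bit-pattern reminder for the two specializations above (informative): a
+// value is NaN when all exponent bits are set and the mantissa is non-zero.
+// For double, 0x7ff8000000000000 (a quiet NaN) -> true, while
+// 0x7ff0000000000000 (+inf) has a zero mantissa -> false.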
+
+
+
+template<class REALTYPE>
+inline bool isNanInf (const REALTYPE & x) {
+  vr_panicHandler("isNanInf called on an unknown type");
+  return false;
+}
+
+template <>
+inline bool isNanInf<double> (const double & x) {
+  static const uint64_t mask = 0x7ff0000000000000;
+  const uint64_t* X = reinterpret_cast<const uint64_t*>(&x);
+  return (*X & mask) == mask;
+}
+
+template <>
+inline bool isNanInf<float> (const float & x) {
+  static const uint32_t mask = 0x7f800000;
+  const uint32_t* X = reinterpret_cast<const uint32_t*>(&x);	
+  return (*X & mask) == mask;  
+}
diff --git a/verrou/backend_verrou/vr_nextUlp.hxx b/verrou/backend_verrou/vr_nextUlp.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..496847097e2686356638fca90dd9edd1bd79bd9e
--- /dev/null
+++ b/verrou/backend_verrou/vr_nextUlp.hxx
@@ -0,0 +1,111 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Utilities for easier manipulation of floating-point values.  ---*/
+/*---                                                vr_nextUlp.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+//#include <string>
+//#include <sstream>
+//#include <math.h>
+#include <cfloat>
+#include <stdint.h>
+#include <limits>
+
+#include "interflop_verrou.h"
+
+
+template<class REALTYPE>
+inline REALTYPE nextAwayFromZero(REALTYPE a){
+   vr_panicHandler("nextAwayFromZero called on unknown type");
+};
+
+template<>
+inline double nextAwayFromZero<double>(double a){
+  double res=a;
+  uint64_t* resU=reinterpret_cast<uint64_t*>(&res);
+  (*resU)+=1;
+  return res;
+};
+
+
+template<>
+inline float nextAwayFromZero<float>(float a){
+  float res=a;
+  uint32_t* resU=reinterpret_cast<uint32_t*>(&res);
+  (*resU)+=1;
+  return res;
+};
+
+
+
+template<class REALTYPE>
+inline REALTYPE nextTowardZero(REALTYPE a){
+  vr_panicHandler("nextTowardZero called on unknown type");
+};
+
+template<>
+inline double nextTowardZero<double>(double a){
+  double res=a;
+  uint64_t* resU=reinterpret_cast<uint64_t*>(&res);
+  (*resU)-=1;
+  return res;
+};
+
+
+template<>
+inline float nextTowardZero<float>(float a){
+  float res=a;
+  uint32_t* resU=reinterpret_cast<uint32_t*>(&res);
+  (*resU)-=1;
+  return res;
+};
+
+
+
+template<class REALTYPE>
+inline REALTYPE nextAfter(REALTYPE a){
+  if(a>=0 ){
+    return nextAwayFromZero(a);
+  }else{
+    return nextTowardZero(a);
+  }
+};
+
+template<class REALTYPE>
+inline REALTYPE nextPrev(REALTYPE a){
+  if(a==0){
+    return -std::numeric_limits<REALTYPE>::denorm_min();
+  }
+  if(a>0 ){
+    return nextTowardZero(a);
+  }else{
+    return nextAwayFromZero(a);
+  }
+};
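+
+// These helpers step through consecutive IEEE-754 values by incrementing or
+// decrementing the raw bit pattern (informative examples, positive normal
+// numbers):
+//   nextAfter<double>(1.0) == 1.0 + 2^-52   (one ulp above 1.0)
+//   nextPrev<double>(1.0)  == 1.0 - 2^-53   (largest double below 1.0)
+// nextPrev(0.0) is special-cased to -denorm_min().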
diff --git a/verrou/backend_verrou/vr_op.hxx b/verrou/backend_verrou/vr_op.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..5287a25acdceb06e315218d0cea44abbf97ef555
--- /dev/null
+++ b/verrou/backend_verrou/vr_op.hxx
@@ -0,0 +1,672 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Implementation of error estimation for all FP operations     ---*/
+/*---                                                    vr_op.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+
+
+
+template<class> struct realTypeHelper;
+
+template<>
+struct realTypeHelper<float>{
+  typedef float SimdBasicType;
+  static const int SimdLength=1;
+};
+
+
+
+template<>
+struct realTypeHelper<double>{
+  typedef double SimdBasicType;
+  static const int SimdLength=1;
+};
+
+template<>
+struct realTypeHelper<long double>{
+  typedef long double SimdBasicType;
+  static const int SimdLength=1;
+};
+
+
+template<class REALTYPESIMD>
+struct realTypeHelper<const REALTYPESIMD>{
+  typedef typename realTypeHelper<REALTYPESIMD>::SimdBasicType SimdBasicType;
+  static const int SimdLength=realTypeHelper<REALTYPESIMD>::SimdLength;
+};
+
+
+
+
+template<class REALTYPE, int NB>
+struct vr_packArg;
+
+
+template<class REALTYPE>
+struct vr_packArg<REALTYPE,1>{
+  static const int nb= 1;
+  typedef REALTYPE RealType;
+  typedef typename realTypeHelper<REALTYPE>::SimdBasicType SimdBasicType;
+  typedef vr_packArg<SimdBasicType,1> SubPack;
+  
+  inline vr_packArg(const RealType& v1):arg1(v1)
+  {
+  };
+
+  inline const SubPack getSubPack(int I)const{ // by value: avoid returning a reference to a temporary
+    return SubPack(arg1[I]);
+  }
+
+  inline void serialyzeDouble(double* res)const{
+    res[0]=(double)arg1;
+  }
+
+  inline bool isOneArgNanInf()const{
+    return isNanInf<RealType>(arg1);
+  }
+  
+  const RealType& arg1;
+
+
+};
+
+
+template<class REALTYPE>
+struct vr_packArg<REALTYPE,2>{
+  static const int nb= 2;
+  typedef REALTYPE RealType;
+  typedef typename realTypeHelper<REALTYPE>::SimdBasicType SimdBasicType;
+  typedef vr_packArg<SimdBasicType,2> SubPack;
+  
+  vr_packArg(const RealType& v1,const RealType& v2):arg1(v1),arg2(v2)
+  {
+  };
+
+  inline const SubPack getSubPack(int I)const{
+    return SubPack(arg1[I],arg2[I]);
+  }
+  
+  inline void serialyzeDouble(double* res)const{
+    res[0]=(double)arg1;
+    res[1]=(double)arg2;
+  }
+
+  inline bool isOneArgNanInf()const{
+    return (isNanInf<RealType>(arg1) || isNanInf<RealType>(arg2));
+  }
+
+  
+  const RealType& arg1;
+  const RealType& arg2;
+};
+
+
+template<class REALTYPE>
+struct vr_packArg<REALTYPE,3>{
+  static const int nb= 3;
+  typedef REALTYPE RealType;
+  typedef typename realTypeHelper<REALTYPE>::SimdBasicType SimdBasicType;
+  typedef vr_packArg<SimdBasicType,3> SubPack;
+  
+  vr_packArg(const RealType& v1,const RealType& v2,const RealType& v3):arg1(v1),arg2(v2),arg3(v3){
+  };
+
+  inline const SubPack getSubPack(int I)const{ // by value: avoid returning a reference to a temporary
+    return SubPack(arg1[I],arg2[I],arg3[I]);
+  }
+  
+  inline void serialyzeDouble(double* res)const{
+    res[0]=(double)arg1;
+    res[1]=(double)arg2;
+    res[2]=(double)arg3;
+  }
+
+  inline bool isOneArgNanInf()const{
+    return (isNanInf<RealType>(arg1) || isNanInf<RealType>(arg2) || isNanInf<RealType>(arg3) );
+  }
+
+  
+  const RealType& arg1;
+  const RealType& arg2;
+  const RealType& arg3;
+};
+
+
+template<class REALTYPE, int NB>
+class vr_roundFloat;
+
+
+template<class REALTYPE>
+struct vr_roundFloat<REALTYPE, 1>{
+  vr_roundFloat(const vr_packArg<REALTYPE,1>& p): arg1(REALTYPE(float(p.arg1))){
+  }
+  vr_packArg<REALTYPE,1> getPack()const{
+    return vr_packArg<REALTYPE,1>(arg1);
+  }
+  const REALTYPE arg1;
+};
+
+
+template<class REALTYPE>
+struct vr_roundFloat<REALTYPE, 2>{
+  vr_roundFloat(const vr_packArg<REALTYPE,2>& p): arg1(REALTYPE(float(p.arg1 ))),
+						  arg2(REALTYPE(float(p.arg2 ))){
+  }
+  vr_packArg<REALTYPE,2> getPack()const{
+    return vr_packArg<REALTYPE,2>(arg1,arg2);
+  }
+  const REALTYPE arg1;
+  const REALTYPE arg2;
+};
+
+template<class REALTYPE>
+struct vr_roundFloat<REALTYPE, 3>{
+  vr_roundFloat(const vr_packArg<REALTYPE,3>& p): arg1(REALTYPE(float(p.arg1 ))),
+						  arg2(REALTYPE(float(p.arg2 ))),
+						  arg3(REALTYPE(float(p.arg3 ))){
+  }
+  vr_packArg<REALTYPE,3> getPack()const{
+    return vr_packArg<REALTYPE,3>(arg1,arg2,arg3);
+  }
+  const REALTYPE arg1;
+  const REALTYPE arg2;
+  const REALTYPE arg3;
+};
+
+
+
+
+
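+// Each OP class below provides the rounded-to-nearest result of an operation
+// together with its exact residual (TwoSum / TwoProd style); the rounding
+// modes in vr_roundingOp.hxx use this residual to pick the neighbour to return.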
+template<typename REAL>
+class AddOp{
+public:
+  typedef REAL RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "add";}
+#endif
+
+  static inline RealType nearestOp (const PackArgs&  p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a+b;
+  }
+
+  static inline RealType error (const PackArgs& p, const RealType& x) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    const RealType z=x-a;
+    return ((a-(x-z)) + (b-z)); //algo TwoSum
+  }
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    return AddOp<RealType>::error(p,c);
+  }
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  }
+
+  static inline void twoSum(const RealType& a,const RealType& b, RealType& x,RealType& y ){
+    const PackArgs p(a,b);
+    x=AddOp<REAL>::nearestOp(p);
+    y=AddOp<REAL>::error(p,x);
+  }
+
+
+};
+
+
+template<typename REAL>
+class SubOp{
+public:
+  typedef REAL RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "sub";}
+#endif
+
+
+
+  
+  static inline RealType nearestOp (const PackArgs&  p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a-b;
+  }
+
+  static inline RealType error (const PackArgs& p, const RealType& x) {
+    const RealType & a(p.arg1);
+    const RealType & b(-p.arg2);
+    const RealType z=x-a;
+    return ((a-(x-z)) + (b-z)); //algo TwoSum
+  }
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    return SubOp<RealType>::error(p,c);
+  }
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  }
+
+};
+
+
+
+
+// Veltkamp splitting factor used by MulOp::split
+template<class REALTYPE>
+REALTYPE splitFactor(){
+  return 0./ 0.; //NaN, to make sure the generic version is never used
+}
+
+template<>
+double splitFactor<double>(){
+  return 134217729; //((2^27)+1); 27 = ceil(53/2) for double
+}
+
+template<>
+float splitFactor<float>(){
+  return 4097; //((2^12)+1); 12 = 24/2 for float
+}
+
+
+
+
+template<typename REAL>
+class MulOp{
+public:
+  typedef REAL RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "mul";}
+#endif
+
+
+  static inline RealType nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a*b;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& x) {
+    /* From "Accurate Sum and Dot Product" by Ogita, Rump and Oishi */
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    //    return __builtin_fma(a,b,-x);
+    //    VG_(umsg)("vr_fma \n");
+#ifdef    USE_VERROU_FMA
+    RealType c;
+    c=vr_fma(a,b,-x);
+    return c;
+#else
+    RealType a1,a2;
+    RealType b1,b2;
+    MulOp<RealType>::split(a,a1,a2);
+    MulOp<RealType>::split(b,b1,b2);
+
+    return (((a1*b1-x)+a1*b2+a2*b1)+a2*b2);
+#endif
+  };
+
+
+
+
+  static inline void split(RealType a, RealType& x, RealType& y){
+    //    const RealType factor=134217729; //((2^27)+1); /*27 for double*/
+    const RealType factor(splitFactor<RealType>());
+    const RealType c=factor*a;
+    x=(c-(c-a));
+    y=(a-x);
+  }
+
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    if(c!=0){
+      return MulOp<RealType>::error(p,c);
+    }else{
+      if(p.arg1==0 ||p.arg2==0){
+	return 0;
+      }
+      if(p.arg1>0){
+	return p.arg2;
+      }else{
+	return -p.arg2;
+      }
+    }
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  };
+
+  static inline void twoProd(const RealType& a,const RealType& b, RealType& x,RealType& y ){
+    const PackArgs p(a,b);
+    x=MulOp<REAL>::nearestOp(p);
+    y=MulOp<REAL>::error(p,x);
+  }
+
+};
+
+
+template<>
+class MulOp<float>{
+public:
+  typedef float RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "mul";}
+#endif
+
+  static inline RealType nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a*b;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& x) {
+    /* From "Accurate Sum and Dot Product" by Ogita, Rump and Oishi */
+    const RealType a(p.arg1);
+    const RealType b(p.arg2);
+    //    return __builtin_fma(a,b,-x);
+    //    VG_(umsg)("vr_fma \n");
+#ifdef    USE_VERROU_FMA
+    RealType c;
+    c=vr_fma(a,b,-x);
+    return c;
+#else
+    RealType a1,a2;
+    RealType b1,b2;
+    MulOp<RealType>::split(a,a1,a2);
+    MulOp<RealType>::split(b,b1,b2);
+
+    return (((a1*b1-x)+a1*b2+a2*b1)+a2*b2);
+#endif
+  };
+
+
+
+
+  static inline void split(RealType a, RealType& x, RealType& y){
+    //    const RealType factor=134217729; //((2^27)+1); /*27 for double*/
+    const RealType factor(splitFactor<RealType>());
+    const RealType c=factor*a;
+    x=(c-(c-a));
+    y=(a-x);
+  }
+
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    double res=MulOp<double>::error(vr_packArg<double,2>((double)p.arg1,(double)p.arg2) ,(double)c);
+    if(res<0){
+      return -1;
+    }
+    if(res>0){
+      return 1;
+    }
+    return 0.;
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  };
+
+  static inline void twoProd(const RealType& a,const RealType& b, RealType& x,RealType& y ){
+    const PackArgs p(a,b);
+    x=MulOp<float>::nearestOp(p);
+    y=MulOp<float>::error(p,x);
+  }
+
+};
+
+
+
+
+
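+// DivOp computes c = a/b; its residual (a - c*b)/b is evaluated with an FMA
+// when available, and with an exact product (twoProd) otherwise.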
+template<typename REAL>
+class DivOp{
+public:
+  typedef REAL RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "div";}
+#endif
+
+  static RealType inline nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a/b;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& c) {
+    const RealType & x(p.arg1);
+    const RealType & y(p.arg2);
+#ifdef    USE_VERROU_FMA
+    const RealType r=-vr_fma(c,y,-x);
+    return r/y;
+#else
+    RealType u,uu;
+    MulOp<RealType>::twoProd(c,y,u,uu);
+    return ( x-u-uu)/y ;
+#endif
+  };
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    const RealType & x(p.arg1);
+    const RealType & y(p.arg2);
+#ifdef    USE_VERROU_FMA
+    const RealType r=-vr_fma(c,y,-x);
+    return r*y;
+#else
+    RealType u,uu;
+    MulOp<RealType>::twoProd(c,y,u,uu);
+    return ( x-u-uu)*y ;
+#endif
+  };
+
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return (isNanInf<RealType>(p.arg1))||(p.arg2==RealType(0.));
+  }
+};
+
+
+template<>
+class DivOp<float>{
+public:
+  typedef float RealType;
+  typedef vr_packArg<RealType,2> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "div";}
+#endif
+
+  static RealType inline nearestOp (const PackArgs& p) {
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    return a/b;
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& c) {
+    const RealType & x(p.arg1);
+    const RealType & y(p.arg2);
+#ifdef    USE_VERROU_FMA
+    const RealType r=-vr_fma(c,y,-x);
+    return r/y;
+#else
+    RealType u,uu;
+    MulOp<RealType>::twoProd(c,y,u,uu);
+    return ( x-u-uu)/y ;
+#endif
+  };
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    const double x((double)p.arg1);
+    const double y((double) p.arg2);
+#ifdef    USE_VERROU_FMA
+    const double r=-vr_fma((double)c,y,-x);
+
+    if(r>0){return p.arg2;}
+    if(r<0){return -p.arg2;}
+    //if(r==0){
+      return 0.;
+      //}
+#else
+    RealType u,uu;
+    MulOp<RealType>::twoProd(c,y,u,uu);
+    return ( x-u-uu)*y ;
+#endif
+  };
+
+
+  static inline void check(const PackArgs& p,const RealType & c){
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return (isNanInf<RealType>(p.arg1))||(p.arg2==RealType(0.));
+  }
+
+};
+
+
+
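+// MAddOp computes fma(a,b,c); its residual is evaluated following the ErrFma
+// algorithm of Boldo and Muller (twoProd followed by twoSum).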
+template<typename REAL>
+class MAddOp{
+public:
+  typedef REAL RealType;
+  typedef vr_packArg<RealType,3> PackArgs;
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "madd";}
+#endif
+
+  
+  static RealType inline nearestOp (const PackArgs& p) {
+#ifdef    USE_VERROU_FMA
+    const RealType & a(p.arg1);
+    const RealType & b(p.arg2);
+    const RealType & c(p.arg3);
+    return vr_fma(a,b,c);
+#else
+    return 0./0.;
+#endif
+  };
+
+  static inline RealType error (const PackArgs& p, const RealType& z) {
+    // ErrFmaApp: "Exact and Approximated Error of the FMA" by Boldo and Muller
+    const RealType & a(p.arg1);
+    const RealType & x(p.arg2);
+    const RealType & b(p.arg3);
+
+    RealType ph,pl;
+    MulOp<RealType>::twoProd(a,x, ph,pl);
+
+    RealType uh,ul;
+    AddOp<RealType>::twoSum(b,ph, uh,ul);
+
+    const RealType t(uh-z);
+    return (t+(pl+ul)) ;
+  };
+
+  static inline RealType sameSignOfError (const PackArgs& p,const RealType& c) {
+    return error(p,c) ;
+  };
+
+
+  static inline void check(const PackArgs& p, const RealType& d){
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+};
+
+
+
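+// CastOp models a (possibly narrowing) conversion; the residual is the part
+// of the input value lost by the conversion.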
+template<typename REALINPUT, typename REALOUTPUT>
+class CastOp{
+public:
+  typedef REALINPUT RealTypeIn;
+  typedef REALOUTPUT RealTypeOut;
+  typedef RealTypeOut RealType;
+  typedef vr_packArg<RealTypeIn,1> PackArgs;
+  
+
+#ifdef DEBUG_PRINT_OP
+  static const char* OpName(){return "cast";}
+#endif
+
+
+  
+  static inline RealTypeOut nearestOp (const PackArgs& p) {
+    const RealTypeIn & in(p.arg1);
+    return (RealTypeOut)in;
+  };
+
+  static inline RealTypeOut error (const PackArgs& p, const RealTypeOut& z) {
+    const RealTypeIn & a(p.arg1);
+    const RealTypeIn errorHo= a- (RealTypeIn)z;
+    return (RealTypeOut) errorHo;
+  };
+
+  static inline RealTypeOut sameSignOfError (const PackArgs& p,const RealTypeOut& c) {
+    return error(p,c) ;
+  };
+
+  static inline bool isInfNotSpecificToNearest(const PackArgs&p){
+    return p.isOneArgNanInf();
+  }
+
+  static inline void check(const PackArgs& p, const RealTypeOut& d){
+  };
+
+};
diff --git a/verrou/backend_verrou/vr_rand.h b/verrou/backend_verrou/vr_rand.h
new file mode 100644
index 0000000000000000000000000000000000000000..43d7eff2c0a587a9435dda8d23155c35fe2c7741
--- /dev/null
+++ b/verrou/backend_verrou/vr_rand.h
@@ -0,0 +1,77 @@
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Interface for random number generation.                      ---*/
+/*---                                                    vr_rand.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __VR_RAND_H
+#define __VR_RAND_H
+
+//#include "pub_tool_basics.h"
+#include <cstdint>
+
+#define VERROU_LOWGEN
+
+#ifndef VERROU_LOWGEN
+#include "../backend_mcaquad/common/tinymt64.h"
+#endif
+
+
+
+typedef struct Vr_Rand_ Vr_Rand;
+struct Vr_Rand_ {
+#ifdef VERROU_LOWGEN
+  uint64_t current_;
+  uint64_t next_;
+  uint64_t seed_;
+  int32_t count_;
+#else
+  uint64_t current_;
+  tinymt64_t gen_;
+  uint64_t seed_;
+  int32_t count_;
+#endif
+};
+
+//extern Vr_Rand vr_rand;
+
+Vr_Rand vr_rand;
+
+#include "vr_rand_implem.h"
+
+
+
+/* void vr_rand_setSeed (Vr_Rand * r, unsigned int c); */
+/* unsigned int vr_rand_getSeed (Vr_Rand * r); */
+/* bool vr_rand_bool (Vr_Rand * r); */
+/* int vr_rand_int (Vr_Rand * r); */
+/* int vr_rand_max (void); */
+
+
+
+#endif
diff --git a/verrou/backend_verrou/vr_rand_implem.h b/verrou/backend_verrou/vr_rand_implem.h
new file mode 100644
index 0000000000000000000000000000000000000000..40316fa396ce93626236eac32b2b33d27ef22047
--- /dev/null
+++ b/verrou/backend_verrou/vr_rand_implem.h
@@ -0,0 +1,98 @@
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Implementation of the random number generation.              ---*/
+/*---                                             vr_rand_implem.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+//#include "vr_rand.h"
+
+// Warning: this file is meant to be included from vr_rand.h
+
+
+
+#ifdef VERROU_LOWGEN
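+// Minimal LCG (same multiplier/increment as the classic ANSI C rand()
+// example); each call yields a 15-bit value in [0, 32767].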
+inline static uint64_t vr_rand_next (Vr_Rand * r){
+  r->next_ = r->next_ * 1103515245 + 12345;
+  return (uint64_t)((r->next_/65536) % 32768);
+}
+inline int32_t vr_rand_max () {
+  return 32767;
+}
+
+inline void vr_rand_setSeed (Vr_Rand * r, uint64_t c) {
+  r->count_   = 0;
+  r->seed_    = c;
+  r->next_    = c;
+  r->current_ = vr_rand_next (r);
+}
+inline int32_t vr_loop(){
+  return 14; // 15-bit generator output: 2**15 = 32768
+}
+
+#else
+inline static uint64_t vr_rand_next (Vr_Rand * r){
+  return tinymt64_generate_uint64(&(r->gen_) );
+}
+inline int32_t vr_rand_max () {
+  int32_t max=2147483647;  //2**31-1
+  return max;
+}
+inline int32_t vr_loop(){
+  return 63; // 64-bit tinymt64 output
+}
+
+
+inline void vr_rand_setSeed (Vr_Rand * r, uint64_t c) {
+  tinymt64_init(&(r->gen_),c);
+  r->count_   = 0;
+  r->seed_    = c;
+  r->current_ = vr_rand_next (r);
+}
+
+#endif
+
+
+
+inline uint64_t vr_rand_getSeed (Vr_Rand * r) {
+  return r->seed_;
+}
+
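+// Return one pseudo-random bit per call, consuming the cached random word one
+// bit at a time and drawing a fresh word when it is exhausted.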
+inline bool vr_rand_bool (Vr_Rand * r) {
+  if (r->count_ == vr_loop()){
+    r->current_ = vr_rand_next (r);
+    r->count_ = 0;
+  }
+  bool res = (r->current_ >> (r->count_++)) & 1;
+  // VG_(umsg)("Count : %u  res: %u\n", r->count_ ,res);
+  return res;
+}
+
+inline int32_t vr_rand_int (Vr_Rand * r) {
+  uint64_t res=vr_rand_next (r) % vr_rand_max();
+  return (int32_t)res;
+}
diff --git a/verrou/backend_verrou/vr_roundingOp.hxx b/verrou/backend_verrou/vr_roundingOp.hxx
new file mode 100644
index 0000000000000000000000000000000000000000..37992f2f83919214f347be3c27821171617da735
--- /dev/null
+++ b/verrou/backend_verrou/vr_roundingOp.hxx
@@ -0,0 +1,375 @@
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Implementation of the software implementation of rounding    ---*/
+/*--- mode switching.                                              ---*/
+/*---                                            vr_roundingOp.hxx ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#pragma once
+#include <limits>
+
+//#ifndef LIBMATHINTERP
+extern vr_RoundingMode ROUNDINGMODE;
+//#else
+//extern vr_RoundingMode ROUNDINGMODE;
+//#endif
+
+//#include "vr_fpRepr.hxx"
+#include "vr_nextUlp.hxx"
+#include "vr_isNan.hxx"
+
+#include "vr_op.hxx"
+
+template<class OP>
+class RoundingNearest{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    const RealType res=OP::nearestOp(p) ;
+    OP::check(p,res);
+    return res;
+  } ;
+
+};
+
+
+
+template<class OP>
+class RoundingFloat{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    vr_roundFloat<typename PackArgs::RealType, PackArgs::nb> roundedArgs (p);
+    const float res=(float) OP::nearestOp(roundedArgs.getPack()) ;
+    return RealType(res);
+  } ;
+
+};
+
+
+
+
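+// Random rounding: when the operation is inexact, keep the nearest result or
+// move it one ulp towards the exact value, each with probability 1/2.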
+template<class OP>
+class RoundingRandom{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p ){
+    RealType res=OP::nearestOp(p);
+
+    if (isNanInf<RealType> (res)){
+      return res;
+    }
+
+    OP::check(p,res);
+    const RealType signError=OP::sameSignOfError(p,res);
+    if(signError==0.){
+      return res;
+    }else{
+      const bool doNoChange = vr_rand_bool(&vr_rand);
+      if(doNoChange){
+	return res;
+      }else{
+	if(signError>0){
+	  return nextAfter<RealType>(res);
+	}else{
+	  return nextPrev<RealType>(res);
+	}
+      }
+    }
+  } ;
+};
+
+
+
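+// Average (stochastic) rounding: move away from the nearest result with
+// probability |error|/ulp, so that the rounding is unbiased in expectation.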
+template<class OP>
+class RoundingAverage{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    const RealType res=OP::nearestOp(p) ;
+
+    if (isNanInf<RealType> (res)){
+      return res;
+    }
+
+    OP::check(p,res);
+    const RealType error=OP::error(p,res);
+    if(error==0.){
+      return res;
+    }
+
+
+    if(error>0){
+      const RealType nextRes(nextAfter<RealType>(res));
+      const RealType u(nextRes -res);
+      const int s(1);
+      const bool doNotChange = ((vr_rand_int(&vr_rand) * u)
+				> (vr_rand_max() * s * error));
+      if(doNotChange){
+	return res;
+      }else{
+	return nextRes;
+      }
+
+    }
+    if(error<0){
+      const RealType prevRes(nextPrev<RealType>(res));
+      const RealType u(res -prevRes);
+      const int s(-1);
+      const bool doNotChange = ((vr_rand_int(&vr_rand) * u)
+				> (vr_rand_max() * s * error));
+      if(doNotChange){
+	return res;
+      }else{
+	return prevRes;
+      }
+    }
+    return res; //Should not occur
+  } ;
+};
+
+
+
+template<class OP>
+class RoundingZero{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    RealType res=OP::nearestOp(p) ;
+    OP::check(p,res);
+    const RealType signError=OP::sameSignOfError(p,res);
+
+    if(isNanInf<RealType>(res)){
+      if( (res!=std::numeric_limits<RealType>::infinity()) && (res!=-std::numeric_limits<RealType>::infinity())  ){
+	return res;
+      }else{
+	if(OP::isInfNotSpecificToNearest(p)){
+	  return res;
+	}else{
+	  if(res>0){
+	    return std::numeric_limits<RealType>::max();
+	  }else{
+	    return -std::numeric_limits<RealType>::max();
+	  }
+	}
+      }
+    }
+
+    if( (signError>0 && res <0)||(signError<0 && res>0) ){
+      return nextTowardZero<RealType>(res);
+    }
+    return res;
+  } ;
+};
+
+
+template<class OP>
+class RoundingUpward{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    const RealType res=OP::nearestOp(p) ;
+    OP::check(p,res);
+
+    if(isNanInf<RealType>(res)){
+      if(res!=-std::numeric_limits<RealType>::infinity()){
+	return res;
+      }else{
+	if(OP::isInfNotSpecificToNearest(p)){
+	  return res;
+	}else{
+	  return -std::numeric_limits<RealType>::max();
+	}
+      }
+    }
+
+    const RealType signError=OP::sameSignOfError(p,res);
+
+    if(signError>0.){
+      if(res==0.){
+	return std::numeric_limits<RealType>::denorm_min();
+      }
+      if(res==-std::numeric_limits<RealType>::denorm_min()){
+	return 0.;
+      }
+       return nextAfter<RealType>(res);
+    }
+    return res;
+  } ;
+};
+
+
+template<class OP>
+class RoundingDownward{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    const RealType res=OP::nearestOp(p) ;
+    OP::check(p,res);
+
+    if(isNanInf<RealType>(res)){
+      if(res!=std::numeric_limits<RealType>::infinity()){
+	return res;
+      }else{
+	if(OP::isInfNotSpecificToNearest(p)){
+	  return res;
+	}else{
+	  return std::numeric_limits<RealType>::max();
+	}
+      }
+    }
+
+
+    const RealType signError=OP::sameSignOfError(p,res);
+    if(signError<0){
+      if(res==0.){
+	return -std::numeric_limits<RealType>::denorm_min();
+      }
+      if(res==std::numeric_limits<RealType>::denorm_min()){
+	return 0.;
+      }
+      return nextPrev<RealType>(res);
+    }
+    return res;
+  } ;
+};
+
+
+
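+// Farthest rounding: whenever the operation is inexact, round to the farther
+// of the two enclosing floating-point numbers (the opposite of round-to-nearest).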
+template<class OP>
+class RoundingFarthest{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline RealType apply(const PackArgs& p){
+    const RealType res=OP::nearestOp(p) ;
+    if (isNanInf<RealType> (res)){
+      return res;
+    }
+
+    OP::check(p,res);
+    const RealType error=OP::error(p,res);
+    if(error==0.){
+      return res;
+    }
+    if(error>0){
+      RealType newRes=nextAfter<RealType>(res);
+      RealType ulp(newRes-res);
+      if(2*error < ulp ){
+	return newRes;
+      }else{
+	return res;
+      }
+    }else{//error<0
+      RealType newRes=nextPrev<RealType>(res);
+      RealType ulp(res-newRes);
+      if(-2*error < ulp ){
+	return newRes;
+      }else{
+	return res;
+      }
+    }
+  }
+};
+
+
+
+
+#include "vr_op.hxx"
+
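+// OpWithSelectedRoundingMode dispatches the operation OP to the rounding
+// implementation selected at runtime through the ROUNDINGMODE global.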
+template<class OP>
+class OpWithSelectedRoundingMode{
+public:
+  typedef typename OP::RealType RealType;
+  typedef typename OP::PackArgs PackArgs;
+
+  static inline void apply(const PackArgs& p, RealType* res, void* context){
+    *res=applySeq(p,context);
+#ifdef DEBUG_PRINT_OP
+    print_debug(p,res);
+#endif
+    if (isNan(*res)) {
+      vr_nanHandler();
+    }
+  }
+
+#ifdef DEBUG_PRINT_OP
+  static inline void print_debug(const PackArgs& p, const RealType* res){
+    static const int nbParam= OP::PackArgs::nb;
+
+    double args[nbParam];
+    const double resDouble(*res);
+    p.serialyzeDouble(args);
+    if(vr_debug_print_op==NULL) return ;
+    vr_debug_print_op(nbParam,OP::OpName(), args, &resDouble);
+  }
+#endif
+
+
+  static inline RealType applySeq(const PackArgs& p, void* context){
+    switch (ROUNDINGMODE) {
+    case VR_NEAREST:
+      return RoundingNearest<OP>::apply (p);
+    case VR_UPWARD:
+      return RoundingUpward<OP>::apply (p);
+    case VR_DOWNWARD:
+      return RoundingDownward<OP>::apply (p);
+    case VR_ZERO:
+      return RoundingZero<OP>::apply (p);
+    case VR_RANDOM:
+      return RoundingRandom<OP>::apply (p);
+    case VR_AVERAGE:
+      return RoundingAverage<OP>::apply (p);
+    case VR_FARTHEST:
+      return RoundingFarthest<OP>::apply (p);
+    case VR_FLOAT:
+      return RoundingFloat<OP>::apply (p);
+    case VR_NATIVE:
+      return RoundingNearest<OP>::apply (p);
+    }
+
+    return 0;
+  }
+};
+
+//#endif
diff --git a/verrou/configure.ac b/verrou/configure.ac
new file mode 100644
index 0000000000000000000000000000000000000000..03664b925f46ddb7fc8edec98e69aa3ceec000a6
--- /dev/null
+++ b/verrou/configure.ac
@@ -0,0 +1,78 @@
+AM_PATH_PYTHON([3])
+
+#--enable-verrou-fma
+AC_CACHE_CHECK([verrou fma], vg_cv_verrou_fma,
+  [AC_ARG_ENABLE(verrou-fma,
+    [  --enable-verrou-fma          enables verrou to use fma],
+    [vg_cv_verrou_fma=$enableval],
+    [vg_cv_verrou_fma=no])])
+
+if test "$vg_cv_verrou_fma" = yes; then
+  # Check for fmaintrin.h
+  AC_LANG_PUSH(C++)
+  CXXFLAGS="$safe_CXXFLAGS -mfma"
+  AC_MSG_CHECKING([for fmaintrin.h ])
+  AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+      #include  <immintrin.h>
+      #include <stdlib.h>
+]], [[
+       double a,b,c,d;
+      __m128d ai, bi,ci,di;
+      ai = _mm_load_sd(&a);
+      bi = _mm_load_sd(&b);
+      ci = _mm_load_sd(&c);
+      di=_mm_fmadd_sd(ai,bi,ci);
+      d=_mm_cvtsd_f64(di);
+      return EXIT_SUCCESS;
+    ]])],
+    [
+      AC_MSG_RESULT([yes])
+      AM_CONDITIONAL([HAVE_FMA_INTRIN], test x$vg_cv_verrou_fma = xyes,[])
+    ],[
+      AC_MSG_RESULT([no])
+      AC_MSG_ERROR([A compiler with fmaintrin.h is required for --enable-verrou-fma ])
+  ])
+  AC_LANG_POP(C++)
+else
+  AM_CONDITIONAL([HAVE_FMA_INTRIN], test x$vg_cv_verrou_fma = xyes,[])
+fi
+
+AC_SUBST(vg_cv_verrou_fma)
+
+
+#--enable-verrou-quad
+AC_CACHE_CHECK([verrou quad], vg_cv_verrou_quad,
+  [AC_ARG_ENABLE(verrou-quad,
+    [  --enable-verrou-quad          enables verrou to use quad],
+    [vg_cv_verrou_quad=$enableval],
+    [vg_cv_verrou_quad=yes])])
+
+AM_CONDITIONAL([USE_QUAD], test x$vg_cv_verrou_quad = xyes,[])
+
+AC_SUBST(vg_cv_verrou_quad)
+
+
+
+
+# avx512
+vg_test_no_avx512f_flag=-mno-avx512f
+AC_LANG_PUSH(C++)
+CXXFLAGS="$safe_CXXFLAGS $vg_test_no_avx512f_flag"
+AC_MSG_CHECKING([for -mno-avx512f compil option])
+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+      #include <stdlib.h>
+      ]], [[return EXIT_SUCCESS;]])],
+    [
+      CXXFLAGS="$safe_CXXFLAGS"
+      AC_MSG_RESULT([yes])
+    ],[
+    CXXFLAGS="$safe_CXXFLAGS"
+     vg_test_no_avx512f_flag=
+      AC_MSG_RESULT([no])
+  ])
+AC_LANG_POP(C++)
+AC_SUBST(vg_test_no_avx512f_flag)
+
+
+AC_CONFIG_FILES([verrou/env.sh])
+AC_CONFIG_FILES([verrou/unitTest/flag.mk])
diff --git a/verrou/ddTest/ddCmp.py b/verrou/ddTest/ddCmp.py
new file mode 100755
index 0000000000000000000000000000000000000000..1a6ead7c617efe6a02f73b0cc6d6ee7450228b7f
--- /dev/null
+++ b/verrou/ddTest/ddCmp.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+
+
+import sys
+import ddRun
+#DDConfig
+import os
+import pickle
+
+
+def cmpNorm(ref, toCmp, ddCase):
+    print("norm")
+    if "dd.sym" in ref and "dd.line" not in ref:
+        return ddCase.statusOfSymConfig(open(os.path.join(toCmp,"path_exclude")).readline())
+    if "dd.line" in ref:
+        return ddCase.statusOfSourceConfig(open(os.path.join(toCmp,"path_source")).readline())
+if __name__=="__main__":
+    if sys.argv[1]== sys.argv[2]:
+        sys.exit(0)
+    else:
+        ddCase=ddRun.ddConfig()
+        ref=sys.argv[1]
+        ddCase.unpickle(os.path.join(ref,"dd.pickle"))
+        toCmp=sys.argv[2]
+        sys.exit(cmpNorm(ref, toCmp, ddCase))
+    
+
+
+
+
diff --git a/verrou/ddTest/ddCmpFalse.py b/verrou/ddTest/ddCmpFalse.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0446aa29bf722696cf913a1d5100c1011a1d13a
--- /dev/null
+++ b/verrou/ddTest/ddCmpFalse.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+import sys
+sys.exit(1)
+
+
diff --git a/verrou/ddTest/ddCmpTrue.py b/verrou/ddTest/ddCmpTrue.py
new file mode 100755
index 0000000000000000000000000000000000000000..ecbc3b7708b1966dfdec414714563f2a29299def
--- /dev/null
+++ b/verrou/ddTest/ddCmpTrue.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+import sys
+sys.exit(0)
+
+
diff --git a/verrou/ddTest/ddRun.py b/verrou/ddTest/ddRun.py
new file mode 100755
index 0000000000000000000000000000000000000000..b338b82935ce55b670ebc2e5203c1d5c817b612b
--- /dev/null
+++ b/verrou/ddTest/ddRun.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python3
+
+
+import sys
+import os
+import pickle
+import random
+
+proba=1.
+try:
+    proba = float(os.environ["DD_TEST_PROBA"])
+except:
+    pass
+
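+# Simulate a non-deterministic failure: a configuration flagged as failing
+# (fail != 0) is reported as failing with probability proba (DD_TEST_PROBA).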
+def simulateRandom(fail):
+    if fail!=0:
+        if( random.random()<proba):
+            return fail
+    return 0
+
+
+
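+# ddConfig describes a synthetic failure landscape used to exercise verrou_dd:
+# listOf1Failure gives, for each symbol, a failure flag and its failing lines;
+# listOf2Failures lists failures triggered only by pairs of symbols.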
+class ddConfig:
+    def __init__(self,listOf1Failure=[], listOf2Failures=[]):
+        self.nbSym=len(listOf1Failure)
+        self.listOf1Failure=listOf1Failure
+        self.listOf2Failures=listOf2Failures
+        self.check2Failure()
+
+    def check2Failure(self):
+        for x in self.listOf2Failures:
+            ((sym0,sym1), fail, tab)=x
+            if sym0>= self.nbSym or sym1 >= self.nbSym:
+                print("failure")
+                sys.exit()
+            #todo check tab
+                #            for (s1, l1, s2, l2) in tab:
+
+    def pickle(self, fileName):
+        """To serialize the ddConfig object in the file fileName"""
+        fileHandler= open(fileName, "wb")
+        pickle.dump(self.listOf1Failure,fileHandler)
+        pickle.dump(self.listOf2Failures,fileHandler)
+
+
+    def unpickle(self, fileName):
+        """To deserialize the ddConfig object from the file fileName"""
+        fileHandler=open(fileName, "rb")
+        self.listOf1Failure=pickle.load(fileHandler)
+        self.listOf2Failures=pickle.load(fileHandler)
+        self.nbSym=len(self.listOf1Failure)
+        
+    def listOfIntSym(self):
+        """Return the int list of symbol"""
+        return range(self.nbSym)
+    
+    def listOfTxtSym(self):
+        """Return a fake list of symbol"""
+        return [("sym-"+str(i), "fake.so") for i in self.listOfIntSym()]
+
+
+    
+    def getExcludeIntSymFromExclusionFile(self, excludeFile):
+        """ Return the Int Symbol list excluded with excludeFile """
+        if excludeFile==None:
+            return []
+        return [int((line.split()[0]).replace("sym-", "")) for line in ((open(excludeFile.strip(), "r")).readlines()) ]
+
+    def getIncludeIntSymFromExclusionFile(self,excludeFile):
+        """ Return the Int Symbol list included defined through the excludeFile"""
+        return [i for i in self.listOfIntSym() if i not in self.getExcludeIntSymFromExclusionFile(excludeFile)]
+
+        
+    def listOfTxtLine(self, excludeFile):
+        """Generate a fake list of line : it takes into account the excludeFile"""
+        listOfSymIncluded=self.getIncludeIntSymFromExclusionFile(excludeFile)
+        res=[]
+        for (symFailureIndex, failure, listOfLine) in self.listOf1Failure:
+            if symFailureIndex in listOfSymIncluded:
+                    for (lineIndex, failureLine) in listOfLine:
+                            res+=[("sym"+str(symFailureIndex)+".c", lineIndex, "sym-"+str(symFailureIndex))]                
+        print("print listOfLine", res)
+        return res
+        
+    def getIncludedLines(self, sourceFile):
+        includedLines=[line.split() for line in  (open(sourceFile.strip(), "r")).readlines()]
+        return includedLines
+
+
+    def statusOfSymConfig(self, config):
+        """Return the status of the config"""
+        print(config)
+        listOfConfigSym=self.getExcludeIntSymFromExclusionFile(config)
+
+        #test single sym
+        for sym in self.listOfIntSym():
+            if sym not in listOfConfigSym and self.listOf1Failure[sym][1]!=0:
+                res=simulateRandom(1)
+                if res==1:
+                    return 1
+        #test couple sym
+        for ((sym1,sym2), failure, tab) in self.listOf2Failures:
+            if failure==0:
+                continue
+            if not sym1 in listOfConfigSym and not sym2 in listOfConfigSym:
+                res=simulateRandom(1)
+                if res==1:
+                    return 1
+        return 0
+
+    def statusOfSourceConfig(self, configLine):
+        print("configLine:", configLine)
+        listOfSym=[]
+        
+        configLineLines=self.getIncludedLines(configLine)
+        print("configLineLines:", configLineLines)
+        for sym in range(self.nbSym):
+            if sym not in listOfSym and self.listOf1Failure[sym][1]!=0:
+                print("sym:", sym)
+                print("listofLineFailure :", self.listOf1Failure[sym][2])
+                
+                
+                selectedConfigLines=[int(line[1]) for line in configLineLines if line[2]=="sym-"+str(sym) ]
+                print("selectedConfigLines:", selectedConfigLines)
+                for (lineFailure, failure) in self.listOf1Failure[sym][2]:
+                    if lineFailure in selectedConfigLines and failure :
+                        print("line return : ", lineFailure)
+                        return 1
+
+        #test couple sym
+        for ((sym1,sym2), failure, tab) in self.listOf2Failures:
+            print ("sym1 sym2 tab", sym1, sym2, tab)
+            if failure==0:
+                continue
+            if not sym1 in listOfSym and not sym2 in listOfSym:
+
+                selectedConfigLines1=[int(line[1]) for line in configLineLines if line[2]=="sym-"+str(sym1) ]
+                selectedConfigLines2=[int(line[1]) for line in configLineLines if line[2]=="sym-"+str(sym2) ]
+                print("selectedConfigLines1:", selectedConfigLines1)
+                print("selectedConfigLines2:", selectedConfigLines2)
+                for (s1, l1, s2,l2) in tab:
+                    if s1==sym1 and s2==sym2:
+                        if l1 in selectedConfigLines1 and l2 in selectedConfigLines2:
+                            return 1
+        return 0
+
+    
+        
+        
+def generateFakeExclusion(ddCase):
+    genExcludeFile=os.environ["VERROU_GEN_EXCLUDE"]
+    genExcludeFile=genExcludeFile.replace("%p", "4242")
+    
+    
+    f=open(genExcludeFile, "w")
+    dataToWrite=ddCase.listOfTxtSym()
+    import random
+    random.shuffle(dataToWrite)
+    for (sym, name,) in dataToWrite:
+        f.write(sym +"\t" + name+"\n")
+    f.close()
+
+def generateFakeSource(ddCase):
+
+
+    genSourceFile=os.environ["VERROU_GEN_SOURCE"]
+    genSourceFile=genSourceFile.replace("%p", "4242")
+
+    excludeFile=None
+    try:
+        excludeFile= os.environ["VERROU_EXCLUDE"]
+    except:
+        excludeFile=None
+    print('excludeFile:',excludeFile) 
+    f=open(genSourceFile, "w")
+    for (source, line,  symName) in ddCase.listOfTxtLine(excludeFile):
+        f.write(source +"\t" + str(line)+"\t"+symName+"\n")
+        
+    f.close()
+
+
+    
+def runRef(dir_path, ddCase):
+    print("ref")
+    if "dd.sym" in dir_path and not "dd.line" in dir_path:
+        generateFakeExclusion(ddCase)
+        ddCase.pickle(os.path.join(dir_path,"dd.pickle"))
+        return 0
+    if "dd.line" in dir_path:
+        generateFakeSource(ddCase)
+        ddCase.pickle(os.path.join(dir_path,"dd.pickle"))
+        return 0
+
+        
+def runNorm(dir_path, ddCase):
+    print("norm")
+    if "dd.sym" in dir_path and not "dd.line" in dir_path:
+        f=open(os.path.join(dir_path , "path_exclude"), "w")
+        f.write(os.environ["VERROU_EXCLUDE"]+"\n")
+        f.close()
+        return 0
+    if "dd.line" in dir_path:
+        f=open(os.path.join(dir_path,"path_source"), "w")
+        f.write(os.environ["VERROU_SOURCE"]+"\n")
+        f.close()
+        
+        
+if __name__=="__main__":
+    ddCase=ddConfig([(sym, max(0, sym-16), [(line, max(0, line-8)) for line in range(11) ] ) for sym in range(20)],
+                    [((0,1), 1, [(0,line, 1,max(0,line-1)) for line in range(4)])  ]
+    )
+#    ddCase=ddConfig([(0, 0, []),
+#                     (1, 1, [(0, 0),(1,1)] )])
+    
+    if "ref" in sys.argv[1]:
+        sys.exit(runRef(sys.argv[1], ddCase))
+    else:
+        sys.exit(runNorm(sys.argv[1], ddCase))
+    
+
+
+
+
diff --git a/verrou/docs/fix-encoding b/verrou/docs/fix-encoding
new file mode 100755
index 0000000000000000000000000000000000000000..d8f5e255747a9f7678862d64543b69ee519751cb
--- /dev/null
+++ b/verrou/docs/fix-encoding
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+TMP=$(mktemp)
+trap "{ echo 'cleaning up'; rm -f ${TMP};}" EXIT
+
+while [ ! -z "$1" ]; do
+    FILE="$1"
+    echo "Fixing encoding of file ${FILE}"
+    iconv -f iso-8859-1 -t utf-8 -o ${TMP} ${FILE}
+    sed 's/iso-8859-1/utf-8/i' ${TMP} >${FILE}
+    shift
+done
diff --git a/verrou/docs/update-docs b/verrou/docs/update-docs
new file mode 100755
index 0000000000000000000000000000000000000000..2cd49f09849821f66fee89fd96c0e91988793ee7
--- /dev/null
+++ b/verrou/docs/update-docs
@@ -0,0 +1,74 @@
+#!/bin/bash
+
+set -e
+
+SRCDIR=$(
+    cd "$(dirname "${BASH_SOURCE[0]}")"/../..
+    pwd)
+echo "SRCDIR=${SRCDIR}"
+
+BUILDDIR="${PWD}"
+echo "BUILDDIR=${BUILDDIR}"
+
+INSTALLDIR="${BUILDDIR}/install"
+echo "INSTALLDIR=${INSTALLDIR}"
+
+[ -r "${INSTALLDIR}/env.sh" ] || (
+    echo "Could not find ${INSTALLDIR}/env.sh"
+    false
+)
+
+(
+    cd ${SRCDIR}/verrou;
+    git diff --no-ext-diff --ignore-submodules --quiet --exit-code
+) || (
+    echo "Please commit your local changes"
+    false
+)
+
+HASH=$(cd ${SRCDIR}/verrou;
+       git rev-parse --short=14 HEAD)
+echo "HASH=${HASH}"
+
+echo "Press a key to continue (Ctrl+C to abort)"
+read
+
+# Build and install documentation
+make -C docs html-docs man-pages
+make install
+${SRCDIR}/verrou/docs/fix-encoding ${INSTALLDIR}/share/doc/valgrind/html/*.html
+
+
+# Update usage string in source files
+${SRCDIR}/verrou/docs/update-vr-clo
+
+# Upload HTML documentation to github
+TMPDIR=$(mktemp -d)
+trap "echo -e '\n\nCleaning up...'; rm -rf ${TMPDIR}" EXIT
+(
+    cd ${TMPDIR}
+    git clone --branch gh-pages --single-branch https://github.com/edf-hpc/verrou.git html
+    cd html
+)
+rsync -av --delete --exclude .git ${INSTALLDIR}/share/doc/valgrind/html/ ${TMPDIR}/html/
+(
+    cd ${TMPDIR}/html
+    git add .
+    git status
+    (
+        echo "Updated documentation to ${HASH}"
+        echo ""
+        echo "REMOVE THIS LINE TO COMMIT THESE CHANGES AND UPLOAD THEM"
+    ) >../commit.msg
+    (
+        echo "echo 'Staged changes'"
+        echo "git status --porcelain | grep '^??'"
+        echo "git diff --staged --stat"
+    ) >../bashrc
+    git config user.name  "François Févotte"
+    git config user.email "francois.fevotte@triscale-innov.com"
+    x-terminal-emulator -e "bash --init-file ../bashrc" &
+    firefox --new-window "vr-manual.html" &
+    git commit -t ../commit.msg
+    git push origin gh-pages
+)
diff --git a/verrou/docs/update-vr-clo b/verrou/docs/update-vr-clo
new file mode 100755
index 0000000000000000000000000000000000000000..7c97461a6144baf59e064ddb510faab94883f64d
--- /dev/null
+++ b/verrou/docs/update-vr-clo
@@ -0,0 +1,45 @@
+#!/usr/bin/env python3
+
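+# Extract the VERROU section of the installed valgrind(1) man page and rewrite
+# verrou/vr_clo.txt as C string literals (one quoted line per man-page line).
+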
+import os
+import subprocess
+import re
+import os.path
+
+env = os.environ
+env["COLUMNS"] = "81"
+
+installConfig= subprocess.Popen(["./config.status", "--config"],
+                       env = env,
+                       stdout = subprocess.PIPE)
+
+line = (installConfig.stdout.readline()).decode("utf8")
+spline=(line.strip()).split()
+pathInstall="./install"
+for conf in spline:
+    if conf[1:-1].startswith("--prefix"):
+        pathInstall=(conf[1:-1].partition('='))[2]
+
+man = subprocess.Popen(["man", os.path.join(pathInstall, "share/man/man1/valgrind.1")],
+                       env = env,
+                       stdout = subprocess.PIPE)
+
+
+for line in man.stdout:
+    line=line.decode("utf8")
+    if line.startswith("VERROU"):
+        break
+
+title = re.compile("^[A-Z]")
+subtitle = re.compile("^\s{3}[A-Z]")
+with open("verrou/vr_clo.txt", "w") as f:
+    for line in man.stdout:
+        line=line.decode("utf8")
+        if title.search(line):
+            break
+
+        if subtitle.search(line):
+            line = " " + line
+        else:
+            line = line[1:]
+
+        f.write ('"'+line.rstrip()+'\\n"'+"\n")
diff --git a/verrou/docs/valgrind-manpage.xml b/verrou/docs/valgrind-manpage.xml
new file mode 100644
index 0000000000000000000000000000000000000000..f83a6fdc90698abb2cc6b299ad4e470f4372adea
--- /dev/null
+++ b/verrou/docs/valgrind-manpage.xml
@@ -0,0 +1,38 @@
+<refsect1 id="verrou-options">
+  <title>Verrou Options</title>
+  
+  <refsect2>
+    <title>General options</title>
+    <xi:include href="../../verrou/docs/vr-manual.xml"
+                xpointer="vr.opts.general.list"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect2>
+    
+  <refsect2>
+    <title>Perturbation of floating-point operations</title>
+    <xi:include href="../../verrou/docs/vr-manual.xml"
+                xpointer="vr.opts.instr.list"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect2>
+
+  <refsect2>
+    <title>Instrumentation scope</title>
+    <xi:include href="../../verrou/docs/vr-manual.xml"
+                xpointer="vr.opts.scope.list"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect2>
+
+  <refsect2>
+    <title>Detection options</title>
+    <xi:include href="../../verrou/docs/vr-manual.xml"
+                xpointer="vr.opts.detection.list"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect2>
+
+  <refsect2>
+    <title>Performance optimization</title>
+    <xi:include href="../../verrou/docs/vr-manual.xml"
+                xpointer="vr.opts.optim.list"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect2>
+</refsect1>
diff --git a/verrou/docs/verrou_dd-manpage.xml b/verrou/docs/verrou_dd-manpage.xml
new file mode 100644
index 0000000000000000000000000000000000000000..a8bd39edbab1553a19548032d91eb7267f8ef9fb
--- /dev/null
+++ b/verrou/docs/verrou_dd-manpage.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
+          [ <!ENTITY % vg-entities SYSTEM "../../docs/xml/vg-entities.xml"> %vg-entities; ]>
+
+
+<refentry id="verrou-dd">
+
+  <refmeta>
+    <refentrytitle>Verrou Delta-Debug</refentrytitle>
+    <manvolnum>1</manvolnum>
+    <refmiscinfo>Release &rel-version;</refmiscinfo>
+  </refmeta>
+
+  <refnamediv>
+    <refname>verrou_dd</refname>
+    <refpurpose>Delta-Debugging tool for Verrou</refpurpose>
+  </refnamediv>
+
+  <refsynopsisdiv>
+    <xi:include href="vr-manual.xml" xpointer="vr-manual.verrou_dd.synopsis"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsynopsisdiv>
+
+
+  <refsect1>
+    <title>Description</title>
+    <xi:include href="vr-manual.xml" xpointer="vr-manual.verrou_dd.desc"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+    <xi:include href="vr-manual.xml" xpointer="vr-manual.verrou_dd.results"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect1>
+
+  <refsect1>
+    <title>Environment Variables</title>
+    <xi:include href="vr-manual.xml" xpointer="vr-manual.verrou_dd.envvars"
+                xmlns:xi="http://www.w3.org/2001/XInclude" />
+  </refsect1>
+
+  <refsect1>
+    <title>See Also</title>
+    <para>
+      valgrind(1),
+      <filename>&vg-docs-path;</filename> or
+      <filename>&vg-docs-url;</filename>.
+    </para>
+  </refsect1>
+
+  <refsect1>
+    <title>Authors</title>
+    <para>Fran&ccedil;ois F&eacute;votte and Bruno Lathuili&egrave;re.</para>
+  </refsect1>
+</refentry>
+
+<!-- Local Variables: -->
+<!-- mode: nxml -->
+<!-- fill-column: 100 -->
+<!-- End: -->
diff --git a/verrou/docs/vr-localization.xml b/verrou/docs/vr-localization.xml
new file mode 100644
index 0000000000000000000000000000000000000000..bf6d8d5137011068e01b6c474ffed8e76c64e23f
--- /dev/null
+++ b/verrou/docs/vr-localization.xml
@@ -0,0 +1,358 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<section id="vr-manual.localization" xreflabel="Error localization">
+  <title>Debugging and error localization</title>
+
+  <para>
+    We describe in this section techniques which can help determine the origin of numerical errors
+    in the source code of the analyzed program.
+  </para>
+  
+  <section id="vr-manual.localization.dd" xreflabel="Delta-Debugging">
+    <title>Delta-debugging with the <command>verrou_dd</command> command</title>
+
+    <para>
+      The first technique builds upon Verrou's <xref linkend="vr-manual.feat.exclude"/> feature in
+      order to perform a search of functions and source code lines whose perturbation produces the
+      most important errors in final results. The idea is the following:
+      <itemizedlist>
+        <listitem><simpara>First, a complete list of all symbols (which contain perturbed
+	floating-point instructions) is generated. It is expected that
+        perturbing all functions in this list will produce inexact results. On the other hand,
+        excluding all symbols from the scope of perturbations should produce unperturbed
+        results.</simpara></listitem>
+        <listitem><simpara>By splitting the symbols list in two parts, and perturbing each half
+        separately, it is possible to determine whether each perturbed half produces inexact
+        results.</simpara></listitem>
+        <listitem><simpara>Going on like this and performing a bisection of the list of symbols, the
+        algorithm eventually finds a subset of functions whose perturbation alone is enough to
+        produce inexact results.</simpara></listitem>
+        <listitem><simpara>The same process can be iterated over all source code lines belonging to
+        the identified unstable symbols (provided that the binary embeds debugging
+        information).</simpara></listitem>
+      </itemizedlist>
+    </para>
+
+    <para>
+      This algorithm is called <emphasis>Delta-Debugging</emphasis>.
+      <!-- , and is for example used in <command>git bisect</command>. -->
+      It is automated by the <command>verrou_dd</command> command,
+      which can be used in the following way:
+    </para>
+    
+    <simplesect>
+      <title>Synopsis</title>
+
+      <screen><cmdsynopsis id="vr-manual.verrou_dd.synopsis">
+        <command>verrou_dd</command>    
+        <arg choice="req"><replaceable>/path/to/run_script</replaceable></arg>
+        <arg choice="req"><replaceable>/path/to/cmp_script</replaceable></arg>
+      </cmdsynopsis></screen>
+
+    </simplesect>
+    <simplesect>
+      <title>Description</title>
+        
+      <para id="vr-manual.verrou_dd.desc">
+        Arguments passed to <command>verrou_dd</command> are scripts, which must be identified by an
+        absolute path. These scripts will be called during the delta-debugging process to automate
+        the tasks of running your program under Verrou, and verifying the results. The scripts will
+        be called with the following arguments:
+        
+        <variablelist>
+          <varlistentry>
+            <term>
+              <command><replaceable>run_script</replaceable></command>
+              <arg choice="req"><replaceable>dir</replaceable></arg>
+            </term>
+            <listitem><para> Should run your program under Verrou, and put the results somewhere
+            under the <replaceable>dir</replaceable> directory. A minimal example could look like
+            the following:</para>
+            <programlisting>
+#!/bin/bash
+DIR="$1"
+valgrind --tool=verrou --rounding-mode=random <replaceable>program args</replaceable> &gt; ${DIR}/results.dat</programlisting>
+            </listitem>
+          </varlistentry>
+          <varlistentry>
+            <term>
+              <command><replaceable>cmp_script</replaceable></command>
+              <arg choice="req"><replaceable>ref_dir</replaceable></arg>
+              <arg choice="req"><replaceable>run_dir</replaceable></arg>
+            </term>
+            <listitem>
+              <simpara>
+                Should check whether program results in <replaceable>run_dir</replaceable> (as
+                stored by a previous call to
+                <command><replaceable>run_script</replaceable></command>) are "valid". The precise
+                meaning of "valid" is left for the user to determine, depending on the program being
+                analyzed. If a comparison to reference results is needed, directory
+                <replaceable>ref_dir</replaceable> contains the results produced by an
+                uninstrumented run of the
+                program. <command><replaceable>cmp_script</replaceable></command> should return 0 if
+                and only if the results are "valid".
+              </simpara>
+              <simpara>
+                A minimal example could look like the following:
+              </simpara>
+              <programlisting>
+#!/bin/bash 
+REF="$1"
+RUN="$2"
+diff ${REF}/results.dat ${RUN}/results.dat</programlisting>
+            </listitem>
+          </varlistentry>
+        </variablelist>
+      </para>
+    </simplesect>
+    
+    <simplesect>
+      <title>Environment Variables</title>
+      
+      <para>
+        The following environment variables affect the behaviour of <command>verrou_dd</command>:
+      </para>
+      
+      <variablelist id="vr-manual.verrou_dd.envvars">
+        <varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_NRUNS</computeroutput>
+          </term>
+          <listitem>
+            <para>An integer setting the number of successful runs required to consider a
+            configuration as stable. If this environment variable is not set, the default value is
+            5.</para>
+          </listitem>
+        </varlistentry>
+        <varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_UNSAFE</computeroutput>
+          </term>
+          <listitem>
+            <para>If this variable is set (whatever its value), some Delta-Debugging assertions
+            are turned into mere warnings. This allows getting a result faster, at the price of
+            the result being potentially incorrect (functions/lines listed as stable when they
+            are not, or vice versa).
+            </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_NUM_THREADS</computeroutput>
+          </term>
+          <listitem>
+            <para>Experimental feature: allows performing <computeroutput>VERROU_DD_NUM_THREADS</computeroutput> runs in parallel.
+	     If this environment variable is not set, parallelism is not activated.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_ALGO</computeroutput>
+          </term>
+          <listitem>
+            <para>
+	      Environment variable to choose one of the two Delta-Debugging algorithms (ddmax and rddmin).
+	    </para>
+	    <para>
+	      <computeroutput>rddmin</computeroutput>: recursively returns the min-set of unstable
+	      symbols. <computeroutput>VERROU_DD_NUM_THREADS</computeroutput> is ignored.
+	    </para>
+	    <para>
+	      <computeroutput>ddmax</computeroutput>: returns the max-set of stable symbols (or lines). Symbolic links are created for each element of the complementary set.
+	    </para>
+
+	    <para>
+	    If this environment variable is not set, the default value is set to <computeroutput>rddmin</computeroutput>.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_RDDMIN</computeroutput>
+          </term>
+          <listitem>
+            <para>
+	      Environment variable to choose one of the variants of the rddmin algorithm.
+	    </para>
+	    <para>
+	      "", <computeroutput>strict</computeroutput>: rddmin algorithm with a constant number of samples
+	    </para>
+	    <para>
+	      <computeroutput>s</computeroutput>, <computeroutput>stoch</computeroutput>: rddmin algorithm with an increasing number of samples (called srddmin)
+	    </para>
+	    <para>
+	      <computeroutput>d</computeroutput>, <computeroutput>dicho</computeroutput>: srddmin algorithm pre-filtered by a dichotomy method (called drddmin)
+	    </para>
+
+	    <para>
+	    If this environment variable is not set, the default value is set to <computeroutput>dicho</computeroutput>.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+
+	<varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_RDDMIN_TAB</computeroutput>
+          </term>
+          <listitem>
+            <para>
+	      Environment variable to choose the evolution of the number of samples used by the srddmin algorithm.
+	    </para>
+	    <para>
+	      <computeroutput>exp</computeroutput>: increases the number of samples from 1 to
+	      <computeroutput>VERROU_DD_NRUNS</computeroutput> with exponential rate.
+	    </para>
+	    <para>
+	      <computeroutput>all</computeroutput>: consecutively try all numbers of samples from 1
+	      to <computeroutput>VERROU_DD_NRUNS</computeroutput>.
+	    </para>
+	    <para>
+	      <computeroutput>single</computeroutput>: directly use the number of samples given by
+	      <computeroutput>VERROU_DD_NRUNS</computeroutput>. srddmin is equivalent to rddmin in
+              this case.
+	    </para>
+
+	    <para>
+	    If this environment variable is not set, the default value is set to <computeroutput>exp</computeroutput>.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+          <term>
+            <computeroutput>VERROU_DD_DICHO_TAB</computeroutput>
+          </term>
+          <listitem>
+            <para>
+	      Environment variable to choose the evolution of the number of samples used by the
+              binary search in the first part of the drddmin algorithm.
+	    </para>
+	    <para>
+	      <computeroutput>exp</computeroutput>: increases the number of samples from 1 to
+	      <computeroutput>VERROU_DD_NRUNS</computeroutput> with exponential rate.
+	    </para>
+	    <para>
+	      <computeroutput>all</computeroutput>: consecutively try all numbers of samples from 1
+	      to <computeroutput>VERROU_DD_NRUNS</computeroutput>.
+	    </para>
+	    <para>
+	      <computeroutput>single</computeroutput>: use the number of samples given by
+	      <computeroutput>VERROU_DD_NRUNS</computeroutput>.
+	    </para>
+	    <para>
+	      <computeroutput>half</computeroutput>: use half the number of samples given by
+	      <computeroutput>VERROU_DD_NRUNS</computeroutput>.
+	    </para>
+	    <para>
+	      <replaceable>N</replaceable>: user-provided value (must be an integer
+	      between 1 and <computeroutput>VERROU_DD_NRUNS</computeroutput>).
+	    </para>
+
+	    <para>
+	    If this environment variable is not set, the default value is set to <computeroutput>half</computeroutput>.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+	  <term>
+            <computeroutput>VERROU_DD_DICHO_GRANULARITY</computeroutput>
+          </term>
+        <listitem>
+            <para>
+	      Environment variable to choose the granularity of the split function of the dichotomy algorithm (used by drddmin).
+	    </para>
+	    <para>
+	    If this environment variable is not set, the default value is set to 2.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+	  <term>
+            <computeroutput>VERROU_DD_QUIET</computeroutput>
+          </term>
+        <listitem>
+            <para>
+	      Environment variable to reduce the verbosity of <command>verrou_dd</command>.
+	    </para>
+          </listitem>
+        </varlistentry>
+
+	<varlistentry>
+	  <term>
+          <computeroutput>VERROU_DD_IGNORE_LINE</computeroutput>
+          </term>
+        <listitem>
+            <para>
+	      Environment variable to limit the search to symbols only (the line-level search is skipped).
+	    </para>
+          </listitem>
+        </varlistentry>
+
+
+
+      </variablelist>
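+
+      <para>
+	As an illustration, a typical invocation combining several of these variables might look as
+	follows (a sketch: <command>run_script</command> and <command>cmp_script</command> are
+	placeholder names for the user-provided run and comparison scripts expected by
+	<command>verrou_dd</command>):
+	<screen>
+<prompt>$</prompt> VERROU_DD_NRUNS=5 VERROU_DD_RDDMIN=dicho VERROU_DD_RDDMIN_TAB=exp \
+  verrou_dd run_script cmp_script</screen>
+      </para>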
+    </simplesect>
+    
+    <simplesect>
+      <title>Results ddmax</title>
+      <para id="vr-manual.verrou_dd.results">
+        <command>verrou_dd</command> stores many intermediate results within the
+        <computeroutput>dd.sym</computeroutput> and <computeroutput>dd.line</computeroutput>
+        directories. Synthetic results are present in the form of symbolic links named in the
+        following way:
+        <variablelist>
+          <varlistentry>
+              <term>dd.sym.<replaceable>SYMNAME</replaceable></term>
+              <listitem><simpara>The presence of such a symbolic link indicates that the function
+              associated with symbol <replaceable>SYMNAME</replaceable> has been detected as
+              unstable. When the <option><xref linkend="opt.demangle"/>=no</option> option was
+              provided on the verrou command line in
+              <command><replaceable>run_script</replaceable></command>,
+              <replaceable>SYMNAME</replaceable> is in mangled form. In this case, the
+              human-readable name of the function can be retrieved using
+              <command>c++filt</command>.</simpara></listitem>
+          </varlistentry>
+          <varlistentry>
+            <term>dd.line.<replaceable>FILENAME</replaceable>:<replaceable>LINENO</replaceable></term>
+            <listitem><simpara>Such a symbolic link indicates that an instability was found in line
+            <replaceable>LINENO</replaceable> of file
+            <replaceable>FILENAME</replaceable>.</simpara></listitem>
+          </varlistentry>
+        </variablelist>
+      </para>
+    </simplesect>
+
+    <simplesect>
+      <title>Results *rddmin </title>
+      <para id="vr-manual.verrou_dd.results-rddmin">
+        <command>verrou_dd</command> stores many intermediate results within the
+        <computeroutput>dd.sym</computeroutput> directory. Synthetic results are present in the form
+        of symbolic links named after the pattern: <computeroutput>ddmin-<replaceable>N</replaceable></computeroutput>.
+      </para>
+      <para>
+	The list of all symbols found to be unstable in the
+	<computeroutput>ddmin-<replaceable>N</replaceable></computeroutput> set can be retrieved in
+	the file: <computeroutput>ddmin-<replaceable>N</replaceable>/dd.include</computeroutput>.
+      </para>
+      <para>
+	Each <computeroutput>ddmin-<replaceable>N</replaceable></computeroutput> directory contains a
+	<computeroutput>dd.line</computeroutput> directory, organized in the same way as described
+	above for the ddmax algorithm.
+      </para>
+    </simplesect>
+  </section>
+</section>
+
+<!-- Local Variables: -->
+<!-- mode: nxml -->
+<!-- fill-column: 100 -->
+<!-- End: -->
diff --git a/verrou/docs/vr-manual.xml b/verrou/docs/vr-manual.xml
new file mode 100644
index 0000000000000000000000000000000000000000..4f9874df4e9b0c1dd7c49b5fd088e3e2ab3f2ebb
--- /dev/null
+++ b/verrou/docs/vr-manual.xml
@@ -0,0 +1,574 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<chapter id="vr-manual" xreflabel="Verrou">
+  <title>Verrou: a floating-point rounding errors checker</title>
+
+  <para>
+    To use this tool, you must specify <option>--tool=verrou</option> on the
+    Valgrind command line.
+  </para>
+
+  <xi:include href="vr-overview.xml"
+              xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+  <xi:include href="vr-std.xml"
+              xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+  <xi:include href="vr-scope.xml"
+              xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+  <xi:include href="vr-localization.xml"
+              xmlns:xi="http://www.w3.org/2001/XInclude" />
+
+
+  <section id="vr-manual.advanced" xreflabel="Advanced features">
+    <title>Advanced features</title>
+
+    <section id="vr-manual.feat.deterministic" xreflabel="Deterministic sections">
+      <title>Deterministic sections</title>
+      <para>
+        Sometimes you want a part of your program to be instrumented and perturbed by
+        rounding-mode switches, but you don't want to lose determinism. For example, in
+        the following program, <function>det</function> is called twice with the same
+        arguments, and the correct execution relies on the strong hypothesis that both
+        calls will return the same result.
+      </para>
+
+      <programlisting>
+float det (float x) {
+  return 2*x;
+}
+
+int main () {
+  float x1 = det (42);
+  float x2 = det (42);
+  assert (x1 == x2);
+} </programlisting>
+
+      <para>
+        In this situation, you know that <function>det</function> can contain
+        floating-point errors, which you want to quantify. However, you also know that,
+        whatever these errors may be, <function>det</function> will remain deterministic and
+        that the assertion can only fail because of the instrumentation added by
+        Verrou. The <computeroutput><xref linkend="vr-cr.start-deterministic"/></computeroutput>
+        client request can help deal with such problems.
+      </para>
+
+      <para>
+        At the beginning of a deterministic section, the pseudo-random number generator
+        (pRNG) used for random rounding mode switching is seeded with a new value. This
+        value is computed deterministically from the location in the program source
+        code. This ensures that each time the instrumented program enters the same
+        deterministic section (same location in the source code), the pRNG is seeded
+        with the same value, leading to the same sequence of rounding mode switches. The
+        seed value also depends on the PID of the current process, so that different
+        program executions lead to different results.
+      </para>
+
+      <section>
+        <title>Basic usage</title>
+
+        <para>
+          Use the <computeroutput>VERROU_START_DETERMINISTIC(0)</computeroutput> client
+          request to mark the beginning of a deterministic section. Similarly, put
+          a <computeroutput>VERROU_STOP_DETERMINISTIC(0)</computeroutput> client request
+          at the end of the deterministic section to go back to (pseudo-)random rounding
+          mode switching.
+        </para>
+
+        <para>
+          Here is an example instrumented program:
+          <programlisting>
+#include &lt;assert.h&gt;
+#include &lt;valgrind/verrou.h&gt;
+
+float det (float x) {
+  VERROU_START_DETERMINISTIC(0);
+  float result = 2*x;
+  VERROU_STOP_DETERMINISTIC(0);
+  return result;
+}
+
+int main () {
+  float x1 = det (42);
+  float x2 = det (42);
+  assert (x1 == x2);
+} </programlisting>
+          whose execution yields the following output:
+          <screen>
+--8523-- Entering deterministic section 70660: det() (deterministic.c:4)
+--8523-- Leaving deterministic section: det() (deterministic.c:6)
+--8523-- Entering deterministic section 70660: det() (deterministic.c:4)
+--8523-- Leaving deterministic section: det() (deterministic.c:6) </screen>
+
+          Here we can see that both calls to the <function>det()</function> function used
+          the same value to seed the pRNG (based on the client request location in the
+          source).
+        </para>
+      </section>
+
+      <section>
+        <title>Advanced usage</title>
+
+        <para>
+          Assume the following program, in which two distinct deterministic sections are
+          instrumented, but the client requests have been abstracted out in separate
+          function calls (this is actually required, for example, for Fortran programs,
+          which have to call a C function to issue client requests):
+
+          <programlisting>
+#include &lt;assert.h&gt;
+#include &lt;stdio.h&gt;
+#include &lt;valgrind/verrou.h&gt;
+
+void verrou_startDeterministic() {
+  VERROU_START_DETERMINISTIC(0);
+}
+
+void verrou_stopDeterministic() {
+  VERROU_STOP_DETERMINISTIC(0);
+}
+
+float det1 () {
+  verrou_startDeterministic();
+  /* ... */
+  verrou_stopDeterministic();
+}
+
+float det2 () {
+  verrou_startDeterministic();
+  /* ... */
+  verrou_stopDeterministic();
+}
+
+int main () {
+  fprintf (stderr, "   det1\n");
+  assert (det1() == det1());
+
+  fprintf (stderr, "   det2\n");
+  assert (det2() == det2());
+} </programlisting>
+
+          Executing this program in Verrou yields the following output:
+          <screen>
+det1
+--2909-- Entering deterministic section 82435: verrou_startDeterministic() (deterministic2.c:4)
+--2909-- Leaving deterministic section: verrou_stopDeterministic() (deterministic2.c:8)
+--2909-- Entering deterministic section 82435: verrou_startDeterministic() (deterministic2.c:4)
+--2909-- Leaving deterministic section: verrou_stopDeterministic() (deterministic2.c:8)
+det2
+--2909-- Entering deterministic section 82435: verrou_startDeterministic() (deterministic2.c:4)
+--2909-- Leaving deterministic section: verrou_stopDeterministic() (deterministic2.c:8)
+--2909-- Entering deterministic section 82435: verrou_startDeterministic() (deterministic2.c:4)
+--2909-- Leaving deterministic section: verrou_stopDeterministic() (deterministic2.c:8) </screen>
+          Since the client requests are always issued from the same source location, the
+          two deterministic sections are seeded with the same value.
+        </para>
+
+        <para>
+          It is possible to give
+          <computeroutput>VERROU_START_DETERMINISTIC</computeroutput> a non-zero LEVEL
+          argument in order to use the source location of a calling function higher in the
+          stack. In the case described above, replacing
+          the <function>verrou_startDeterministic</function>
+          and <function>verrou_stopDeterministic</function> function definitions like
+          this:
+          <programlisting>
+void verrou_startDeterministic() {
+  VERROU_START_DETERMINISTIC(1);
+}
+
+void verrou_stopDeterministic() {
+  VERROU_STOP_DETERMINISTIC(1);
+} </programlisting>
+          yields the following output:
+          <screen>
+det1
+--4523-- Entering deterministic section 14298: det1() (deterministic2.c:12)
+--4523-- Leaving deterministic section: det1() (deterministic2.c:14)
+--4523-- Entering deterministic section 14298: det1() (deterministic2.c:12)
+--4523-- Leaving deterministic section: det1() (deterministic2.c:14)
+det2
+--4523-- Entering deterministic section 65473: det2() (deterministic2.c:18)
+--4523-- Leaving deterministic section: det2() (deterministic2.c:20)
+--4523-- Entering deterministic section 65473: det2() (deterministic2.c:18)
+--4523-- Leaving deterministic section: det2() (deterministic2.c:20) </screen>
+          in which the pRNG is seeded using source locations one level up the stack from
+          the client request.
+        </para>
+        <para>
+          Since the source location is not needed to go back to (pseudo-)random rounding
+          mode switching, the LEVEL argument
+          to <computeroutput>VERROU_STOP_DETERMINISTIC</computeroutput> is only used for
+          cosmetic and debugging purposes.
+        </para>
+      </section>
+    </section>
+  </section>
+
+  <section id="vr-manual.reference" xreflabel="Reference">
+    <title>Reference</title>
+    <section id="vr-manual.clo" xreflabel="Command-line options">
+      <title>Command-line options</title>
+
+      <section>
+        <title>General options</title>
+        <variablelist id="vr.opts.general.list">
+
+          <varlistentry id="vr-opt.verbose" xreflabel="--vr-verbose">
+            <term><option><![CDATA[--vr-verbose=<yes|no> [default=no]]]></option></term>
+            <listitem><para>Toggle verbosity: prints messages for x387
+                instructions and client requests.
+            </para></listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.count-op" xreflabel="--count-op">
+            <term><option><![CDATA[--count-op=<yes|no> [default=yes]]]></option></term>
+            <listitem><para>
+                Toggle <link linkend="vr-manual.feat.count">floating-point
+                  operations counting</link>.
+            </para></listitem>
+          </varlistentry>
+
+	  <varlistentry id="vr-opt.backend" xreflabel="--backend">
+            <term><option><![CDATA[--backend=<verrou|mcaquad> [default=verrou]]]></option></term>
+            <listitem><para>
+		Select the <computeroutput>verrou</computeroutput> or
+		<computeroutput>mcaquad</computeroutput> backend. The <computeroutput>verrou</computeroutput>
+		backend allows emulating the different rounding modes (see the <option><xref linkend="vr-opt.rounding-mode"/></option> option).
+		The <computeroutput>mcaquad</computeroutput> backend performs MCA (Monte Carlo Arithmetic)
+		based on extended (quad) precision (see the <option><xref linkend="vr-opt.mca-mode"/></option>
+		and <option><xref linkend="vr-opt.mca-precision"/></option> options).
+		The integration of the <computeroutput>mcaquad</computeroutput> backend into the verrou frontend
+		is still considered experimental.
+            </para></listitem>
+          </varlistentry>
+
+        </variablelist>
+      </section>
+
+      <section>
+        <title>Perturbation of floating-point operations</title>
+        <variablelist id="vr.opts.instr.list">
+
+          <varlistentry id="vr-opt.rounding-mode" xreflabel="--rounding-mode">
+            <term><option><![CDATA[--rounding-mode=<random|average|nearest|upward|downward|toward_zero|farthest|float> [default=nearest]]]></option></term>
+            <listitem>
+              <para>
+                Emulate the given <link linkend="vr-manual.feat.rounding-mode">rounding mode</link>
+		for operations instrumented with the verrou backend. If
+                this option is not provided, Verrou always rounds to the nearest
+                floating-point value. Supported rounding modes are:
+              </para>
+              <itemizedlist>
+                <listitem><para>
+                  <command>Random rounding modes:</command> <computeroutput>random</computeroutput>,
+                  <computeroutput>average</computeroutput>.
+                </para></listitem>
+                <listitem><para>
+                  <command>IEEE-754 rounding modes:</command>
+                  <computeroutput>nearest</computeroutput> (default),
+                  <computeroutput>upward</computeroutput>,
+                  <computeroutput>downward</computeroutput>,
+                  <computeroutput>toward_zero</computeroutput>.
+                </para></listitem>
+                <listitem><para>
+                    <command>Other:</command> <computeroutput>farthest</computeroutput>,
+		    <computeroutput>float</computeroutput>.
+                </para></listitem>
+              </itemizedlist>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.mca-mode" xreflabel="--mca-mode">
+            <term><option><![CDATA[--mca-mode=<mca|rr|pb|ieee> [default=mca]]]></option></term>
+            <listitem>
+              <para>
+                Emulate the given MCA mode for operations instrumented with the mcaquad backend. Supported MCA modes are:
+              </para>
+              <itemizedlist>
+                <listitem><para>
+                  <computeroutput>mca</computeroutput>: full MCA (default)
+                </para></listitem>
+                <listitem><para>
+                  <computeroutput>rr</computeroutput>: random rounding
+                </para></listitem>
+                <listitem><para>
+                  <computeroutput>pb</computeroutput>: precision bounding
+                </para></listitem>
+                <listitem><para>
+                  <computeroutput>ieee</computeroutput>: IEEE (rounding to nearest)
+                </para></listitem>
+              </itemizedlist>
+
+	      <para>The mcaquad backend implementation comes from Verificarlo: see
+	      <ulink url="https://github.com/verificarlo/verificarlo">the Verificarlo GitHub page</ulink>
+	      for more information.
+	      </para>
+            </listitem>
+          </varlistentry>
+	    <varlistentry id="vr-opt.mca-precision" xreflabel="--mca-precision">
+            <term><option><![CDATA[--mca-precision=<integer> [default=53]]]></option></term>
+            <listitem>
+              <para>
+                Configure the virtual precision (i.e. the magnitude of the perturbation) of the inexact function used by the mcaquad backend.
+              </para>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.seed" xreflabel="--vr-seed">
+            <term><option><![CDATA[--vr-seed=RNG_SEED [default=automatically generated]]]></option></term>
+            <listitem>
+              <para>If present, this option sets the seed of the
+              pseudo-random number generator (pRNG) used for the
+		<command>random</command> and
+		<command>average</command> <link linkend="vr-manual.feat.rounding-mode">rounding modes</link>.
+		This helps reproduce the behaviour of a program under Verrou.
+              </para>
+              <para>If this option is omitted, the pRNG is seeded with a value
+              based on the current time and process id, so that it should change
+              at each execution.</para>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.instr" xreflabel="--vr-instr-">
+            <term><option><![CDATA[--vr-instr=<add|sub|mul|div|mAdd|mSub|conv> [default=all]]]></option></term>
+            <listitem>
+              <para>Toggle instrumentation of floating-point additions,
+                subtractions, multiplications, divisions, fused multiply-additions,
+                fused multiply-subtractions and conversions (only double-to-float casts), respectively.
+		This option can be set multiple times to instrument several types of
+                operations.</para>
+              <para>If this option is not provided, all supported operations
+              types are instrumented.</para>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.instr-scalar" xreflabel="--vr-instr-scalar-">
+            <term><option><![CDATA[--vr-instr-scalar=<yes|no> [default=no]]]></option></term>
+            <listitem><para>
+                Toggle instrumentation of x387 scalar instructions.
+            </para></listitem>
+          </varlistentry>
+
+        </variablelist>
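+
+        <para>
+          As an illustration, these options can be combined as follows (a sketch: the program name
+          is a placeholder and the seed value is arbitrary). This run perturbs only additions and
+          multiplications with random rounding, using a fixed pRNG seed so that it can be
+          reproduced:
+          <screen>
+<prompt>$</prompt> valgrind --tool=verrou --rounding-mode=random --vr-seed=42 \
+    --vr-instr=add --vr-instr=mul <replaceable>program</replaceable></screen>
+        </para>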
+      </section>
+
+      <section>
+        <title>Restriction of instrumentation scope</title>
+        <variablelist id="vr.opts.scope.list">
+
+          <varlistentry id="vr-opt.instr-atstart" xreflabel="--instr-atstart">
+            <term><option><![CDATA[--instr-atstart=<yes|no> [default=yes]]]></option></term>
+            <listitem><para>Toggle <link linkend="vr-manual.feat.instr">instrumentation
+                  state</link> on or off at program start. Useful in combination
+                  with <link linkend="vr-cr.start-instrumentation">client
+                  requests</link>.
+            </para></listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.exclude" xreflabel="--exclude">
+            <term><option><![CDATA[--exclude=FILE]]></option></term>
+            <listitem><para>Symbols listed
+                in <computeroutput>FILE</computeroutput> will be
+                left <link linkend="vr-manual.feat.exclude">uninstrumented</link>.
+              </para>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.gen-exclude" xreflabel="--gen-exclude">
+            <term><option><![CDATA[--gen-exclude=FILE]]></option></term>
+            <listitem>
+              <para>Generate in <computeroutput>FILE</computeroutput> the list of
+                all symbols (containing perturbed floating-point instructions)
+		encountered during program execution. This is useful
+                to build an <link linkend="vr-manual.feat.exclude">exclusion
+                list</link>.</para>
+              <para>In combination
+              with <option><xref linkend="vr-opt.exclude"/></option>, only list
+              symbols which were not already present in the provided exclusion
+              list.</para>
+              <para>
+                WARNING: in order to generate a correct list, the whole binary
+                (including the symbols listed in the file provided
+                with <option><xref linkend="vr-opt.exclude"/></option>) must be
+                instrumented. When using
+                both <option><xref linkend="vr-opt.gen-exclude"/></option>
+                and <option><xref linkend="vr-opt.exclude"/></option>, it is
+                therefore advised to disable rounding-mode perturbations by
+                passing <option><xref linkend="vr-opt.rounding-mode"/><![CDATA[=nearest]]></option>.
+              </para>
+            </listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.source" xreflabel="--source">
+            <term><option><![CDATA[--source=FILE]]></option></term>
+            <listitem><para>When this option is present, only instructions
+                coming from <link linkend="vr-manual.feat.source">source code
+                lines</link> listed in <computeroutput>FILE</computeroutput>
+                are instrumented.</para></listitem>
+          </varlistentry>
+
+          <varlistentry id="vr-opt.gen-source" xreflabel="--gen-source">
+            <term><option><![CDATA[--gen-source=FILE]]></option></term>
+            <listitem>
+              <para>Generate in <computeroutput>FILE</computeroutput> the list
+                of all <link linkend="vr-manual.feat.source">source code
+                  lines</link> (containing perturbed floating-point instructions)
+		encountered during program execution.</para>
+              <para>In combination with
+                <option><xref linkend="vr-opt.source"/></option>, only list
+                source code lines which were not already present in the provided
+                list.</para>
+            </listitem>
+          </varlistentry>
+        </variablelist>
+      </section>
+
+      <section>
+        <title>Detection</title>
+        <variablelist id="vr.opts.detection.list">
+
+          <varlistentry id="vr.opt.check-nan" xreflabel="--check-nan">
+            <term><option><![CDATA[--check-nan=<yes|no> [default=yes]]]></option></term>
+            <listitem><para>
+		Activate NaN detection. Each detected NaN produces a Valgrind error.
+		This functionality requires the verrou backend.
+            </para></listitem>
+          </varlistentry>
+
+	  <varlistentry id="vr.opt.check-cancellation" xreflabel="--check-cancellation">
+            <term><option><![CDATA[--check-cancellation=<yes|no> [default=no]]]></option></term>
+            <listitem><para>
+		Activate cancellation detection. Each detected cancellation produces a Valgrind error.
+		This functionality is available for the verrou and mcaquad backends. The level
+		of detected cancellations can be configured with <option><xref linkend="vr.opt.cc-threshold-float"/></option> and
+		<option><xref linkend="vr.opt.cc-threshold-double"/></option>.
+            </para></listitem>
+          </varlistentry>
+
+	  <varlistentry id="vr.opt.cc-gen-file" xreflabel="--cc-gen-file">
+            <term><option><![CDATA[--cc-gen-file=<FILE>]]></option></term>
+            <listitem><para>
+		Generate in <computeroutput>FILE</computeroutput> (using the source-list format) the list of source code lines which produce at least one cancellation.
+		This functionality is available for the verrou and mcaquad backends. The level
+		of detected cancellations can be configured with <option><xref linkend="vr.opt.cc-threshold-float"/></option> and
+		<option><xref linkend="vr.opt.cc-threshold-double"/></option>.
+            </para></listitem>
+          </varlistentry>
+
+
+	  <varlistentry id="vr.opt.cc-threshold-float" xreflabel="--cc-threshold-float">
+            <term><option><![CDATA[--cc-threshold-float=<integer> [default=24]]]></option></term>
+            <listitem><para>
+		Configure the cancellation detection threshold for float operations.
+		The default value is still experimental and may change in the future.
+            </para></listitem>
+          </varlistentry>
+
+	  <varlistentry id="vr.opt.cc-threshold-double" xreflabel="--cc-threshold-double">
+            <term><option><![CDATA[--cc-threshold-double=<integer> [default=40]]]></option></term>
+            <listitem><para>
+		Configure the cancellation detection threshold for double operations.
+		The default value is still experimental and may change in the future.
+            </para></listitem>
+          </varlistentry>
+
+        </variablelist>
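+
+        <para>
+          As an example (a sketch: the program and output file names are placeholders), cancellation
+          detection can be enabled together with random rounding, dumping the source code lines
+          which produce cancellations into a file:
+          <screen>
+<prompt>$</prompt> valgrind --tool=verrou --rounding-mode=random --check-cancellation=yes \
+    --cc-gen-file=cancellations.src <replaceable>program</replaceable></screen>
+        </para>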
+      </section>
+
+      <section>
+        <title>Performance optimization</title>
+        <variablelist id="vr.opts.optim.list">
+          <varlistentry id="vr.opt.unsafe-llo-optim" xreflabel="--vr-unsafe-llo-optim">
+            <term><option><![CDATA[--vr-unsafe-llo-optim=<yes|no> [default=no]]]></option></term>
+            <listitem><para>
+		Activate a faster instrumentation process, which is however unsafe when the binary
+		mixes llo (lowest-lane-only) and vector instructions.
+            </para></listitem>
+          </varlistentry>
+        </variablelist>
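+
+        <para>
+          A sketch of a typical use (the program name is a placeholder):
+          <screen>
+<prompt>$</prompt> valgrind --tool=verrou --rounding-mode=random --vr-unsafe-llo-optim=yes <replaceable>program</replaceable></screen>
+        </para>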
+      </section>
+    </section>
+
+    <section id="vr-manual.client-requests" xreflabel="Verrou specific client requests">
+      <title>Client requests</title>
+
+      <para>Verrou provides the
+        following <link linkend="manual-core-adv.clientreq">client
+        requests</link> in the <filename>valgrind/verrou.h</filename>
+        header.
+      </para>
+
+      <variablelist>
+
+        <varlistentry id="vr-cr.display-counters"
+                      xreflabel="VERROU_DISPLAY_COUNTERS">
+          <term><computeroutput>VERROU_DISPLAY_COUNTERS</computeroutput></term>
+          <listitem><para>Display the
+              current <link linkend="vr-manual.feat.count">instructions
+              counters</link>.</para></listitem>
+        </varlistentry>
+
+        <varlistentry id="vr-cr.start-instrumentation"
+                      xreflabel="VERROU_START_INSTRUMENTATION">
+          <term><computeroutput>VERROU_START_INSTRUMENTATION</computeroutput></term>
+          <listitem><para>Start full
+              Verrou <link linkend="vr-manual.feat.instr">instrumentation</link>
+              (including rounding mode switching) if not already
+              enabled.</para></listitem>
+        </varlistentry>
+
+        <varlistentry id="vr-cr.stop-instrumentation"
+                      xreflabel="VERROU_STOP_INSTRUMENTATION">
+          <term><computeroutput>VERROU_STOP_INSTRUMENTATION</computeroutput></term>
+          <listitem><para>Stop full
+              Verrou <link linkend="vr-manual.feat.instr">instrumentation</link>
+              (don't switch rounding modes) if not already disabled.</para></listitem>
+        </varlistentry>
+
+        <varlistentry id="vr-cr.start-deterministic"
+                      xreflabel="VERROU_START_DETERMINISTIC(LEVEL)">
+          <term><computeroutput>VERROU_START_DETERMINISTIC(LEVEL)</computeroutput></term>
+          <listitem><para>Start
+              a <link linkend="vr-manual.feat.deterministic">deterministic
+              section</link>, i.e. one in which floating point operations are
+              perturbed, but in a deterministic way.</para></listitem>
+        </varlistentry>
+
+        <varlistentry id="vr-cr.stop-deterministic"
+                      xreflabel="VERROU_STOP_DETERMINISTIC(LEVEL)">
+          <term><computeroutput>VERROU_STOP_DETERMINISTIC(LEVEL)</computeroutput></term>
+          <listitem><para>Stop
+              a <link linkend="vr-manual.feat.deterministic">deterministic
+              section</link>, i.e. resume rounding mode switching in a
+              (pseudo-)random way.</para></listitem>
+        </varlistentry>
+
+      </variablelist>
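+
+      <para>
+        The following sketch (assuming a C program; <function>compute</function> is a hypothetical
+        function) shows how these client requests can be combined to restrict perturbations to a
+        single call and display the operation counters right after it:
+        <programlisting>
+#include &lt;stdio.h&gt;
+#include &lt;valgrind/verrou.h&gt;
+
+float compute (void);
+
+int main () {
+  VERROU_START_INSTRUMENTATION;  /* perturb only this part of the program */
+  float result = compute();
+  VERROU_STOP_INSTRUMENTATION;
+
+  VERROU_DISPLAY_COUNTERS;       /* show the floating-point operation counters so far */
+  fprintf (stdout, "result = %f\n", result);
+} </programlisting>
+        As with any use of instrumentation toggling, such a program is typically run with
+        <option><xref linkend="vr-opt.instr-atstart"/>=no</option>.
+      </para>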
+    </section>
+
+    <section>
+      <title>Monitor commands</title>
+
+      <para>
+        See <xref linkend="manual-core-adv.gdbserver"/> to get more information
+        about the Valgrind gdbserver and monitor commands. Below is a list of
+        the specific monitor commands provided by Verrou:
+      </para>
+      <variablelist>
+        <varlistentry id="vr.monitor_count" xreflabel="count">
+          <term><computeroutput>count</computeroutput></term>
+          <listitem><para>Display the
+              current <link linkend="vr-manual.feat.count">instructions
+                counters</link>.
+          </para></listitem>
+        </varlistentry>
+        <varlistentry id="vr.monitor_instrumentation" xreflabel="instrumentation">
+          <term><computeroutput>instrumentation [on|off]</computeroutput></term>
+          <listitem><para> Set the
+              current <link linkend="vr-manual.feat.instr">instrumentation
+              state</link> (or print it if
+              no <computeroutput>on</computeroutput>
+              / <computeroutput>off</computeroutput> parameter is given).
+          </para></listitem>
+        </varlistentry>
+      </variablelist>
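+
+      <para>
+        As a sketch, these commands can also be sent to a running Verrou process from another shell
+        through the standard <command>vgdb</command> utility (the <option>--pid</option> option is
+        only needed when several Valgrind processes are running):
+        <screen>
+<prompt>$</prompt> vgdb --pid=<replaceable>pid</replaceable> instrumentation off
+<prompt>$</prompt> vgdb --pid=<replaceable>pid</replaceable> count
+<prompt>$</prompt> vgdb --pid=<replaceable>pid</replaceable> instrumentation on</screen>
+      </para>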
+    </section>
+  </section>
+</chapter>
diff --git a/verrou/docs/vr-overview.xml b/verrou/docs/vr-overview.xml
new file mode 100644
index 0000000000000000000000000000000000000000..83b80d1b7754984086e2173718f5c01caa72197f
--- /dev/null
+++ b/verrou/docs/vr-overview.xml
@@ -0,0 +1,194 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<section id="vr-manual.overview" xreflabel="Overview">
+  <title>Overview</title>
+
+  <section>
+    <title>Basic usage</title>
+    <para>
+      Verrou helps you look for floating-point round-off errors in programs. It implements a
+      stochastic floating-point arithmetic based on random rounding: all floating-point operations
+      are perturbed by randomly switching rounding modes.  This can be seen as an asynchronous
+      variant of the CESTAC method, or a subset of Monte Carlo Arithmetic, performing only output
+      randomization.
+    </para>
+    
+    <para>
+      As with many other Valgrind tools, you probably want to recompile your program with debugging
+      info (the <option>-g</option> option) and with optimization turned on.
+    </para>
+    
+    <para>
+      To start a floating-point check for a program, execute:
+      <screen><cmdsynopsis><command>valgrind</command><arg choice="plain">--tool=verrou</arg><arg choice="opt"><replaceable>verrou options</replaceable></arg><arg choice="req"><replaceable>program</replaceable></arg><arg choice="opt"><replaceable>program options</replaceable></arg></cmdsynopsis></screen>
+    </para>
+
+    <para>For example:<screen>
+<prompt>$</prompt> valgrind --tool=verrou --rounding-mode=random python
+==18644== Verrou, Check floating-point rounding errors
+==18644== Copyright (C) 2014-2016, F. Fevotte &amp; B. Lathuiliere.
+==18644== Using Valgrind-3.12.0+verrou-1.0.0 and LibVEX; rerun with -h for copyright info
+==18644== Command: python
+==18644== 
+==18644== First seed : 194638
+==18644== Simulating RANDOM rounding mode
+==18644== Instrumented operations :
+==18644==       add : yes
+==18644==       sub : yes
+==18644==       mul : yes
+==18644==       div : yes
+==18644==       mAdd : yes
+==18644==       mSub : yes
+==18644==       cmp : no
+==18644==       conv : no
+==18644==       max : no
+==18644==       min : no
+==18644== Instrumented scalar operations : no
+Python 2.7.12+ (default, Sep  1 2016, 20:27:38) 
+[GCC 6.2.0 20160822] on linux2
+Type "help", "copyright", "credits" or "license" for more information.
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49949.99999999988
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49950.00000000036
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49949.99999999983
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49950.00000000032
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49949.99999999976
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49949.99999999981
+<prompt>>>></prompt> sum([0.1*i for i in xrange(1000)])
+49949.999999999854
+<prompt>>>></prompt> exit()
+==18913== 
+==18913==  ---------------------------------------------------------------------
+==18913==  Operation                            Instruction count
+==18913==   `- Precision
+==18913==       `- Vectorization          Total             Instrumented
+==18913==  ---------------------------------------------------------------------
+==18913==  add                     7044                     7044          (100%)
+==18913==   `- dbl                     7044                     7044      (100%)
+==18913==       `- llo                     7044                     7044  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  sub                       21                       21          (100%)
+==18913==   `- dbl                       21                       21      (100%)
+==18913==       `- llo                       21                       21  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  mul                     7073                     7073          (100%)
+==18913==   `- dbl                     7073                     7073      (100%)
+==18913==       `- llo                     7073                     7073  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  div                        7                        7          (100%)
+==18913==   `- dbl                        7                        7      (100%)
+==18913==       `- llo                        7                        7  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  cmp                       78                        0          (  0%)
+==18913==   `- dbl                       78                        0      (  0%)
+==18913==       `- scal                      78                        0  (  0%)
+==18913==  ---------------------------------------------------------------------
+==18913==  conv                   14042                        0          (  0%)
+==18913==   `- dbl=>int                  28                        0      (  0%)
+==18913==       `- scal                      28                        0  (  0%)
+==18913==   `- dbl=>sht               14014                        0      (  0%)
+==18913==       `- scal                   14014                        0  (  0%)
+==18913==  ---------------------------------------------------------------------
+==18913== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)</screen>
+    </para>
+    
+    <para>
+      First, Verrou will output a header indicating which floating-point instructions will be
+      perturbed, and how. By default, nothing will be perturbed. The <option><xref
+      linkend="vr-opt.rounding-mode"/>=random</option> command-line option is the most standard way
+      to perturb floating-point rounding-modes; see <xref linkend="vr-manual.feat.rounding-mode"/>
+      for more details.
+    </para>
+
+    <para>
+      During program execution, floating-point operations will be perturbed by constantly and
+      randomly switching the rounding mode. This makes your program output (hopefully slightly)
+      different results than in a normal execution. See <xref linkend="vr-manual.feat.rounding-mode"/>
+      for more details on the different rounding-mode switching strategies. In the example above,
+      the same python calculation performed several times yields varying results, whose analysis
+      shows that the first 14 decimal digits are always the same, and are thus probably reliable.
+    </para>
+
+    <para>
+      After program termination, a summary of floating point operations will be printed on
+      screen. See <xref linkend="vr-manual.feat.count"/> for more details.
+    </para>
+  </section>
+
+  <section id="vr-manual.overview.scope">
+    <title>Restriction of the perturbations scope</title>
+
+    <para>
+      Some algorithms are specifically and carefully designed to work in IEEE nearest rounding, and
+      do not behave well when Verrou perturbs floating-point rounding modes.
+    </para>
+
+    <para>
+      This is for example the case of the double-precision cosine implementation of the GNU
+      mathematical library (<literal>libm</literal>). Getting back to the previous python example:
+    </para> 
+    <screen>
+Python 2.7.12+ (default, Sep  1 2016, 20:27:38) 
+[GCC 6.2.0 20160822] on linux2
+Type "help", "copyright", "credits" or "license" for more information.
+<prompt>>>></prompt> import math
+<prompt>>>></prompt> math.cos(42.)
+-1.0050507702291946
+<prompt>>>></prompt> math.cos(42.)
+-0.3999853149883513
+<prompt>>>></prompt> math.cos(42.)
+-1.0050507702291946
+<prompt>>>></prompt> math.cos(42.)
+-0.9972351459047304</screen>
+
+    <para>
+      Here, we see that the same calculation performed several times did not only produce different
+      results; it also produced completely wrong values, well outside the expected
+      <mathphrase>[-1;1]</mathphrase> interval.
+    </para>
+
+    <para>
+      In such cases, where random rounding will almost always yield false positives, it might be a
+      good idea to temporarily disable the perturbations during the execution of functions which are
+      known to be correct. This can be performed by adding a <option><xref
+      linkend="vr-opt.exclude"/>=<replaceable>FILE</replaceable></option> command-line switch in the
+      following way:
+    </para>
+    <screen>valgrind --tool=verrou --rounding-mode=random --exclude=libm.ex python</screen>
+    <para>
+      where the file <filename>libm.ex</filename> contains a list of functions to be left
+      unperturbed. For example, in order to disable random rounding modes in the whole
+      <literal>libm</literal>:
+    </para>
+    <screen>
+# sym  lib
+*      /lib/x86_64-linux-gnu/libm-2.23.so
+    </screen>
+
+    <warning>
+      <para>
+        The library name listed in the second column of an exclusion file must be identified by a
+        fully canonical path, as produced by <command>readlink -f</command>.
+      </para>
+    </warning>
+    
+    <para>
+      When the <literal>libm</literal> is excluded from perturbations in such a way, the python
+      example above works as expected: the cosine is accurately computed, as usual. See <xref
+      linkend="vr-manual.feat.exclude"/> for more details about exclusion lists.
+    </para>
+  </section>
+</section>
+
+
+<!-- Local Variables: -->
+<!-- mode: nxml -->
+<!-- fill-column: 100 -->
+<!-- End: -->
diff --git a/verrou/docs/vr-scope.xml b/verrou/docs/vr-scope.xml
new file mode 100644
index 0000000000000000000000000000000000000000..f693e839893e056a1de9abc19f47b6bb451eb32f
--- /dev/null
+++ b/verrou/docs/vr-scope.xml
@@ -0,0 +1,266 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<section id="vr-manual.feat.scope"
+         xreflabel="Restriction of the perturbations scope">
+  <title>Restriction of the perturbations scope</title>
+
+  <para>
+    There are a number of reasons why it may be desirable to perturb only parts of a program:
+    <itemizedlist>
+      <listitem><para>when an algorithm is designed to work only in standard "rounding to nearest"
+      mode, such as the <literal>libm</literal> example described in the <link
+      linkend="vr-manual.overview.scope">overview</link>; </para></listitem>
+      <listitem><para>to save time by avoiding costly perturbations in places which are known to be
+      particularly stable; </para></listitem>
+      <listitem><para>to help locate the origin of numerical instabilities by studying the overall
+      impact of localized perturbations; see the <xref linkend="vr-manual.localization.dd"/> section
+      for an instability localization method that builds upon this idea.</para>
+      </listitem>
+    </itemizedlist>
+
+    Controlling which parts of the program are perturbed can be done in one of two ways,
+    described hereafter.
+  </para>
+
+  <section id="vr-manual.feat.exclude" xreflabel="Excluded symbols">
+    <title>Excluded symbols</title>
+    <para>
+      A first way to restrict the scope of instrumentation is based on either the function (or
+      rather symbol) name, the object (library) name, or both.
+    </para>
+    
+    <section>
+      <title>Exclusion files</title>
+      <para>
+        This can be done by providing an exclusion file via the <option><xref
+        linkend="vr-opt.exclude"/></option> command-line option. The file should have the
+        following format:
+        <itemizedlist>
+          <listitem><para>one rule per line;</para></listitem>
+          <listitem>
+            <para>each rule is of the form <computeroutput>FNNAME OBJNAME</computeroutput>, where
+            <computeroutput>FNNAME</computeroutput> is the function (symbol) name, and
+            <computeroutput>OBJNAME</computeroutput> is the object name (the name of the executable
+            or the shared library). Either can be replaced by a star
+            (<computeroutput>*</computeroutput>) to match anything. The two columns can be separated
+            by any number of spaces and tabulations (<computeroutput>\t</computeroutput>);</para>
+          </listitem>
+          <listitem><simpara>each line beginning with a hash (<computeroutput>#</computeroutput>) is
+          considered a comment and disregarded.</simpara></listitem>
+        </itemizedlist>
+      </para>
+      
+      <para>
+        When verrou finds a block of instructions (an IRSB, Intermediate Representation SuperBlock
+        in valgrind terminology) whose address matches the
+        <computeroutput>FNNAME</computeroutput>/<computeroutput>OBJNAME</computeroutput>
+        specification, the whole chunk is left uninstrumented.
+      </para>
+    
+      <warning>
+        <para>
+          The library name listed in the second column of an exclusion file must be identified by a
+          fully canonical path, as produced by <command>readlink -f</command>.
+        </para>
+      </warning>
+
+      <para>A recommended procedure to find the correct library path is the following: <screen>
+<prompt>$</prompt> ldd <replaceable>program</replaceable>
+        linux-vdso.so.1 (0x00007fffebbc6000)
+        libpthread.so.0 => /lib/x86_64-linux-gnu/libpthread.so.0 (0x00007fdbe1760000)
+        libdl.so.2 => /lib/x86_64-linux-gnu/libdl.so.2 (0x00007fdbe155c000)
+        libutil.so.1 => /lib/x86_64-linux-gnu/libutil.so.1 (0x00007fdbe1358000)
+        libz.so.1 => /lib/x86_64-linux-gnu/libz.so.1 (0x00007fdbe113d000)
+        libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007fdbe0e38000)
+        libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007fdbe0a96000)
+        /lib64/ld-linux-x86-64.so.2 (0x000055ba64113000)
+        
+<prompt>$</prompt> readlink -f <replaceable>/lib/x86_64-linux-gnu/libm.so.6</replaceable>
+/lib/x86_64-linux-gnu/libm-2.23.so </screen>
+      </para>
+    
+      <para>
+        Here are a few examples of exclusion rules for mathematics functions:
+        <itemizedlist>
+          <listitem>
+            <para>
+              exclude the whole <computeroutput>libm</computeroutput>:
+              <screen>
+#FNNAME   OBJNAME  
+*         /lib/x86_64-linux-gnu/libm-2.23.so</screen>
+            </para>
+          </listitem>
+          <listitem>
+            <para>
+              exclude specific functions based on their names only (be careful to list all functions
+              through which the program will pass, not only the top-level ones):<screen>
+#FNNAME        OBJNAME
+exp            *
+__ieee754_exp  *
+__exp1         *</screen>
+            </para>
+          </listitem>
+          <listitem>
+            <para>
+              exclude a specific function of a specific library: <screen>
+#FNNAME  OBJNAME
+exp      /lib/x86_64-linux-gnu/libm-2.23.so</screen>
+            </para>
+          </listitem>
+        </itemizedlist>
+      </para>
+    </section>
+    
+    <section>
+      <title>Automatic generation of exclusion lists</title>
+      <para>
+        Instead of manually establishing an exclusion list, the <option><xref
+        linkend="vr-opt.gen-exclude"/></option> command-line option can help produce the complete
+        list of <computeroutput>FNNAME</computeroutput>/<computeroutput>OBJNAME</computeroutput>
+        pairs through which a given program passes. This complete list can then be filtered to keep
+        only the functions which really need to be excluded.
+      </para>
+
+      <para>For example:<screen>
+<prompt>$</prompt> valgrind --tool=verrou --gen-exclude=all.ex --gen-above=Py_Main python
+==26049== Verrou, Check floating-point rounding errors
+==26049== Copyright (C) 2014-2016, F. Fevotte &amp; B. Lathuiliere.
+==26049== Using Valgrind-3.12.0+verrou-dev and LibVEX; rerun with -h for copyright info 
+==26049== Command: python
+==26049== 
+==26049== Simulating NEAREST rounding mode
+Python 2.7.12+ (default, Sep  1 2016, 20:27:38) 
+[GCC 6.2.0 20160822] on linux2
+Type "help", "copyright", "credits" or "license" for more information.
+<prompt>>>></prompt> import math
+<prompt>>>></prompt> math.cos(42.)
+-0.39998531498835127
+<prompt>>>></prompt> exit() 
+==26049== 
+==26049== Dumping exclusions list to `all.ex'... OK.
+==26049== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
+
+<prompt>$</prompt> grep libm all.ex | tee libm.ex
+do_sin.isra.2   /lib/x86_64-linux-gnu/libm-2.23.so
+__cos_avx       /lib/x86_64-linux-gnu/libm-2.23.so
+modf            /lib/x86_64-linux-gnu/libm-2.23.so</screen>
+      </para>
+      
+      <note>
+        <title>Note on C++ and symbol name mangling</title>
+        <para>When analyzing C++ programs, it is advised to also use
+        the <option><xref linkend="opt.demangle"/>=no</option> option so that
+        C++ symbol names are compared in their mangled form. Symbol names in
+        the exclusion list can still be demangled using an external program
+        such as <computeroutput>c++filt</computeroutput>.
+        </para>
+      </note>
+    </section>
+
+    <section id="vr-manual.feat.source" xreflabel="Source code lines selection">
+      <title>Source code lines selection</title>
+      <para>
+        A more fine-grained way to restrict the scope of instrumentation is based on source code
+        lines. In order to effectively use this feature, the instrumented binary file must embed
+        debugging information (i.e. it must have been compiled with the
+        <computeroutput>-g</computeroutput> switch or something equivalent).
+      </para>
+      <para>
+        This can be done by providing a list of source code lines to instrument, via the
+        <option><xref linkend="vr-opt.source"/></option> command-line option. The file should have
+        the following format:
+        <itemizedlist>
+          <listitem><para>one item per line,</para></listitem>
+          <listitem>
+            <para>each item of the form <computeroutput>FILENAME LINENUM [SYMNAME]</computeroutput>,
+            where <computeroutput>FILENAME</computeroutput> is the source file name (not path!),
+            <computeroutput>LINENUM</computeroutput> is the line number within the source file, and
+            <computeroutput>SYMNAME</computeroutput> is optional and indicates the name of the symbol
+            into which the source code line has been compiled. The columns can be separated by any number of
+            spaces and tabulations (<computeroutput>\t</computeroutput>).</para>
+          </listitem>
+        </itemizedlist>
+
+        Here is an example of a sources list file:<screen>
+#FILENAME       LINENUM  SYMNAME
+s_sin.c         594      __cos_avx
+s_sin.c         598      __cos_avx
+fenv_private.h  433      __cos_avx
+s_sin.c         581      __cos_avx
+e_asin.c        10       __acos_finite
+s_sin.c         12       cos</screen>
+      </para>
+      <para>
+        When verrou finds an instruction coming from a listed source code line, it instruments
+        it. Only the source file name and line number are considered during this selection. If the
+        <option><xref linkend="vr-opt.source"/></option> option is not specified, or the list of
+        instrumented source code lines is empty, all instructions are instrumented.
+      </para> 
+      <para>
+        The <option><xref linkend="vr-opt.gen-source"/></option> command-line switch can help
+        generate the list of all source code lines encountered during program execution, for later
+        filtering. It can be wise to combine it with the <option><xref
+        linkend="vr-opt.exclude"/></option> switch in order to restrict the source code lines list
+        to a specific set of symbols.
+      </para>
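+      <para>
+        A possible workflow (sketched here with arbitrary file names) is to first record every
+        source code line encountered during a non-perturbed run, filter the generated list by hand
+        or with standard text tools, and then re-run the program with perturbations restricted to
+        the remaining lines:
+        <screen>
+<prompt>$</prompt> valgrind --tool=verrou --gen-source=all.src <replaceable>program</replaceable>
+<prompt>$</prompt> grep __cos_avx all.src > lines.src
+<prompt>$</prompt> valgrind --tool=verrou --rounding-mode=random --source=lines.src <replaceable>program</replaceable></screen>
+      </para>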
+      <note>
+        <para>This fine-grained selection of code parts to instrument is primarily meant to be used
+        by automatic debugging tools (see <xref linkend="vr-manual.localization.dd"/>), not directly
+        by humans.</para>
+      </note>
+    </section>
+  </section>
+    
+
+
+  <section id="vr-manual.feat.instr" xreflabel="Instrumented sections">
+    <title>Instrumented sections</title>
+    <para>
+      Another way of controlling the scope of perturbations consists in toggling instrumentation on
+      and off during program execution.
+    </para>
+    <para>
+      Instrumentation can be toggled off at program start using the <option><xref
+      linkend="vr-opt.instr-atstart"/></option> command-line option. Furthermore, it can be
+      (de)activated programmatically using the <computeroutput><xref
+      linkend="vr-cr.start-instrumentation"/></computeroutput> and <computeroutput><xref
+      linkend="vr-cr.stop-instrumentation"/></computeroutput> client requests. This feature is
+      greatly inspired by Callgrind (see <xref linkend="cl-manual.limits"/> in the Callgrind
+      manual). Below is an example use:
+    <programlisting>
+#include &lt;valgrind/verrou.h&gt;
+#include &lt;stdio.h&gt;
+
+float compute ();
+
+int main () {
+  VERROU_START_INSTRUMENTATION;
+  float result = compute();
+  VERROU_STOP_INSTRUMENTATION;
+
+  fprintf (stdout, "result = %f", result);
+} </programlisting>
+    
+    This program should be run in Verrou using the following command:
+    <screen>valgrind --tool=verrou --instr-atstart=no <replaceable>program</replaceable></screen>
+    </para>
+    <para>
+      The same thing can be achieved interactively (and without the need for a recompilation of the
+      program) using the <computeroutput><xref
+      linkend="vr.monitor_instrumentation"/></computeroutput> monitor command.
+    </para>
+
+    <note>
+      <para> This method differs from <xref linkend="vr-manual.feat.exclude"/> in that
+      floating-point instructions within uninstrumented sections still appear in the <link
+      linkend="vr-manual.feat.count">count of floating-point instructions</link>.</para>
+    </note>
+  </section>
+</section>
+
+<!-- Local Variables: -->
+<!-- mode: nxml -->
+<!-- fill-column: 100 -->
+<!-- End: -->
diff --git a/verrou/docs/vr-std.xml b/verrou/docs/vr-std.xml
new file mode 100644
index 0000000000000000000000000000000000000000..8c06739cd7e972061b0fd3a3f1d078aa8d031207
--- /dev/null
+++ b/verrou/docs/vr-std.xml
@@ -0,0 +1,172 @@
+<?xml version="1.0"?> <!-- -*- sgml -*- -->
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+          "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+
+<section id="vr-manual.feat" xreflabel="Features">
+  <title>Standard features</title>
+  
+  <section id="vr-manual.feat.count" xreflabel="Floating-point instructions counting">
+    <title>Floating-point instructions counting</title>
+    <para>
+      Verrou detects and counts floating-point operations. A summary is printed after each program
+      execution, listing the number of floating-point operations executed by the program, broken
+      down into categories according to various criteria:
+    </para>
+    <variablelist>
+      <varlistentry>
+        <term><command>Operation type:</command></term>
+        <listitem><para>
+            <itemizedlist>
+              <listitem><simpara><command>add</command>: addition</simpara></listitem>
+              <listitem><simpara><command>sub</command>: subtraction</simpara></listitem>
+              <listitem><simpara><command>mul</command>: multiplication</simpara></listitem>
+              <listitem><simpara><command>div</command>: division</simpara></listitem>
+              <listitem><simpara><command>mAdd</command>: Fused Multiply-Add (FMA)</simpara></listitem>
+              <listitem><simpara><command>mSub</command>: Fused Multiply-Sub</simpara></listitem>
+              <listitem><simpara><command>cmp</command>: comparison</simpara></listitem>
+              <listitem><simpara><command>conv</command>: conversion (or cast)</simpara></listitem>
+            </itemizedlist>
+        </para></listitem>
+      </varlistentry>
+      <varlistentry>
+        <term><command>Floating-point precision:</command></term>
+        <listitem><para>
+            <itemizedlist>
+              <listitem><simpara><command>sht</command>: half precision (IEEE-754
+              <computeroutput>binary16</computeroutput>)</simpara></listitem>
+              <listitem><simpara><command>flt</command>: single precision (C
+              <computeroutput>float</computeroutput> / IEEE-754
+              <computeroutput>binary32</computeroutput>)</simpara></listitem>
+              <listitem><simpara><command>dbl</command>: double precision (C
+              <computeroutput>double</computeroutput> / IEEE-754
+              <computeroutput>binary64</computeroutput>)</simpara></listitem>
+            </itemizedlist>
+        </para></listitem>
+      </varlistentry>
+      <varlistentry>
+        <term><command>Vector nature of the instruction:</command></term>
+        <listitem><para>
+            <itemizedlist>
+              <listitem><simpara><command>scal</command>: scalar instruction</simpara></listitem>
+              <listitem><simpara><command>llo</command>: lowest-lane-only (unpacked) instruction
+              (<foreignphrase>i.e.</foreignphrase> scalar instruction from the SSE set, such as
+              <computeroutput>addss</computeroutput>)</simpara></listitem>
+              <listitem><simpara><command>vec</command>: full (packed) vector instruction (such as
+              <computeroutput>addps</computeroutput>)</simpara></listitem>
+            </itemizedlist>
+        </para></listitem>
+      </varlistentry>
+    </variablelist>
+
+    <para>Below is an example output of Verrou's summary of floating-point
+      operations: <screen>
+==18913==  ---------------------------------------------------------------------
+==18913==  Operation                            Instruction count
+==18913==   `- Precision
+==18913==       `- Vectorization          Total             Instrumented
+==18913==  ---------------------------------------------------------------------
+==18913==  add                     7044                     7044          (100%)
+==18913==   `- dbl                     7044                     7044      (100%)
+==18913==       `- llo                     7044                     7044  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  sub                       21                       21          (100%)
+==18913==   `- dbl                       21                       21      (100%)
+==18913==       `- llo                       21                       21  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  mul                     7073                     7073          (100%)
+==18913==   `- dbl                     7073                     7073      (100%)
+==18913==       `- llo                     7073                     7073  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  div                        7                        7          (100%)
+==18913==   `- dbl                        7                        7      (100%)
+==18913==       `- llo                        7                        7  (100%)
+==18913==  ---------------------------------------------------------------------
+==18913==  cmp                       78                        0          (  0%)
+==18913==   `- dbl                       78                        0      (  0%)
+==18913==       `- scal                      78                        0  (  0%)
+==18913==  ---------------------------------------------------------------------
+==18913==  conv                   14042                        0          (  0%)
+==18913==   `- dbl=>int                  28                        0      (  0%)
+==18913==       `- scal                      28                        0  (  0%)
+==18913==   `- dbl=>sht               14014                        0      (  0%)
+==18913==       `- scal                   14014                        0  (  0%)
+==18913==  ---------------------------------------------------------------------
+==18913== ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)</screen>
+    </para>
+
+    <para>
+      The set of columns labeled "Total" shows the total number of floating-point instructions as
+      seen by Verrou. In the second set of columns (labeled "Instrumented"), instructions are only
+      accounted for if they come from <xref linkend="vr-manual.feat.instr"/>. The last column shows
+      the fraction of instrumented instructions.
+    </para>
+
+    <note><para> Instructions coming from <xref linkend="vr-manual.feat.exclude"/> appear neither in
+    the "Total" nor the "Instrumented" count: they are simply not seen by Verrou.  </para></note>
+
+    <para>
+      Floating-point instruction counters can be displayed at any point during program execution
+      using the <computeroutput><xref linkend="vr-cr.display-counters"/></computeroutput>
+      client request or the <computeroutput><xref linkend="vr.monitor_count"/></computeroutput>
+      monitor command. This feature can also be disabled using the <option><xref
+      linkend="vr-opt.count-op"/>=no</option> command-line option.
+    </para>
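+    <para>
+      For example, operation counting could be disabled for a whole run as follows (illustrative
+      command line, where <computeroutput>./program</computeroutput> stands for the program under
+      study):
+      <screen>valgrind --tool=verrou --count-op=no ./program</screen>
+    </para>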
+  </section>
+
+  <section id="vr-manual.feat.rounding-mode" xreflabel="Rounding-mode switching">
+    <title>Rounding-mode switching</title>
+    <para>
+      When the instrumented program performs a floating-point operation, Verrou can replace it with
+      a perturbed version of the same operation, using another rounding mode. The <option><xref
+      linkend="vr-opt.rounding-mode"/></option> command-line option allows choosing between
+      different rounding strategies, described hereafter.
+    </para>
+    <para>
+      Verrou can simulate any of the four IEEE-754 standard rounding modes:
+      <itemizedlist>
+        <listitem><simpara><command>nearest</command> (default),</simpara></listitem>
+        <listitem><simpara><command>upward,</command></simpara></listitem>
+        <listitem><simpara><command>downward,</command></simpara></listitem>
+        <listitem><simpara><command>toward zero,</command></simpara></listitem>
+      </itemizedlist>
+      as well as two other (non-IEEE-754) deterministic modes:
+      <itemizedlist>
+        <listitem><para><command>farthest</command>: in this mode, all inexact results are rounded
+        in the direction opposite to the IEEE-754 nearest rounding mode. This helps produce results
+        that differ from the <command>nearest</command> rounding mode, while still being
+        deterministic.</para></listitem>
+
+        <listitem><para><command>float</command>: in this mode, all double-precision floating-point
+        operations are replaced by their single-precision equivalents. Operations on single-precision
+        operands remain unchanged.</para></listitem>
+
+      </itemizedlist>
+    </para>
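+    <para>
+      For example, a run using the <command>farthest</command> mode would typically be launched as
+      follows (illustrative command line; see <xref linkend="vr-opt.rounding-mode"/> for the exact
+      option syntax):
+      <screen>valgrind --tool=verrou --rounding-mode=farthest ./program</screen>
+    </para>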
+    <para>
+      Finally, the main use for Verrou is to randomly switch rounding mode at each floating-point
+      operation, in order to implement the "random rounding" variant of Monte Carlo Arithmetic
+      (MCA). Two strategies can be used to choose the rounding mode for a given operation:
+      <itemizedlist>
+        <listitem><para><command>random:</command> randomly pick either upward or downward
+        rounding, with equal probabilities. This is a form of the asynchronous CESTAC
+        method.</para></listitem>
+        <listitem><para><command>average:</command> randomly choose between the upward and downward
+        rounding modes, in such a way that the expectation of the random result equals the exact
+        operation result (without rounding). This is called "uniform_absolute output randomization"
+        in the MCA literature (both strategies are illustrated by the sketch below).</para></listitem>
+      </itemizedlist>
+      A pseudo-random number generator (pRNG) provides the randomness used in these modes. The pRNG
+      is normally seeded with a value that changes at each execution, so that each run produces
+      different "random rounding" results. To reproduce the results of a given run, it is however
+      possible to fix the seed using the <option><xref linkend="vr-opt.seed"/></option>
+      command-line option.
+    </para>
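+    <para>
+      As an illustration of the principle only (this is not Verrou's actual implementation: for
+      readability, integer neighbours computed with floor/ceil stand in for the two adjacent
+      representable floating-point values), the two random strategies could be sketched in Python
+      as follows:
+      <screen>
+import math, random
+
+def round_random(exact):
+    # "random" mode: pick the lower or upper neighbour with equal probability
+    down, up = math.floor(exact), math.ceil(exact)
+    if down == up:                      # the result is exact: nothing to perturb
+        return exact
+    return random.choice([down, up])
+
+def round_average(exact):
+    # "average" mode: round upward with a probability proportional to the
+    # rounding error, so that the expected value equals the exact result
+    down, up = math.floor(exact), math.ceil(exact)
+    if down == up:
+        return exact
+    p_up = (exact - down) / (up - down)
+    return up if random.random() &lt; p_up else down</screen>
+    </para>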
+  </section>
+</section>
+
+
+
+<!-- Local Variables: -->
+<!-- mode: nxml -->
+<!-- fill-column: 100 -->
+<!-- End: -->
diff --git a/verrou/env.sh.in b/verrou/env.sh.in
new file mode 100644
index 0000000000000000000000000000000000000000..e8271459b41dd9edf834ac1261fc2f76d665cf49
--- /dev/null
+++ b/verrou/env.sh.in
@@ -0,0 +1,9 @@
+# @configure_input@
+
+export PATH=@prefix@/bin:${PATH}
+export PYTHONPATH=$(prefix=@prefix@; echo @pythondir@):${PYTHONPATH}
+export MANPATH=@prefix@/share/man:${MANPATH}
+export CPATH=@prefix@/include:${CPATH}
+
+export VERROU_COMPILED_WITH_FMA=@vg_cv_verrou_fma@
+export VERROU_COMPILED_WITH_QUAD=@vg_cv_verrou_quad@
diff --git a/verrou/generateBackendInterOperator.py b/verrou/generateBackendInterOperator.py
new file mode 100755
index 0000000000000000000000000000000000000000..5ca233268889d28dd4209c71561e5ee37c5f1544
--- /dev/null
+++ b/verrou/generateBackendInterOperator.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+import sys
+import re
+
+
+
+def generateNargs(fileOut, fileNameTemplate, listOfBackend, listOfOp, nargs, post=""):
+
+    templateStr=open(fileNameTemplate, "r").readlines()
+
+    FctNameRegExp=re.compile("(.*)FCTNAME\(([^,]*),([^)]*)\)(.*)")
+    BckNameRegExp=re.compile("(.*)BACKENDFUNC\(([^)]*)\)(.*)")
+
+
+    for backend in listOfBackend:
+        for op in listOfOp:
+            if nargs in [1,2]:
+                applyTemplate(fileOut, templateStr, FctNameRegExp, BckNameRegExp, backend,op, post)
+            if nargs==3:
+                sign=""
+                if "msub" in op:
+                    sign="-"
+                applyTemplate(fileOut, templateStr,FctNameRegExp,BckNameRegExp, backend, op, post, sign)
+
+
+
+def applyTemplate(fileOut, templateStr, FctRegExp, BckRegExp, backend, op, post, sign=None):
+    fileOut.write("// generation of operation %s backend %s\n"%(op,backend))
+    def fctName(typeVal,opt):
+        return "vr_"+backend+post+op+typeVal+opt
+    def bckName(typeVal):
+        if sign!="-":
+            return "interflop_"+backend+"_"+op+"_"+typeVal
+        else:
+            return "interflop_"+backend+"_"+op.replace("sub","add")+"_"+typeVal
+
+    def bckNamePost(typeVal):
+        if sign!="-":
+            return "interflop_"+post+"_"+op+"_"+typeVal
+        else:
+            return "interflop_"+post+"_"+op.replace("sub","add")+"_"+typeVal
+
+
+    contextName="backend_"+backend+"_context"
+    contextNamePost="backend_"+post+"_context"
+
+    for line in templateStr:
+        if "CONTEXT" in line:
+            line=line.replace("CONTEXT", contextName)
+        if "SIGN" in line:
+            if sign!=None:
+                line=line.replace("SIGN", sign)
+            else:
+                print("Generation failed")
+                sys.exit()
+        result=FctRegExp.match(line)
+        if result!=None:
+            res=result.group(1) + fctName(result.group(2), result.group(3)) + result.group(4)
+            fileOut.write(res+"\n")
+            continue
+        result=BckRegExp.match(line)
+        if result!=None:
+            res=result.group(1) + bckName(result.group(2)) + result.group(3)
+            fileOut.write(res+"\n")
+            if post!="":
+                res=result.group(1) + bckNamePost(result.group(2)) + result.group(3)
+                res=res.replace(contextName, contextNamePost)
+                fileOut.write(res+"\n")
+            continue
+
+        fileOut.write(line)
+
+
+
+
+
+if __name__=="__main__":
+    fileNameOutput="vr_generated_from_templates.h"
+    fileOut=open(fileNameOutput,"w")
+    fileOut.write("//Generated by %s\n"%(str(sys.argv)[1:-1]))
+
+    template1Args="vr_interp_operator_template_cast.h"
+    listOfOp1Args=["cast"]
+    generateNargs(fileOut,template1Args, ["verrou","mcaquad"], listOfOp1Args, 1)
+
+    template2Args="vr_interp_operator_template_2args.h"
+    listOfOp2Args=["add","sub","mul","div"]
+    generateNargs(fileOut,template2Args, ["verrou","mcaquad"], listOfOp2Args, 2)
+
+    listOfOp2Args=["add","sub"]
+    generateNargs(fileOut,template2Args, ["verrou","mcaquad"], listOfOp2Args, 2, post="checkcancellation")
+
+    template3Args="vr_interp_operator_template_3args.h"
+    listOfOp3Args=["madd","msub"]
+    generateNargs(fileOut,template3Args, ["verrou","mcaquad"], listOfOp3Args, 3)
+
+    generateNargs(fileOut,template3Args, ["verrou","mcaquad"], listOfOp3Args, 3, post="checkcancellation")
+
+    fileOut.close()
diff --git a/verrou/interflop_backend_interface.h b/verrou/interflop_backend_interface.h
new file mode 100644
index 0000000000000000000000000000000000000000..94902e09343960e02adb96c1b012095c78418460
--- /dev/null
+++ b/verrou/interflop_backend_interface.h
@@ -0,0 +1,27 @@
+/* interflop backend interface */
+#pragma once
+
+
+
+struct interflop_backend_interface_t {
+  void (*interflop_add_float)(float, float, float*, void*);
+  void (*interflop_sub_float)(float, float, float*, void*);
+  void (*interflop_mul_float)(float, float, float*, void*);
+  void (*interflop_div_float)(float, float, float*, void*);
+
+  void (*interflop_add_double)(double, double, double*, void*);
+  void (*interflop_sub_double)(double, double, double*, void*);
+  void (*interflop_mul_double)(double, double, double*, void*);
+  void (*interflop_div_double)(double, double, double*, void*);
+
+  void (*interflop_cast_double_to_float)(double, float*, void*);
+
+  void (*interflop_madd_float)(float, float, float, float*, void*);
+  void (*interflop_madd_double)(double, double, double, double*, void*);
+};
+
+/* interflop_init: called at initialization before using a backend.
+ * It returns an interflop_backend_interface_t structure with callbacks
+ * for each of the numerical instrument hooks */
+
+//struct interflop_backend_interface_t interflop_BACKENDNAME_init(void ** context);
diff --git a/verrou/newbackend.md b/verrou/newbackend.md
new file mode 100644
index 0000000000000000000000000000000000000000..c9c836fe787db9ce7f5601375ba24c0ee5afced8
--- /dev/null
+++ b/verrou/newbackend.md
@@ -0,0 +1,51 @@
+This document briefly explains how to add a new backend to verrou.
+
+
+1- add a new directory backend_NEWNAME with the same structure as backend_verrou
+   Remark: a Makefile and a test_main.cxx are provided to test the backend without the frontend
+
+2- add the new source files in Makefile.am (you may need to run ./autogen.sh && ./configure ... again, cf. README.md)
+
+#At this step you can test compilation
+
+3- modify vr_main.h:
+   - add #include "backend_NEWNAME/interflop_NEWNAME.h"
+   - add vr_NEWNAME to the enum vr_backend_name
+   - add the data needed for the backend configuration to the Vr_State struct
+
+#At this step you can test compilation
+
+4- modify the file generateBackendInterOperator.py
+   - in the __main__ section, add NEWNAME to the backend lists passed to the generateNargs calls
+     (see the sketch below)
+   - generate a new vr_generated_from_templates.h file: ./generateBackendInterOperator.py
+   - Remark: both files are tracked in git
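+
+   As a sketch (NEWNAME is the hypothetical new backend; the other arguments are the ones already
+   present in the file), the change to one of the generateNargs calls in __main__ would look like:
+
+       # before
+       generateNargs(fileOut, template2Args, ["verrou","mcaquad"], listOfOp2Args, 2)
+       # after: the new backend is added to the list of backends
+       generateNargs(fileOut, template2Args, ["verrou","mcaquad","NEWNAME"], listOfOp2Args, 2)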
+
+#At this step you can't test compilation (you can after the first item of the next step)
+
+5- modify vr_main.c:
+   - add the instantiation of backend_NEWNAME and backend_NEWNAME_context, as done for verrou or mcaquad
+   - modify vr_instrumentOp with a new switch case. If some calls are missing in the backend, you can
+   adapt the inclusion with #define macros. You may need to customize the file vr_instrumentOp_impl.h.
+   - add interflop_NEWNAME_finalyze(backend_NEWNAME_context) in vr_fini
+
+   - configure the backend in vr_post_clo_init
+   - add a message displaying the options taken into account
+
+#At this step you can test compilation
+
+6- modify vr_clo.c:
+   - set the default options in vr_clo_defaults (initialization of the new attributes defined in vr_main.h)
+   - add the user options in vr_process_clo
+
+#At this step you can test compilation and usage
+
+7- modify vr_clreq.c [if the backend uses a random generator]
+   - add NEWBACKEND_set_seed(hash) in the vr_[start/stop]_deterministic_section functions
+
+8- add client requests to configure the backend dynamically [optional]
+
+9- update the documentation
+  - modify vr-manual.xml
+  - compile the documentation: make -C docs html-docs man-pages
+  - adapt docs/update-vr-clo to the install path [no need to commit]
+  - generate a new vr_clo.txt (to be committed) from the valgrind directory: ./verrou/docs/update-vr-clo
diff --git a/verrou/pyTools/DD.py b/verrou/pyTools/DD.py
new file mode 100755
index 0000000000000000000000000000000000000000..ced493dc4b46416aec7100969c60e1ff426ba183
--- /dev/null
+++ b/verrou/pyTools/DD.py
@@ -0,0 +1,1047 @@
+#!/usr/bin/env python3
+
+# $Id: DD.py,v 1.2 2001/11/05 19:53:33 zeller Exp $
+# Enhanced Delta Debugging class
+# Copyright (c) 1999, 2000, 2001 Andreas Zeller.
+
+# This module (written in Python) implements the base delta debugging
+# algorithms and is at the core of all our experiments.  This should
+# easily run on any platform and any Python version since 1.6.
+#
+# To plug this into your system, all you have to do is to create a
+# subclass with a dedicated `test()' method.  Basically, you would
+# invoke the DD test case minimization algorithm (= the `ddmin()'
+# method) with a list of characters; the `test()' method would combine
+# them to a document and run the test.  This should be easy to realize
+# and give you some good starting results; the file includes a simple
+# sample application.
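+#
+# As a minimal sketch of that usage pattern (illustrative only; the real,
+# runnable sample application is at the end of this file), one would subclass
+# DD, overload the `_test()' stub, and call `ddmin()':
+#
+#     class MyDD(DD):
+#         def _test(self, c):
+#             # here the "failure" is simply having both 5 and 8 selected
+#             return self.FAIL if (5 in c and 8 in c) else self.PASS
+#
+#     print(MyDD().ddmin([1, 2, 3, 4, 5, 6, 7, 8]))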
+#
+# This file is in the public domain; feel free to copy, modify, use
+# and distribute this software as you wish - with one exception.
+# Passau University has filed a patent for the use of delta debugging
+# on program states (A. Zeller: `Isolating cause-effect chains',
+# Saarland University, 2001).  The fact that this file is publicly
+# available does not imply that I or anyone else grants you any rights
+# related to this patent.
+#
+# The use of Delta Debugging to isolate failure-inducing code changes
+# (A. Zeller: `Yesterday, my program worked', ESEC/FSE 1999) or to
+# simplify failure-inducing input (R. Hildebrandt, A. Zeller:
+# `Simplifying failure-inducing input', ISSTA 2000) is, as far as I
+# know, not covered by any patent, nor will it ever be.  If you use
+# this software in any way, I'd appreciate if you include a citation
+# such as `This software uses the delta debugging algorithm as
+# described in (insert one of the papers above)'.
+#
+# All about Delta Debugging is found at the delta debugging web site,
+#
+#               http://www.st.cs.uni-sb.de/dd/
+#
+# Happy debugging,
+#
+# Andreas Zeller
+
+import sys
+import os
+
+# Start with some helpers.
+class OutcomeCache:
+    # This class holds test outcomes for configurations.  This avoids
+    # running the same test twice.
+
+    # The outcome cache is implemented as a tree.  Each node points
+    # to the outcome of the remaining list.
+    #
+    # Example: ([1, 2, 3], PASS), ([1, 2], FAIL), ([1, 4, 5], FAIL):
+    #
+    #      (2, FAIL)--(3, PASS)
+    #     /
+    # (1, None)
+    #     \
+    #      (4, None)--(5, FAIL)
+
+    def __init__(self):
+        self.tail = {}                  # Points to outcome of tail
+        self.result = None              # Result so far
+
+    def add(self, c, result):
+        """Add (C, RESULT) to the cache.  C must be a list of scalars."""
+        cs = c[:]
+        cs.sort()
+
+        p = self
+        for start in range(len(c)):
+            if not c[start] in p.tail:
+                p.tail[c[start]] = OutcomeCache()
+            p = p.tail[c[start]]
+
+        p.result = result
+
+    def lookup(self, c):
+        """Return RESULT if (C, RESULT) is in the cache; None, otherwise."""
+        p = self
+        for start in range(len(c)):
+            if not c[start] in p.tail:
+                return None
+            p = p.tail[c[start]]
+
+        return p.result
+
+    def lookup_superset(self, c, start = 0):
+        """Return RESULT if there is some (C', RESULT) in the cache with
+        C' being a superset of C or equal to C.  Otherwise, return None."""
+
+        # FIXME: Make this non-recursive!
+        if start >= len(c):
+            if self.result:
+                return self.result
+            elif self.tail != {}:
+                # Select some superset
+                # list() is needed: dict.keys() is not subscriptable in Python 3
+                superset = self.tail[list(self.tail.keys())[0]]
+                return superset.lookup_superset(c, start + 1)
+            else:
+                return None
+
+        if c[start] in self.tail:
+            return self.tail[c[start]].lookup_superset(c, start + 1)
+
+        # Let K0 be the largest element in TAIL such that K0 <= C[START]
+        k0 = None
+        for k in self.tail.keys():
+            if (k0 == None or k > k0) and k <= c[start]:
+                k0 = k
+
+        if k0 != None:
+            return self.tail[k0].lookup_superset(c, start)
+
+        return None
+
+    def lookup_subset(self, c):
+        """Return RESULT if there is some (C', RESULT) in the cache with
+        C' being a subset of C or equal to C.  Otherwise, return None."""
+        p = self
+        for start in range(len(c)):
+            if c[start] in p.tail:
+                p = p.tail[c[start]]
+
+        return p.result
+
+
+
+
+# Test the outcome cache
+def oc_test():
+    oc = OutcomeCache()
+
+    assert oc.lookup([1, 2, 3]) == None
+    oc.add([1, 2, 3], 4)
+    assert oc.lookup([1, 2, 3]) == 4
+    assert oc.lookup([1, 2, 3, 4]) == None
+
+    assert oc.lookup([5, 6, 7]) == None
+    oc.add([5, 6, 7], 8)
+    assert oc.lookup([5, 6, 7]) == 8
+
+    assert oc.lookup([]) == None
+    oc.add([], 0)
+    assert oc.lookup([]) == 0
+
+    assert oc.lookup([1, 2]) == None
+    oc.add([1, 2], 3)
+    assert oc.lookup([1, 2]) == 3
+    assert oc.lookup([1, 2, 3]) == 4
+
+    assert oc.lookup_superset([1]) == 3 or oc.lookup_superset([1]) == 4
+    assert oc.lookup_superset([1, 2]) == 3 or oc.lookup_superset([1, 2]) == 4
+    assert oc.lookup_superset([5]) == 8
+    assert oc.lookup_superset([5, 6]) == 8
+    assert oc.lookup_superset([6, 7]) == 8
+    assert oc.lookup_superset([7]) == 8
+    assert oc.lookup_superset([]) != None
+
+    assert oc.lookup_superset([9]) == None
+    assert oc.lookup_superset([7, 9]) == None
+    assert oc.lookup_superset([-5, 1]) == None
+    assert oc.lookup_superset([1, 2, 3, 9]) == None
+    assert oc.lookup_superset([4, 5, 6, 7]) == None
+
+    assert oc.lookup_subset([]) == 0
+    assert oc.lookup_subset([1, 2, 3]) == 4
+    assert oc.lookup_subset([1, 2, 3, 4]) == 4
+    assert oc.lookup_subset([1, 3]) == None
+    assert oc.lookup_subset([1, 2]) == 3
+
+    assert oc.lookup_subset([-5, 1]) == None
+    assert oc.lookup_subset([-5, 1, 2]) == 3
+    assert oc.lookup_subset([-5]) == 0
+
+
+# Main Delta Debugging algorithm.
+class DD:
+    # Delta debugging base class.  To use this class for a particular
+    # setting, create a subclass with an overloaded `test()' method.
+    #
+    # Main entry points are:
+    # - `ddmin()' which computes a minimal failure-inducing configuration, and
+    # - `dd()' which computes a minimal failure-inducing difference.
+    #
+    # See also the usage sample at the end of this file.
+    #
+    # For further fine-tuning, you can implement an own `resolve()'
+    # method (tries to add or remove configuration elements in case of
+    # inconsistencies), or implement an own `split()' method, which
+    # allows you to split configurations according to your own
+    # criteria.
+    # 
+    # The class includes other previous delta debugging algorithms,
+    # which are obsolete now; they are only included for comparison
+    # purposes.
+
+    # Test outcomes.
+    PASS       = "PASS"
+    FAIL       = "FAIL"
+    UNRESOLVED = "UNRESOLVED"
+
+    # Resolving directions.
+    ADD    = "ADD"			# Add deltas to resolve
+    REMOVE = "REMOVE"			# Remove deltas to resolve
+
+    # Debugging output (set to 1 to enable)
+    debug_test      = 0
+    debug_dd        = 0
+    debug_split     = 0
+    debug_resolve   = 0
+
+    def __init__(self):
+        self.__resolving = 0
+        self.__last_reported_length = 0
+        self.monotony = 0
+        self.outcome_cache  = OutcomeCache()
+        self.cache_outcomes = 1
+        self.minimize = 1
+        self.maximize = 1
+        self.assume_axioms_hold = 1
+
+    # Helpers
+    def __listminus(self, c1, c2):
+        """Return a list of all elements of C1 that are not in C2."""
+        s2 = {}
+        for delta in c2:
+            s2[delta] = 1
+
+        c = []
+        for delta in c1:
+            if not delta in s2:
+                c.append(delta)
+
+        return c
+
+    def __listintersect(self, c1, c2):
+        """Return the common elements of C1 and C2."""
+        s2 = {}
+        for delta in c2:
+            s2[delta] = 1
+
+        c = []
+        for delta in c1:
+            if delta in s2:
+                c.append(delta)
+
+        return c
+
+    def __listunion(self, c1, c2):
+        """Return the union of C1 and C2."""
+        s1 = {}
+        for delta in c1:
+            s1[delta] = 1
+
+        c = c1[:]
+        for delta in c2:
+            if not delta in s1:
+                c.append(delta)
+
+        return c
+
+    def __listsubseteq(self, c1, c2):
+        """Return 1 if C1 is a subset or equal to C2."""
+        s2 = {}
+        for delta in c2:
+            s2[delta] = 1
+
+        for delta in c1:
+            if not delta in s2:
+                return 0
+
+        return 1
+
+    # Output
+    def coerce(self, c):
+        """Return the configuration C as a compact string"""
+        # Default: use printable representation
+        return repr(c)
+
+    def pretty(self, c):
+        """Like coerce(), but sort beforehand"""
+        sorted_c = c[:]
+        sorted_c.sort()
+        return self.coerce(sorted_c)
+
+    # Testing
+    def test(self, c):
+        """Test the configuration C.  Return PASS, FAIL, or UNRESOLVED"""
+        #c.sort()
+
+        # If we had this test before, return its result
+        if self.cache_outcomes:
+            cached_result = self.outcome_cache.lookup(c)
+            if cached_result != None:
+                return cached_result
+
+        if self.monotony:
+            # Check whether we had a passing superset of this test before
+            cached_result = self.outcome_cache.lookup_superset(c)
+            if cached_result == self.PASS:
+                return self.PASS
+
+            cached_result = self.outcome_cache.lookup_subset(c)
+            if cached_result == self.FAIL:
+                return self.FAIL
+
+        if self.debug_test:
+            print()
+            print("test(" + self.coerce(c) + ")...")
+
+        outcome = self._test(c)
+
+        if self.debug_test:
+            print("test(" + self.coerce(c) + ") = " + repr(outcome))
+
+        if self.cache_outcomes:
+            self.outcome_cache.add(c, outcome)
+
+        return outcome
+
+    def _test(self, c):
+        """Stub to overload in subclasses"""
+        return self.UNRESOLVED		# Placeholder
+
+
+    # Splitting
+    def split(self, c, n):
+        """Split C into [C_1, C_2, ..., C_n]."""
+        if self.debug_split:
+            print("split(" + self.coerce(c) + ", " + repr(n) + ")...")
+
+        outcome = self._split(c, n)
+
+        if self.debug_split:
+            print( "split(" + self.coerce(c) + ", " + repr(n) + ") = " + repr(outcome))
+
+        return outcome
+
+    def _split(self, c, n):
+        """Stub to overload in subclasses"""
+        subsets = []
+        start = 0
+        for i in range(n):
+            subset = c[start:start + (len(c) - start) // (n - i)]
+            subsets.append(subset)
+            start = start + len(subset)
+        return subsets
+
+
+    # Resolving
+    def resolve(self, csub, c, direction):
+        """If direction == ADD, resolve inconsistency by adding deltas
+        to CSUB.  Otherwise, resolve by removing deltas from CSUB."""
+
+        if self.debug_resolve:
+            print("resolve(" + repr(csub) + ", " + self.coerce(c) + ", " + \
+                repr(direction) + ")...")
+
+        outcome = self._resolve(csub, c, direction)
+
+        if self.debug_resolve:
+            print("resolve(" + repr(csub) + ", " + self.coerce(c) + ", " + \
+                  repr(direction) + ") = " + repr(outcome))
+
+        return outcome
+
+
+    def _resolve(self, csub, c, direction):
+        """Stub to overload in subclasses."""
+        # By default, no way to resolve
+        return None
+
+
+    # Test with fixes
+    def test_and_resolve(self, csub, r, c, direction):
+        """Repeat testing CSUB + R while unresolved."""
+
+        initial_csub = csub[:]
+        c2 = self.__listunion(r, c)
+
+        csubr = self.__listunion(csub, r)
+        t = self.test(csubr)
+
+        # necessary to use more resolving mechanisms which can reverse each
+        # other, can (but needn't) be used in subclasses
+        self._resolve_type = 0
+
+        while t == self.UNRESOLVED:
+            self.__resolving = 1
+            csubr = self.resolve(csubr, c, direction)
+
+            if csubr == None:
+                # Nothing left to resolve
+                break
+
+            if len(csubr) >= len(c2):
+                # Added everything: csub == c2. ("Upper" Baseline)
+                # This has already been tested.
+                csubr = None
+                break
+
+            if len(csubr) <= len(r):
+                # Removed everything: csub == r. (Baseline)
+                # This has already been tested.
+                csubr = None
+                break
+
+            t = self.test(csubr)
+
+        self.__resolving = 0
+        if csubr == None:
+            return self.UNRESOLVED, initial_csub
+
+        # assert t == self.PASS or t == self.FAIL
+        csub = self.__listminus(csubr, r)
+        return t, csub
+
+    # Inquiries
+    def resolving(self):
+        """Return 1 while resolving."""
+        return self.__resolving
+
+
+    # Logging
+    def report_progress(self, c, title):
+        if len(c) != self.__last_reported_length:
+            print()
+            print(title + ": " + repr(len(c)) + " deltas left:", self.coerce(c))
+            self.__last_reported_length = len(c)
+
+
+    # Delta Debugging (old ESEC/FSE version)
+    def old_dd(self, c, r = [], n = 2):
+        """Return the failure-inducing subset of C"""
+
+        assert self.test([]) == self.PASS
+        assert self.test(c)  == self.FAIL
+
+        if self.debug_dd:
+            print ("dd(" + self.pretty(c) + ", " + repr(r) + ", " + repr(n) + ")...")
+
+        outcome = self._old_dd(c, r, n)
+
+        if self.debug_dd:
+            print ("dd(" + self.pretty(c) + ", " + repr(r) + ", " + repr(n) +
+                   ") = " + repr(outcome))
+
+        return outcome
+
+
+
+    def test_mix(self, csub, c, direction):
+        if self.minimize:
+            (t, csub) = self.test_and_resolve(csub, [], c, direction)
+            if t == self.FAIL:
+                return (t, csub)
+
+        if self.maximize:
+            csubbar = self.__listminus(self.CC, csub)
+            cbar    = self.__listminus(self.CC, c)
+            if direction == self.ADD:
+                directionbar = self.REMOVE
+            else:
+                directionbar = self.ADD
+
+            (tbar, csubbar) = self.test_and_resolve(csubbar, [], cbar,
+                                                    directionbar)
+
+            csub = self.__listminus(self.CC, csubbar)
+
+            if tbar == self.PASS:
+                t = self.FAIL
+            elif tbar == self.FAIL:
+                t = self.PASS
+            else:
+                t = self.UNRESOLVED
+
+        return (t, csub)
+
+
+
+    # Delta Debugging (new ISSTA version)
+    def ddgen(self, c, minimize, maximize):
+        """Return a 1-minimal failing subset of C"""
+
+        self.minimize = minimize
+        self.maximize = maximize
+
+        n = 2
+        self.CC = c
+
+        if self.debug_dd:
+            print ("dd(" + self.pretty(c) + ", " + repr(n) + ")...")
+
+        outcome = self._dd(c, n)
+
+        if self.debug_dd:
+            print ("dd(" + self.pretty(c) + ", " + repr(n) + ") = " + repr(outcome))
+
+        return outcome
+
+    def _dd(self, c, n):
+        """Stub to overload in subclasses"""
+
+        testNoDelta=self.test([])
+        if testNoDelta!=self.PASS:
+            self.noDeltaSucceedMsg()
+            print("ERROR: test([]) == FAILED")
+            sys.exit()
+#        assert self.test([]) == self.PASS
+
+        run = 1
+        cbar_offset = 0
+
+        # We replace the tail recursion from the paper by a loop
+        while 1:
+            tc = self._test(c)
+            if tc != self.FAIL and tc != self.UNRESOLVED:
+                if run==1:
+                    self.deltaFailedMsg(c)
+
+                if "VERROU_DD_UNSAFE" in os.environ:
+                    print ("WARNING: test([all deltas]) == PASS")
+                else:
+                    self.allDeltaFailedMsg(c)
+                    print ("ERROR: test([all deltas]) == PASS")
+                    sys.exit(1)
+
+
+            if n > len(c):
+                # No further minimizing
+                print ("dd: done")
+                return c
+
+            self.report_progress(c, "dd")
+
+            cs = self.split(c, n)
+
+            print ()
+            print ("dd (run #" + repr(run) + "): trying", "+".join([repr(len(cs[i])) for i in range(n)] ) )
+
+            c_failed    = 0
+            cbar_failed = 0
+
+            next_c = c[:]
+            next_n = n
+
+            # Check subsets
+            for i in range(n):
+                if self.debug_dd:
+                    print ("dd: trying", self.pretty(cs[i]))
+
+                (t, cs[i]) = self.test_mix(cs[i], c, self.REMOVE)
+
+                if t == self.FAIL:
+                    # Found
+                    if self.debug_dd:
+                        print ("dd: found", len(cs[i]), "deltas:",)
+                        print (self.pretty(cs[i]))
+
+                    c_failed = 1
+                    next_c = cs[i]
+                    next_n = 2
+                    cbar_offset = 0
+                    self.report_progress(next_c, "dd")
+                    break
+
+            if not c_failed:
+                # Check complements
+                cbars = n * [self.UNRESOLVED]
+
+                # print "cbar_offset =", cbar_offset
+
+                for j in range(n):
+                    i = (j + cbar_offset) % n
+                    cbars[i] = self.__listminus(c, cs[i])
+                    t, cbars[i] = self.test_mix(cbars[i], c, self.ADD)
+
+                    doubled = self.__listintersect(cbars[i], cs[i])
+                    if doubled != []:
+                        cs[i] = self.__listminus(cs[i], doubled)
+
+                    if t == self.FAIL:
+                        if self.debug_dd:
+                            print ("dd: reduced to", len(cbars[i]),)
+                            print ("deltas:", end="")
+                            print (self.pretty(cbars[i]))
+
+                        cbar_failed = 1
+                        next_c = self.__listintersect(next_c, cbars[i])
+                        next_n = next_n - 1
+                        self.report_progress(next_c, "dd")
+
+                        # In next run, start removing the following subset
+                        cbar_offset = i
+                        break
+
+            if not c_failed and not cbar_failed:
+                if n >= len(c):
+                    # No further minimizing
+                    print ("dd: done")
+                    return c
+
+                next_n = min(len(c), n * 2)
+                print ("dd: increase granularity to", next_n)
+                cbar_offset = (cbar_offset * next_n) // n
+
+            c = next_c
+            n = next_n
+            run = run + 1
+
+    def verrou_dd_max(self, c):
+        """Stub to overload in subclasses"""
+        self.maximize=1
+        self.minimize=0
+        n = 2
+        self.CC = c
+        algo_name="dd_max"
+        
+        testNoDelta=self.test([])
+        if testNoDelta!=self.PASS:
+            self.noDeltaSucceedMsg()
+            print("ERROR: test([]) == FAILED")
+            sys.exit()
+#        assert self.test([]) == self.PASS
+
+        run = 1
+        cbar_offset = 0
+
+        # We replace the tail recursion from the paper by a loop
+        while 1:
+            tc = self.test(c)
+            if tc != self.FAIL and tc != self.UNRESOLVED:
+                if run==1:
+                    self.deltaFailedMsg(c)
+
+                if "VERROU_DD_UNSAFE" in os.environ:
+                    print ("WARNING: test([all deltas]) == PASS")
+
+
+            if n > len(c):
+                # No further minimizing
+                print (algo_name+": done")
+                return c
+
+            self.report_progress(c, algo_name)
+
+            cs = self.split(c, n)
+
+            print ()
+            print (algo_name+" (run #" + repr(run) + "): trying", "+".join([repr(len(cs[i])) for i in range(n)] ) )
+
+            c_failed    = 0
+            cbar_failed = 0
+
+            next_c = c[:]
+            next_n = n
+
+
+            if not c_failed:
+                # Check complements
+                cbars = n * [self.UNRESOLVED]
+
+                # print "cbar_offset =", cbar_offset
+
+                for j in range(n):
+                    i = (j + cbar_offset) % n
+                    cbars[i] = self.__listminus(c, cs[i])
+                    t, cbars[i] = self.test_mix(cbars[i], c, self.ADD)
+
+                    doubled = self.__listintersect(cbars[i], cs[i])
+                    if doubled != []:
+                        cs[i] = self.__listminus(cs[i], doubled)
+
+                    if t == self.FAIL:
+                        if self.debug_dd:
+                            print (algo_name+": reduced to", len(cbars[i]),)
+                            print ("deltas:", end="")
+                            print (self.pretty(cbars[i]))
+
+                        cbar_failed = 1
+                        next_c = self.__listintersect(next_c, cbars[i])
+                        next_n = next_n - 1
+                        self.report_progress(next_c, algo_name)
+
+                        # In next run, start removing the following subset
+                        cbar_offset = i
+                        break
+
+            if not c_failed and not cbar_failed:
+                if n >= len(c):
+                    # No further minimizing
+                    print (algo_name+": done")
+                    return c
+
+                next_n = min(len(c), n * 2)
+                print (algo_name+": increase granularity to", next_n)
+                cbar_offset = (cbar_offset * next_n) // n
+
+            c = next_c
+            n = next_n
+            run = run + 1
+
+
+
+    def verrou_dd_min(self, c , nbRun):
+        """Stub to overload in subclasses"""
+        n = 2
+        algo_name="ddmin"
+
+        testNoDelta=self._test([],nbRun)
+        if testNoDelta!=self.PASS:
+            print("ERROR: test([]) == FAILED")
+            self.noDeltaSucceedMsg()
+
+
+        run = 1
+        cbar_offset = 0
+
+        # We replace the tail recursion from the paper by a loop
+        while 1:
+            tc = self._test(c ,nbRun)
+            if tc != self.FAIL and tc != self.UNRESOLVED:
+                if run==1:
+                    self.deltaFailedMsg(c)
+
+                if "VERROU_DD_UNSAFE" in os.environ:
+                    print ("WARNING: test([all deltas]) == PASS")
+                else:
+                    print ("ERROR: test([all deltas]) == PASS")
+                    self.allDeltaFailedMsg(c)
+
+
+
+
+            if n > len(c):
+                # No further minimizing
+                print (algo_name+": done")
+                return c
+
+            self.report_progress(c, algo_name)
+
+            cs = self.split(c, n)
+
+            print ()
+            print (algo_name+" (run #" + repr(run) + "): trying", "+".join([repr(len(cs[i])) for i in range(n)] ) )
+
+            c_failed    = False
+            cbar_failed = False
+
+            next_c = c[:]
+            next_n = n
+
+            # Check subsets
+            for i in range(n):
+                if self.debug_dd:
+                    print (algo_name+": trying", self.pretty(cs[i]))
+
+                t = self._test(cs[i],nbRun)
+
+                if t == self.FAIL:
+                    # Found
+                    if self.debug_dd:
+                        print (algo_name+": found", len(cs[i]), "deltas:",)
+                        print (self.pretty(cs[i]))
+
+                    c_failed = True
+                    next_c = cs[i]
+                    next_n = 2
+                    cbar_offset = 0
+                    self.report_progress(next_c, algo_name)
+                    break
+
+            if not c_failed:
+                # Check complements
+                cbars = n * [self.UNRESOLVED]
+
+                # print "cbar_offset =", cbar_offset
+
+                for j in range(n):
+                    i = (j + cbar_offset) % n
+                    cbars[i] = self.__listminus(c, cs[i])
+                    t = self._test(cbars[i],nbRun)
+
+                    if t == self.FAIL:
+                        if self.debug_dd:
+                            print (algo_name+": reduced to", len(cbars[i]),)
+                            print ("deltas:", end="")
+                            print (self.pretty(cbars[i]))
+
+                        cbar_failed = True
+                        next_c = cbars[i]
+                        next_n = next_n - 1
+                        self.report_progress(next_c, algo_name)
+
+                        # In next run, start removing the following subset
+                        cbar_offset = i
+                        break
+
+            if not c_failed and not cbar_failed:
+                if n >= len(c):
+                    # No further minimizing
+                    print (algo_name+": done")
+                    return c
+
+                next_n = min(len(c), n * 2)
+                print (algo_name+": increase granularity to", next_n)
+                cbar_offset = (cbar_offset * next_n) // n
+
+            c = next_c
+            n = next_n
+            run = run + 1
+
+    def ddmin(self, c):
+        return self.ddgen(c, 1, 0)
+
+    def ddmax(self, c):
+        return self.ddgen(c, 0, 1)
+
+    def ddmix(self, c):
+        return self.ddgen(c, 1, 1)
+
+
+    # General delta debugging (new TSE version)
+    def dddiff(self, c):
+        n = 2
+
+        if self.debug_dd:
+            print ("dddiff(" + self.pretty(c) + ", " + repr(n) + ")...")
+
+        outcome = self._dddiff([], c, n)
+
+        if self.debug_dd:
+            print ("dddiff(" + self.pretty(c) + ", " + repr(n) + ") = " +
+                   repr(outcome))
+
+        return outcome
+
+    def _dddiff(self, c1, c2, n):
+        run = 1
+        cbar_offset = 0
+
+        # We replace the tail recursion from the paper by a loop
+        while 1:
+            if self.debug_dd:
+                print ("dd: c1 =", self.pretty(c1))
+                print ("dd: c2 =", self.pretty(c2))
+
+            if self.assume_axioms_hold:
+                t1 = self.PASS
+                t2 = self.FAIL
+            else:
+                t1 = self.test(c1)
+                t2 = self.test(c2)
+
+            assert t1 == self.PASS
+            assert t2 == self.FAIL
+            assert self.__listsubseteq(c1, c2)
+
+            c = self.__listminus(c2, c1)
+
+            if self.debug_dd:
+                print ("dd: c2 - c1 =", self.pretty(c))
+
+            if n > len(c):
+                # No further minimizing
+                print ("dd: done")
+                return (c, c1, c2)
+
+            self.report_progress(c, "dd")
+
+            cs = self.split(c, n)
+
+            print ()
+            print ("dd (run #" + repr(run) + "): trying",)
+            for i in range(n):
+                if i > 0:
+                    print ("+",)
+                print (len(cs[i]),)
+            print ()
+
+            progress = 0
+
+            next_c1 = c1[:]
+            next_c2 = c2[:]
+            next_n = n
+
+            # Check subsets
+            for j in range(n):
+                i = (j + cbar_offset) % n
+
+                if self.debug_dd:
+                    print ("dd: trying", self.pretty(cs[i]))
+
+                (t, csub) = self.test_and_resolve(cs[i], c1, c, self.REMOVE)
+                csub = self.__listunion(c1, csub)
+
+                if t == self.FAIL and t1 == self.PASS:
+                    # Found
+                    progress    = 1
+                    next_c2     = csub
+                    next_n      = 2
+                    cbar_offset = 0
+
+                    if self.debug_dd:
+                        print ("dd: reduce c2 to", len(next_c2), "deltas:",)
+                        print (self.pretty(next_c2))
+                    break
+
+                if t == self.PASS and t2 == self.FAIL:
+                    # Reduce to complement
+                    progress    = 1
+                    next_c1     = csub
+                    next_n      = max(next_n - 1, 2)
+                    cbar_offset = i
+
+                    if self.debug_dd:
+                        print ("dd: increase c1 to", len(next_c1), "deltas:",)
+                        print (self.pretty(next_c1))
+                    break
+
+
+                csub = self.__listminus(c, cs[i])
+                (t, csub) = self.test_and_resolve(csub, c1, c, self.ADD)
+                csub = self.__listunion(c1, csub)
+
+                if t == self.PASS and t2 == self.FAIL:
+                    # Found
+                    progress    = 1
+                    next_c1     = csub
+                    next_n      = 2
+                    cbar_offset = 0
+
+                    if self.debug_dd:
+                        print ("dd: increase c1 to", len(next_c1), "deltas:",)
+                        print (self.pretty(next_c1))
+                    break
+
+                if t == self.FAIL and t1 == self.PASS:
+                    # Increase
+                    progress    = 1
+                    next_c2     = csub
+                    next_n      = max(next_n - 1, 2)
+                    cbar_offset = i
+
+                    if self.debug_dd:
+                        print ("dd: reduce c2 to", len(next_c2), "deltas:",)
+                        print (self.pretty(next_c2))
+                    break
+
+            if progress:
+                self.report_progress(self.__listminus(next_c2, next_c1), "dd")
+            else:
+                if n >= len(c):
+                    # No further minimizing
+                    print ("dd: done")
+                    return (c, c1, c2)
+
+                next_n = min(len(c), n * 2)
+                print ("dd: increase granularity to", next_n)
+                cbar_offset = (cbar_offset * next_n) // n
+
+            c1  = next_c1
+            c2  = next_c2
+            n   = next_n
+            run = run + 1
+
+    def dd(self, c):
+        return self.dddiff(c)           # Backwards compatibility
+
+
+
+
+if __name__ == '__main__':
+    # Test the outcome cache
+    oc_test()
+
+    # Define our own DD class, with its own test method
+    class MyDD(DD):
+        def _test_a(self, c):
+            "Test the configuration C.  Return PASS, FAIL, or UNRESOLVED."
+
+            # Just a sample
+            # if 2 in c and not 3 in c:
+            #	return self.UNRESOLVED
+            # if 3 in c and not 7 in c:
+            #   return self.UNRESOLVED
+            if 7 in c and not 2 in c:
+                return self.UNRESOLVED
+            if 5 in c and 8 in c:
+                return self.FAIL
+            return self.PASS
+
+        def _test_b(self, c):
+            if c == []:
+                return self.PASS
+            if 1 in c and 2 in c and 3 in c and 4 in c and \
+               5 in c and 6 in c and 7 in c and 8 in c:
+                return self.FAIL
+            return self.UNRESOLVED
+
+        def _test_c(self, c):
+            if 1 in c and 2 in c and 3 in c and 4 in c and \
+               6 in c and 8 in c:
+                if 5 in c and 7 in c:
+                    return self.UNRESOLVED
+                else:
+                    return self.FAIL
+            if 1 in c or 2 in c or 3 in c or 4 in c or \
+               6 in c or 8 in c:
+                return self.UNRESOLVED
+            return self.PASS
+
+        def __init__(self):
+            self._test = self._test_c
+            DD.__init__(self)
+
+
+    print ("WYNOT - a tool for delta debugging.")
+    mydd = MyDD()
+    # mydd.debug_test     = 1			# Enable debugging output
+    # mydd.debug_dd       = 1			# Enable debugging output
+    # mydd.debug_split    = 1			# Enable debugging output
+    # mydd.debug_resolve  = 1			# Enable debugging output
+
+    # mydd.cache_outcomes = 0
+    # mydd.monotony = 0
+
+    print ("Minimizing failure-inducing input...")
+    c = mydd.ddmin([1, 2, 3, 4, 5, 6, 7, 8])  # Invoke DDMIN
+    print ("The 1-minimal failure-inducing input is", c)
+    print ("Removing any element will make the failure go away.")
+    print ()
+
+    print ("Computing the failure-inducing difference...")
+    (c, c1, c2) = mydd.dd([1, 2, 3, 4, 5, 6, 7, 8])	# Invoke DD
+    print ("The 1-minimal failure-inducing difference is", c)
+    print (c1, "passes,", c2, "fails")
+
+
+
+# Local Variables:
+# mode: python
+# End:
diff --git a/verrou/pyTools/DD_exec_stat.py b/verrou/pyTools/DD_exec_stat.py
new file mode 100644
index 0000000000000000000000000000000000000000..f99b659ccf772555e0f8f871f92e8b02985a6699
--- /dev/null
+++ b/verrou/pyTools/DD_exec_stat.py
@@ -0,0 +1,38 @@
+import sys
+import os
+import time
+
+class exec_stat:
+    def __init__(self,repName):
+        self.repName=repName
+        self.timeInit()
+
+    def terminate(self):
+        self.timeEnd()
+        self.printElapsed(int(self.end- self.start))
+        self.printNbRun()
+
+    def timeInit(self):
+        self.start = time.time()
+
+    def timeEnd(self):
+        self.end = int(time.time())
+
+    def printElapsed(self,duration):
+        s= duration % 60
+        rm= duration //60
+        m=rm%60
+        rh=rm//60
+        h=rh%24
+        rd=rh//24
+        print ("\nElapsed Time: %id %ih %imin %is   "%(rd,h,m,s) )
+
+    def isNew(self, filename):
+        return ((os.stat(filename).st_mtime) > self.start)
+
+    def printNbRun(self,dirName="."):
+        import glob
+
+        runTab=glob.glob(dirName+"/"+self.repName+"/*/dd.run*/dd.run.out")
+        runFilter=[filename for filename in runTab if self.isNew(filename)]
+        print(self.repName+"  search : %i run (with cache included: %i)"%(len(runFilter),len(runTab)) )
diff --git a/verrou/pyTools/DD_stoch.py b/verrou/pyTools/DD_stoch.py
new file mode 100644
index 0000000000000000000000000000000000000000..7dcb61d5996e6a99cea43842b12f7a1fb795296d
--- /dev/null
+++ b/verrou/pyTools/DD_stoch.py
@@ -0,0 +1,552 @@
+import sys
+import os
+
+import subprocess
+
+import shutil
+import hashlib
+import copy
+from valgrind import DD
+
+
+def runCmdAsync(cmd, fname, envvars=None):
+    """Run CMD, adding ENVVARS to the current environment, and redirecting standard
+    and error outputs to FNAME.out and FNAME.err respectively.
+
+    Returns CMD's exit code."""
+    if envvars is None:
+        envvars = {}
+
+    with open("%s.out"%fname, "w") as fout:
+        with open("%s.err"%fname, "w") as ferr:
+            env = copy.deepcopy(os.environ)
+            for var in envvars:
+                env[var] = envvars[var]
+            return subprocess.Popen(cmd, env=env, stdout=fout, stderr=ferr)
+
+def getResult(subProcess):
+    subProcess.wait()
+    return subProcess.returncode
+
+
+def runCmd(cmd, fname, envvars=None):
+    """Run CMD, adding ENVVARS to the current environment, and redirecting standard
+    and error outputs to FNAME.out and FNAME.err respectively.
+
+    Returns CMD's exit code."""
+
+    return getResult(runCmdAsync(cmd,fname,envvars))
+
+
+
+class verrouTask:
+
+    def __init__(self, dirname, refDir,runCmd, cmpCmd,nbRun, maxNbPROC, runEnv):
+        self.dirname=dirname
+        self.refDir=refDir
+        self.runCmd=runCmd
+        self.cmpCmd=cmpCmd
+        self.nbRun=nbRun
+        self.FAIL=DD.DD.FAIL
+        self.PASS=DD.DD.PASS
+
+        self.subProcessRun={}
+        self.maxNbPROC= maxNbPROC
+        self.runEnv=runEnv
+
+        print(self.dirname,end="")
+
+    def nameDir(self,i):
+        return  os.path.join(self.dirname,"dd.run%i" % (i+1))
+
+    def mkdir(self,i):
+         os.mkdir(self.nameDir(i))
+    def rmdir(self,i):
+        shutil.rmtree(self.nameDir(i))
+
+    def runOneSample(self,i):
+        rundir= self.nameDir(i)
+
+        self.subProcessRun[i]=runCmdAsync([self.runCmd, rundir],
+                                          os.path.join(rundir,"dd.run"),
+                                          self.runEnv)
+
+    def cmpOneSample(self,i):
+        rundir= self.nameDir(i)
+        if self.subProcessRun[i]!=None:
+            getResult(self.subProcessRun[i])
+        retval = runCmd([self.cmpCmd, self.refDir, rundir],
+                        os.path.join(rundir,"dd.compare"))
+
+        with open(os.path.join(self.dirname, rundir, "returnVal"),"w") as f:
+            f.write(str(retval))
+        if retval != 0:
+            print("FAIL(%d)" % i)
+            return self.FAIL
+        else:
+            return self.PASS
+
+    def sampleToComputeToGetFailure(self, nbRun):
+        """Return the list of samples which have to be computed to perforn nbRun Success run : None mean Failure [] Mean Success """
+        listOfDir=[runDir for runDir in os.listdir(self.dirname) if runDir.startswith("dd.run")]
+        done=[]
+        for runDir in listOfDir:
+            status=int((open(os.path.join(self.dirname, runDir, "returnVal")).readline()))
+            if status!=0:
+                return None
+            done+=[runDir]
+
+        res= [x for x in range(nbRun) if not ('dd.run'+str(x+1)) in done]
+        return res
+
+    def run(self):
+        workToDo=self.sampleToComputeToGetFailure(self.nbRun)
+        if workToDo==None:
+            print(" --(cache) -> FAIL")
+            return self.FAIL
+
+        if len(workToDo)!=0:
+            print(" --( run )-> ",end="",flush=True)
+
+            if self.maxNbPROC==None:
+                returnVal=self.runSeq(workToDo)
+            else:
+                returnVal=self.runPar(workToDo)
+
+            if(returnVal==self.PASS):
+                print("PASS(+" + str(len(workToDo))+"->"+str(self.nbRun)+")" )
+            return returnVal
+        print(" --(cache)-> PASS("+str(self.nbRun)+")")
+        return self.PASS
+
+    def runSeq(self,workToDo):
+
+        for run in workToDo:
+            self.mkdir(run)
+            self.runOneSample(run)
+            retVal=self.cmpOneSample(run)
+
+            if retVal=="FAIL":
+                return self.FAIL
+        return self.PASS
+
+    def runPar(self,workToDo):
+
+        for run in workToDo:
+            self.mkdir(run)
+            self.runOneSample(run)
+        for run in workToDo:
+            retVal=self.cmpOneSample(run)
+
+            if retVal=="FAIL":
+                return self.FAIL
+
+        return self.PASS
+
+
+def md5Name(deltas):
+    copyDeltas=copy.copy(deltas)
+    copyDeltas.sort()
+    return hashlib.md5(("".join(copyDeltas)).encode('utf-8')).hexdigest()
+
+
+def prepareOutput(dirname):
+     shutil.rmtree(dirname, ignore_errors=True)
+     os.makedirs(dirname)
+
+
+            
+def failure():
+    sys.exit(42)
+
+
+
+
+def symlink(src, dst):
+    if os.path.lexists(dst):
+        os.remove(dst)
+    os.symlink(src, dst)
+
+
+class DDStoch(DD.DD):
+    def __init__(self, config, prefix):
+        DD.DD.__init__(self)
+        self.config_=config
+        self.run_ =  self.config_.get_runScript()
+        self.compare_ = self.config_.get_cmpScript()
+        self.cache_outcomes = False
+        self.index=0
+        self.prefix_ = os.path.join(os.getcwd(),prefix)
+        self.ref_ = os.path.join(self.prefix_, "ref")
+
+        prepareOutput(self.ref_)
+        self.reference()
+        self.mergeList()
+        self.checkReference()
+
+
+    def mergeList(self):
+        """merge the file name.$PID into a uniq file called name """
+        dirname=self.ref_
+        name=self.getDeltaFileName()
+
+        listOfExcludeFile=[ x for x in os.listdir(dirname) if self.isFileValidToMerge(x) ]
+        if len(listOfExcludeFile)<1:
+            print("The generation of exclusion/source files failed")
+            failure()
+
+        with open(os.path.join(dirname,listOfExcludeFile[0]), "r") as f:
+                excludeMerged=f.readlines()
+
+        for excludeFile in listOfExcludeFile[1:]:
+            with open(os.path.join(dirname,excludeFile), "r") as f:
+                for line in f.readlines():
+                    if line not in excludeMerged:
+                        excludeMerged+=[line]
+        with open(os.path.join(dirname, name), "w" )as f:
+            for line in excludeMerged:
+                f.write(line)
+
+        
+    def checkReference(self):
+        retval = runCmd([self.compare_,self.ref_, self.ref_],
+                        os.path.join(self.ref_,"checkRef"))
+        if retval != 0:
+            print("FAILURE: the reference is not valid ")
+            print("Suggestions:")
+            print("\t1) check the correctness of the %s script"%self.compare_)
+            print("\t2) if your code contains C++ code (libraries included), check the presence of the valgrind option --demangle=no in the run script")
+
+            print("Files to analyze:")
+            print("\t run output: " +  os.path.join(self.ref_,"dd.out") + " " + os.path.join(self.ref_,"dd.err"))
+            print("\t cmp output: " +  os.path.join(self.ref_,"checkRef.out") + " "+ os.path.join(self.ref_,"checkRef.err"))
+            failure()
+
+    def testWithLink(self, deltas, linkname):
+        #by default the symlinks are generated when the test fails
+        testResult=self._test(deltas)
+        dirname = os.path.join(self.prefix_, md5Name(deltas))
+        symlink(dirname, os.path.join(self.prefix_,linkname))
+        return testResult
+
+    def report_progress(self, c, title):
+        if not self.config_.get_quiet():
+            super().report_progress(c,title)
+
+    def configuration_found(self, kind_str, delta_config,verbose=True):
+        if verbose:
+            print("%s (%s):"%(kind_str,self.coerce(delta_config)))
+        self.testWithLink(delta_config, kind_str)
+
+    def run(self, deltas=None):
+        if deltas==None:
+            deltas=self.getDelta0()
+
+        algo=self.config_.get_ddAlgo()
+        resConf=None
+        if algo=="rddmin":
+            resConf = self.RDDMin(deltas, self.config_.get_nbRUN())
+        if algo.startswith("srddmin"):
+            resConf= self.SRDDMin(deltas, self.config_.get_rddMinTab())
+        if algo.startswith("drddmin"):
+            resConf = self.DRDDMin(deltas,
+                                   self.config_.get_rddMinTab(),
+                                   self.config_.get_splitTab(),
+                                   self.config_.get_splitGranularity())
+        if algo=="ddmax":
+            resConf= self.DDMax(deltas)
+        else:
+            if resConf!=None:
+                flatRes=[c  for conf in resConf for c in conf]
+                cmp= [delta for delta in deltas if  delta not in flatRes ]
+                self.configuration_found("rddmin-cmp", cmp)
+
+        return resConf
+
+    def DDMax(self, deltas):
+        res=self.verrou_dd_max(deltas)
+        cmp=[delta for delta in deltas if delta not in res]
+        self.configuration_found("ddmax", cmp)
+        self.configuration_found("ddmax-cmp", res)
+
+        return cmp
+
+    def RDDMin(self, deltas,nbRun):
+        ddminTab=[]
+        testResult=self._test(deltas)
+        if testResult!=self.FAIL:
+            self.deltaFailedMsg(deltas)
+
+        while testResult==self.FAIL:
+            conf = self.verrou_dd_min(deltas,nbRun)
+
+            ddminTab += [conf]
+            self.configuration_found("ddmin%d"%(self.index), conf)
+            #print("ddmin%d (%s):"%(self.index,self.coerce(conf)))
+
+            #update deltas
+            deltas=[delta for delta in deltas if delta not in conf]
+            testResult=self._test(deltas,nbRun)
+            self.index+=1
+        return ddminTab
+
+    def splitDeltas(self, deltas,nbRun,granularity):
+        if self._test(deltas, self.config_.get_nbRUN())==self.PASS:
+            return [] #short exit
+
+        res=[] #result : set of smallest failing subsets (each further split with respect to granularity leads to success)
+
+        #two lists which contain tasks
+        # -the fail status is known
+        toTreatFailed=[deltas]
+        # -the status is not yet known
+        toTreatUnknown=[]
+
+        #name for progression
+        algo_name="splitDeltas"
+
+        def treatFailedCandidat(candidat):
+            #treat a failing configuration
+            self.report_progress(candidat, algo_name)
+
+            # create subset
+            cutSize=min(granularity, len(candidat))
+            ciTab=self.split(candidat, cutSize)
+
+            cutAbleStatus=False
+            for i in range(len(ciTab)):
+                ci=ciTab[i]
+                #test each subset
+                status=self._test(ci ,nbRun)
+                if status==self.FAIL:
+                    if len(ci)==1:
+                        #if the subset size is one the subset is a valid ddmin : treat as such
+                        self.configuration_found("ddmin%d"%(self.index), ci)
+                        #print("ddmin%d (%s):"%(self.index,self.coerce(ci)))
+                        self.index+=1
+                        res.append(ci)
+                    else:
+                        #insert the subset at the beginning of the failed task list
+                        toTreatFailed.insert(0,ci)
+                        #insert the remaining subsets to the unknown task list
+                        tail= ciTab[i+1:]
+                        tail.reverse() # to keep the same order
+                        for cip in tail:
+                            toTreatUnknown.insert(0,cip)
+                        return
+                    cutAbleStatus=True
+            #no failing subset was found: the failing configuration cannot be split further
+            if cutAbleStatus==False:
+                res.append(candidat)
+
+        def treatUnknownStatusCandidat(candidat):
+            #test the configuration : do nothing in case of success and add it to the failed task list in case of failure
+            self.report_progress(candidat, algo_name+ "(unknownStatus)")
+            status=self._test(candidat, nbRun)
+            if status==self.FAIL:
+                toTreatFailed.insert(0,candidat)
+            else:
+                pass
+
+        # loop over tasks
+        while len(toTreatFailed)!=0 or len(toTreatUnknown)!=0:
+
+            unknownStatusSize=len(deltas) #to get a max
+            if len(toTreatUnknown)!=0:
+                unknownStatusSize=len(toTreatUnknown[0])
+
+            if len(toTreatFailed)==0:
+                treatUnknownStatusCandidat(toTreatUnknown[0])
+                toTreatUnknown=toTreatUnknown[1:]
+                continue
+
+            #select the smallest candidate : in case of equal size select the failing one
+            toTreatCandidat=toTreatFailed[0]
+            if  len(toTreatCandidat) <= unknownStatusSize:
+                cutCandidat=toTreatCandidat
+                toTreatFailed=toTreatFailed[1:]
+                treatFailedCandidat(cutCandidat)
+            else:
+                treatUnknownStatusCandidat(toTreatUnknown[0])
+                toTreatUnknown=toTreatUnknown[1:]
+        return res
+
+    def SsplitDeltas(self, deltas, runTab, granularity):#runTab=splitTab ,granularity=2):
+        #apply splitDeltas recursively with an increasing sample number (runTab)
+        #remark: the treatment of the remainder does not respect the binary split structure
+
+        #name for progression
+        algo_name="ssplitDelta"
+
+        currentSplit=[deltas]
+        for run in runTab:
+            nextCurrent=[]
+            for candidat in currentSplit:
+                if len(candidat)==1:
+                    nextCurrent.append(candidat)
+                    continue
+                self.report_progress(candidat,algo_name)
+                res=self.splitDeltas(candidat,run, granularity)
+                nextCurrent.extend(res)
+
+            #remainDeltas is recomputed from the whole list (indeed the set can grow while splitDeltas is applied)
+            flatNextCurrent=[flatItem  for nextCurrentItem in nextCurrent for flatItem in nextCurrentItem]
+            remainDeltas=[delta for delta in deltas if delta not in flatNextCurrent ]
+
+            #apply split to remainDeltas
+            self.report_progress(remainDeltas,algo_name)
+            nextCurrent.extend(self.splitDeltas(remainDeltas, run, granularity))
+
+            currentSplit=nextCurrent
+
+        return currentSplit
+
+    def DRDDMin(self, deltas, SrunTab, dicRunTab, granularity):#SrunTab=rddMinTab, dicRunTab=splitTab, granularity=2):
+        #name for progression
+        algo_name="DRDDMin"
+
+        #check that the whole configuration fails with the final nbRun value
+        nbRun=SrunTab[-1]
+        testResult=self._test(deltas,nbRun)
+        if testResult!=self.FAIL:
+            self.deltaFailedMsg(deltas)
+
+        #apply dichotomy
+        candidats=self.SsplitDeltas(deltas,dicRunTab, granularity)
+        print("Dichotomy split done")
+
+        res=[]
+        for candidat in candidats:
+            if len(candidat)==1: #is a valid ddmin
+                res+=[candidat]
+                deltas=[delta for delta in deltas if delta not in candidat]
+            else:
+                self.report_progress(candidat, algo_name)
+                #we do not know if the candidate is a valid ddmin (in case of a sparse pattern)
+                resTab=self.SRDDMin(candidat,SrunTab)
+                for resMin in resTab:
+                    res+=[resMin] #add to res
+                    deltas=[delta for delta in deltas if delta not in resMin] #reduce search space
+        print("Dichotomy split analyze done")
+
+        #after the split-based filtering, a classic (s)rddmin is applied on the remaining deltas
+        testResult=self._test(deltas,nbRun)
+        if testResult!=self.FAIL:
+            return res
+        else:
+            return res+self.SRDDMin(deltas, SrunTab)
+
+
+
+    def SRDDMin(self, deltas,runTab):#runTab=rddMinTab):
+        #name for progression
+        algo_name="SRDDMin"
+        #check that the whole configuration fails with the final nbRun value
+        nbRun=runTab[-1]
+        testResult=self._test(deltas,nbRun)
+        if testResult!=self.FAIL:
+            self.deltaFailedMsg(deltas)
+
+        ddminTab=[]
+
+        #increasing number of runs
+        for run in runTab:
+            testResult=self._test(deltas,run)
+
+            #rddmin loop
+            while testResult==self.FAIL:
+                self.report_progress(deltas, algo_name)
+                conf = self.verrou_dd_min(deltas,run)
+                if len(conf)!=1:
+                    #may not be minimal due to the number of runs
+                    for runIncValue in [x for x in runTab if x>run ]:
+                        conf = self.verrou_dd_min(conf,runIncValue)
+                        if len(conf)==1:
+                            break
+
+                ddminTab += [conf]
+                self.configuration_found("ddmin%d"%(self.index), conf)
+                #print("ddmin%d (%s):"%(self.index,self.coerce(conf)))
+                self.index+=1
+                #update search space
+                deltas=[delta for delta in deltas if delta not in conf]
+                #end test loop of rddmin
+                testResult=self._test(deltas,nbRun)
+
+        return ddminTab
+
+    #Error Msg
+    def deltaFailedMsg(self,delta):
+        print("FAILURE: nothing to debug (the run with all symbols activated succeed)")
+        print("Suggestions:")
+        print("\t1) check the correctness of the %s script : the failure criteria may be too large"%self.compare_)
+        print("\t2) check if the number of samples VERROU_DD_NRUNS is sufficient ")
+        print("\t3) if your code contains C++ code (libraries included), check the presence of the valgrind option --demangle=no in the run script")
+
+        dirname = md5Name(delta)
+        print("Directory to analyze: %s"%dirname)
+        failure()
+
+    def allDeltaFailedMsg(self,deltas):
+        print ("FAILURE: when verrou perturbs all parts of the program, its output is still detected as stable.")
+        print ("Suggestions:")
+        print ("\t1) check if the number of samples VERROU_DD_NRUNS is sufficient")
+        print ("\t2) check the correctness of the %s script : the failure criteria may be too large"%self.compare_)
+        print ("\t3) set the env variable VERROU_DD_UNSAFE : be careful it is realy unsafe")
+
+        dirname = md5Name(deltas)
+        print("Directory to analyze: %s"%dirname)
+        failure()
+
+
+    def noDeltaSucceedMsg(self,deltas=[]):
+        print("FAILURE: the comparison between verrou with activated symbols in nearest mode (ref) and verrou without activated symbols failed")
+
+        print("Suggestions:")
+        print("\t1) check the libm library is correctly excluded")
+        print("\t2) check if reproducibilty discrepancies are larger than the failure criteria of the script %s"%self.compare_)
+        failure()
+
+    def reference(self):
+        retval = runCmd([self.run_, self.ref_],
+                        os.path.join(self.ref_,"dd"),
+                        self.referenceRunEnv())
+        assert retval == 0, "Error during reference run"
+
+    def getDelta0(self):
+        with open(os.path.join(self.ref_ ,self.getDeltaFileName()), "r") as f:
+            return f.readlines()
+
+
+    def genExcludeIncludeFile(self, dirname, deltas, include=False, exclude=False):
+        """Generate the *.exclude and *.include file in dirname rep from deltas"""
+        excludes=self.getDelta0()
+        dd=self.getDeltaFileName()
+
+        if include:
+            with open(os.path.join(dirname,dd+".include"), "w") as f:
+                for d in deltas:
+                    f.write(d)
+
+        if exclude:
+            with open(os.path.join(dirname,dd+".exclude"), "w") as f:
+                for d in deltas:
+                    excludes.remove(d)
+
+                for line in excludes:
+                    f.write(line)
+
+
+    def _test(self, deltas,nbRun=None):
+        if nbRun==None:
+            nbRun=self.config_.get_nbRUN()
+
+        dirname=os.path.join(self.prefix_, md5Name(deltas))
+        if not os.path.exists(dirname):
+            os.makedirs(dirname)
+            self.genExcludeIncludeFile(dirname, deltas, include=True, exclude=True)
+
+        vT=verrouTask(dirname, self.ref_, self.run_, self.compare_ ,nbRun, self.config_.get_maxNbPROC() , self.sampleRunEnv(dirname))
+
+        return vT.run()
+
diff --git a/verrou/pyTools/cmpCov.py b/verrou/pyTools/cmpCov.py
new file mode 100755
index 0000000000000000000000000000000000000000..bf88319507fb954c19347c75b60871520c2fb573
--- /dev/null
+++ b/verrou/pyTools/cmpCov.py
@@ -0,0 +1,651 @@
+#!/usr/bin/env python3
+import re
+import sys
+from operator import itemgetter, attrgetter
+import gzip
+import os
+
+
+class openGz:
+    """ Class to read/write  gzip file or ascii file """
+    def __init__(self,name, mode="r", compress=None):
+        self.name=name
+        if os.path.exists(name+".gz") and compress==None:
+            self.name=name+".gz"
+
+        if (self.name[-3:]==".gz" and compress==None) or compress==True:
+            self.compress=True
+            self.handler=gzip.open(self.name, mode)
+        else:
+            self.compress=False
+            self.handler=open(self.name, mode)
+
+    def readline(self):
+        if self.compress:
+            return self.handler.readline().decode("ascii")
+        else:
+            return self.handler.readline()
+    def readlines(self):
+        if self.compress:
+            return [line.decode("ascii") for line in self.handler.readlines()]
+        else:
+            return self.handler.readlines()
+
+
+    def write(self, line):
+        self.handler.write(line)
+
+
+class bbInfoReader:
+    """ Class to read trace_bb_info_log-PID(.gz) file and to provide a string describing
+    a basic bloc defined by an index (called addr) from the MARK (debug symbol)
+
+    After init the usual call are :
+      .compressMarksWithoutSym(addr)
+      .getStrToPrint(addr)
+
+    """
+    def __init__(self,fileName):
+        self.read(fileName)
+
+    def read(self,fileName):
+
+        self.data={}
+        self.dataMax={}
+        self.dataCorrupted={}
+        regularExp=re.compile("([0-9]+) : (.*) : (\S*) : ([0-9]+)")
+        fileHandler=openGz(fileName)
+
+        line=fileHandler.readline()
+        counter=0
+        while not line in [None, ''] :
+            m=(regularExp.match(line.strip()))
+            if m==None :
+                print("error read fileName line:",[line])
+                sys.exit()
+            addr, sym, sourceFile, lineNum= m.groups()
+            if addr in self.data:
+                if not (sym,sourceFile,lineNum) in self.data[addr]:
+                    self.data[addr]+=[(sym,sourceFile,lineNum)]
+                if self.dataMax[addr]+1!= counter:
+                    self.dataCorrupted[addr]=True
+                self.dataMax[addr]=counter
+            else:
+                self.data[addr]=[(sym,sourceFile,lineNum)]
+                self.dataMax[addr]=counter
+                self.dataCorrupted[addr]=False
+            counter+=1
+            line=fileHandler.readline()
+
+
+    def compressMarks(self, lineMarkInfoTab):
+        lineToTreat=lineMarkInfoTab
+        res=""
+        while len(lineToTreat)!=0:
+            symName=lineToTreat[0][0]
+            select=[(x[1],x[2]) for x in lineToTreat if x[0]==symName ]
+            lineToTreat=[x for x in lineToTreat if x[0]!=symName ]
+            res+=" "+symName +"["+self.compressFileNames(select)+"] |"
+        return res[0:-1]
+
+    def compressMarksWithoutSym(self, addr):
+        select=[(x[1],x[2]) for x in self.data[addr]  ]
+        return self.compressFileNames(select)
+
+    def compressFileNames(self, tabFile):
+        tabToTreat=tabFile
+        res=""
+        while len(tabToTreat)!=0:
+            fileName=tabToTreat[0][0]
+            select=[(x[1]) for x in tabToTreat if x[0]==fileName ]
+            tabToTreat=[x for x in tabToTreat if x[0]!=fileName ]
+            res+=fileName +"("+self.compressLine(select)+")"
+        return res
+
+    def compressLine(self, lineTab):
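+        # Example: compressLine(["10","11","12","15"]) returns "10-12,15".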
+        res=""
+        intTab=[int(x) for x in lineTab]
+        while len(intTab)!=0:
+            begin=intTab[0]
+            nbSuccessor=0
+            for i in range(len(intTab))[1:]:
+                if intTab[i]==begin+i:
+                    nbSuccessor+=1
+                else:
+                    break
+            if nbSuccessor==0:
+                res+=str(begin)+","
+            else:
+                res+=str(begin)+"-"+str(begin+nbSuccessor)+","
+            intTab=intTab[nbSuccessor+1:]
+        return res[0:-1]
+
+    def getStrToPrint(self, addr):
+        return self.compressMarks(self.data[addr])
+
+    def isCorrupted(self,addr):
+        return self.dataCorrupted[addr]
+
+    def print(self):
+        for addr in self.data:
+            print(self.compressMarks(self.data[addr]))
+
+    def addrToIgnore(self, addr, ignoreList):
+        listOfFile=[fileName for sym,fileName,num in self.data[addr]]
+        for fileName in listOfFile:
+            if fileName in ignoreList:
+                return True
+        return False
+
+    def getListOfSym(self,addr):
+        return list(set([sym for sym,fileName,num in self.data[addr]]))
+
+class traceReader:
+
+    def __init__(self,pid):
+        self.trace=openGz("trace_bb_trace.log-"+str(pid))
+        self.traceOut=openGz("trace_bb_trace.log-"+str(pid)+"-post","w")
+        self.bbInfo=bbInfoReader("trace_bb_info.log-"+str(pid))
+
+    def readLine(self,comment=True):
+        addr=self.trace.readline().strip()
+        if addr==None or addr=="":
+            return None
+
+        if not (self.bbInfo.addrToIgnore(addr, self.ignoreList ) or self.bbInfo.isCorrupted(addr)):
+            if comment:
+                return addr + " " +self.bbInfo.getStrToPrint(addr)
+            else:
+                return addr
+        return ""
+
+    def writeFilteredAndComment(self,ignoreList=['vfprintf.c','printf_fp.c','rounding-mode.h']):
+        self.ignoreList=ignoreList
+        line=self.readLine()
+        while not line in [None]:
+            if line !="":
+                self.traceOut.write(line+"\n")
+            line=self.readLine()
+
+
+
+class cmpTools:
+    def __init__(self, pid1, pid2):
+        self.bbInfo1=bbInfoReader("trace_bb_info.log-"+str(pid1))
+        self.bbInfo2=bbInfoReader("trace_bb_info.log-"+str(pid2))
+
+        self.bbInfo1.print()
+        self.bbInfo2.print()
+        self.trace1=open("trace_bb_trace.log-"+str(pid1))
+        self.trace2=open("trace_bb_trace.log-"+str(pid2))
+        self.context=2
+        self.bufferContext=[None]*self.context
+
+
+    def writeLines(self, addr1,num1,addr2,num2):
+        toPrint1=self.bbInfo1.getStrToPrint(addr1)
+        toPrint2=self.bbInfo2.getStrToPrint(addr2)
+        if addr1==addr2:
+            resLine= "num: "+ str(num1)+"/"+str(num2) + " == " + addr1+ "\t"
+            if(toPrint1==toPrint2):
+                resLine+= toPrint1
+            else:
+                if self.bbInfo1.isCorrupted(addr1) and self.bbInfo2.isCorrupted(addr2):
+                    print("corrupted \n")
+                    print('toPrint1:',toPrint1)
+                    print('toPrint2:',toPrint2)
+                else:
+                    print("Serious problem")
+                    sys.exit()
+
+        if addr1!=addr2:
+            resLine= "num: "+ str(num1)+"/"+str(num2)+" " + addr1+" != " + addr2+ "\n" +toPrint1 +"\n"+toPrint2
+        print(resLine)
+
+    def printContext(self):
+        for i in range(self.context):
+            buffer=self.bufferContext[self.context-i-1]
+            if buffer !=None:
+                (addr1,lineNum1, addr2, lineNum2)=buffer
+                self.writeLines(addr1,lineNum1, addr2, lineNum2)
+
+    def readUntilDiffer(self, ignoreList=[]):
+        self.ignoreList=ignoreList
+        addr1, addr2=("","")
+        lineNum1,lineNum2=(0,0)
+#        lineNumInc1, lineNumInc2=(0,0)
+        while addr1==addr2:
+            self.bufferContext=[(addr1,lineNum1, addr2, lineNum2)]+self.bufferContext[0:-1]
+            addr1,lineNumInc1=self.read(self.trace1,self.bbInfo1)
+            addr2,lineNumInc2=self.read(self.trace2,self.bbInfo2)
+            lineNum1+=lineNumInc1
+            lineNum2+=lineNumInc2
+
+            if lineNum1 % 1000 ==0:
+                print( "lineNum1: ", lineNum1)
+
+        self.printContext()
+        self.writeLines( addr1, lineNum1, addr2,lineNum2)
+        #        print(self.compressMarks(self.data["587F00C0"]))
+
+    def read(self, traceFile, bbInfo):
+        addr=traceFile.readline().strip()
+        counter=1
+        while not addr in ["", None]:
+            if not (bbInfo.addrToIgnore(addr, self.ignoreList ) or bbInfo.isCorrupted(addr)):
+                return (addr,counter)
+            addr=traceFile.readline().strip()
+            counter+=1
+        return (None,counter)
+
+
+class covReader:
+    def __init__(self,pid, rep):
+        self.pid=pid
+        self.rep=rep
+        self.bbInfo=bbInfoReader(self.rep+"/trace_bb_info.log-"+str(pid))
+        covFile=openGz(self.rep+"/trace_bb_cov.log-"+str(pid))
+
+        self.cov=self.readCov(covFile)
+
+    def readCov(self, cov):
+        res=[]
+        currentNumber=-1
+        dictRes={}
+        while True:
+            line=cov.readline()
+            if line in [None,""]:
+                if currentNumber!=-1:
+                    res+=[dictRes]
+                break
+            if line=="cover-"+str(currentNumber+1)+"\n":
+                if currentNumber!=-1:
+                    res+=[dictRes]
+                currentNumber+=1
+                dictRes={}
+                continue
+            (index,sep, num)=(line).strip().partition(":")
+            dictRes[index]=int(num)
+
+        return res
+
+    def writePartialCover(self,filenamePrefix=""):
+
+        for num in range(len(self.cov)):
+            resTab=[(index,num,self.bbInfo.getListOfSym(index),self.bbInfo.compressMarksWithoutSym(index)) for index,num in self.cov[num].items() ]
+            resTab.sort( key= itemgetter(2,3,0)) # 2 sym  3 compress string 0 index
+
+            handler=openGz(self.rep+"/"+filenamePrefix+"cover"+str(num)+"-"+str(self.pid),"w")
+            for (index,count,sym, strBB) in resTab:
+                handler.write("%d\t: %s\n"%(count,strBB))
+
+
+class covMerge:
+    """Class CovMerge allows to merge covers with counter of (success/failure) x (equal cover, diff cover).
+    The method writePartialCover will compute from this counter a correlation coefficient for each covered bb"""
+
+    def __init__(self, covRef):
+        self.covRef=covRef
+        print("covMerged with reference : %s"%(self.covRef.rep))
+        self.init()
+
+    def initCounterGlobal(self):
+        self.success=0
+        self.fail=0
+
+    def incCounterGlobal(self,status):
+        if status:
+            self.success+=1
+        else:
+            self.fail+=1
+
+    def initCounterLine(self):
+        return (0,self.fail,0,self.success) #NbFailDiff, NbFailEqual, NbSuccessDiff, NbSuccessEqual
+    def incCounterLine(self, counter, equalCounter, status):
+        diff=0
+        equal=0
+        if equalCounter:
+            equal=1
+        else:
+            diff=1
+
+        if status:
+            return (counter[0], counter[1] , counter[2]+diff, counter[3]+equal)
+        else:
+            return (counter[0]+diff, counter[1]+equal, counter[2], counter[3])
+
+    def init(self):
+        #init global counter
+        self.initCounterGlobal()
+        #load the first cover
+        self.covMerged=[]
+        for num in range(len(self.covRef.cov)): #loop over snapshots
+            mergeDict={}
+            for index,num in self.covRef.cov[num].items(): #loop over basic-bloc
+                #get basic-bloc information
+                sym=self.covRef.bbInfo.getListOfSym(index)[0]
+                strLine=self.covRef.bbInfo.compressMarksWithoutSym(index)
+                if not (sym,strLine) in mergeDict:
+                    mergeDict[(sym,strLine)]=(num,self.initCounterLine())#bb not yet seen
+                else:
+                    mergeDict[(sym,strLine)]=(mergeDict[(sym,strLine)][0]+num,self.initCounterLine()) #bb already seen
+
+            self.covMerged+=[mergeDict]
+
+
+    def addMerge(self, cov, status):
+
+        if len(cov.cov) != len(self.covRef.cov):
+            print("addMerge : problem with the number of sync point")
+            sys.exit()
+        for num in range(len(cov.cov)): #loop over snapshots
+
+            #use an intermediate resDic to collapse bb with the same (sym,strLine) pair
+            resDic={}
+            for index,numLine in cov.cov[num].items():
+                sym=cov.bbInfo.getListOfSym(index)[0]
+                strLine=cov.bbInfo.compressMarksWithoutSym(index)
+                if not (sym,strLine) in resDic:
+                    resDic[(sym,strLine)]=numLine
+                else:
+                    resDic[(sym,strLine)]+=numLine
+            #loop over collapsed bb
+            for ((sym, strLine), numLine) in resDic.items():
+                if (sym,strLine)in self.covMerged[num]:
+                    merged=self.covMerged[num][(sym,strLine)]
+                    self.covMerged[num][(sym,strLine)]=(merged[0], self.incCounterLine(merged[1], merged[0]==numLine,status))
+                else:
+                    merged=(0, self.incCounterLine(self.initCounterLine(), False,status))
+                    self.covMerged[num][(sym,strLine)]=merged
+            #loop over bb not seen in this run
+            for ((sym,strLine),merged) in self.covMerged[num].items():
+                if not (sym,strLine)  in resDic:
+                    self.covMerged[num][(sym,strLine)]=(merged[0], self.incCounterLine(merged[1], False,status))
+        #update the global counter
+        self.incCounterGlobal(status)
+
+
+    def indicatorFromCounter(self, localCounter, name=""):
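+        # Illustrative: with self.fail==3 and self.success==2, a bb whose counter is
+        # (nbFailDiff=3, nbFailEqual=0, nbSuccessDiff=0, nbSuccessEqual=2) gets 1.0 both for
+        # "standard" ((3+2)/5) and "biased" (0.5*(3/3+2/2)): coverage differences for that bb
+        # are perfectly correlated with the failure status.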
+        nbFailDiff=   localCounter[0]
+        nbFailEqual=  localCounter[1]
+        nbSuccessDiff=localCounter[2]
+        nbSuccessEqual=localCounter[3]
+
+        nbSuccess=self.success
+        nbFail=   self.fail
+
+        if nbFailDiff +nbFailEqual !=nbFail:
+            print("Assert Fail error")
+            print("nbFail:",nbFail)
+            print("nbFailDiff:",nbFailDiff)
+            print("nbFailEqual:",nbFailEqual)
+            return None
+
+
+        if nbSuccessDiff +nbSuccessEqual !=nbSuccess:
+            print("Assert Success error")
+            print("nbSuccess:",nbSuccess)
+            print("nbSuccessDiff:",nbSuccessDiff)
+            print("nbSuccessEqual:",nbSuccessEqual)
+            return None
+
+
+        if name=="standard":
+            return float(nbFailDiff + nbSuccessEqual)/ float(nbSuccess+nbFail)
+
+        if name=="biased":
+            return 0.5* (float(nbFailDiff)/ float(nbFail) + float(nbSuccessEqual)/float(nbSuccess))
+
+        return None
+
+
+
+    def writePartialCover(self,rep=".",filenamePrefix="", typeIndicator="standard"):
+
+        for num in range(len(self.covMerged)): #loop over snapshots
+            maxIndicator=0
+            handler=openGz(rep+"/"+filenamePrefix+"coverMerged"+str(num),"w")
+            partialCovMerged=self.covMerged[num]
+
+            resTab=[(sym, strLine,
+                     partialCovMerged[(sym,strLine)][0],
+                     self.indicatorFromCounter(partialCovMerged[(sym,strLine)][1], typeIndicator ) ) for ((sym,strLine),counter) in partialCovMerged.items() ]
+
+            resTab.sort( key= itemgetter(0,1)) # 2 sym  3 compress string 0 index
+            for i in range(len(resTab)): #loop over sorted cover item
+                sym,strLine,numLineRef,indicator=resTab[i]
+                if indicator==None:
+                    print("resTab[i]:",resTab[i])
+                    print("partialCovMerged:",partialCovMerged[(sym,strLine)])
+                    handler.write("none\t: %s\n"%(strLine))
+                else:
+                    maxIndicator=max(maxIndicator,indicator)
+                    handler.write("%.2f\t: %s\n"%(indicator,strLine))
+
+            print("Num: ", num , "\tMaxindicator: ", maxIndicator)
+
+class statusReader:
+    """Class to provide the status of a run"""
+    # should maybe be a function instead of a class
+    def __init__(self,pid, rep):
+        self.pid=pid
+        self.rep=rep
+        self.isSuccess=None
+        self.read()
+
+    def read(self):
+        pathName=os.path.join(self.rep, "returnVal")
+        if os.path.exists(pathName):
+            try:
+                value=int(open(pathName).readline().strip())
+                if value==0:
+                    self.isSuccess=True
+                else:
+                    self.isSuccess=False
+            except Exception:
+                print("Error while reading "+pathName)
+                self.isSuccess=None
+        else:
+            if self.rep.endswith("ref"):
+                print("Consider ref as a success")
+                self.isSuccess=True
+            else:
+                self.isSuccess=None
+
+    def getStatus(self):
+        return self.isSuccess
+
+
+class cmpToolsCov:
+    """Class to write partial cover of several executions :
+    with writePartialCover the object write a partial cover for each execution
+    with mergedCov the object write one merged partial cover with correlation information"""
+
+    def __init__(self, tabPidRep):
+        self.tabPidRep=tabPidRep
+
+
+    def writePartialCover(self, filenamePrefix=""):
+        """Write partial cover for each execution (defined by a tab of pid)"""
+        for i in range(len(self.tabPidRep)):
+            pid,rep=self.tabPidRep[i]
+            cov=covReader(pid,rep)
+            cov.writePartialCover(filenamePrefix)
+
+    # def writeStatus(self):
+    #     for i in range(len(self.tabPidRep)):
+    #         pid,rep=self.tabPidRep[i]
+    #         status=statusReader(pid,rep)
+    #         success=status.getStatus()
+    #         print( rep+":" + str(success))
+
+    def countStatus(self):
+        """ Count the number of Success/Fail"""
+        nbSuccess=0
+        nbFail=0
+        listPidRepoIgnore=[]
+        for i in range(len(self.tabPidRep)):
+            pid,rep=self.tabPidRep[i]
+            status=statusReader(pid,rep)
+            success=status.getStatus()
+            if success==None:
+                listPidRepoIgnore+=[(pid,rep)]
+            else:
+                if success:
+                    nbSuccess+=1
+                else:
+                    nbFail+=1
+        for (pid,rep) in listPidRepoIgnore:
+            print("directory ignored : "+rep)
+            self.tabPidRep.remove((pid,rep))
+
+        return (nbSuccess, nbFail)
+
+
+    def findRef(self, pattern="ref", optionalPattern=None):
+        "return the index of the reference (required for correlation)"
+        if optionalPattern!=None:
+            for index in range(len(self.tabPidRep)):
+                (pid,rep)=self.tabPidRep[index]
+                status=statusReader(pid,rep)
+                success=status.getStatus()
+                if rep.endswith(pattern) and optionalPattern in rep and success:
+                    return index
+            print('Optional pattern not found')
+        for index in range(len(self.tabPidRep)):
+            (pid,rep)=self.tabPidRep[index]
+            status=statusReader(pid,rep)
+            success=status.getStatus()
+            if rep.endswith(pattern) and success:
+                return index
+        print("Warning : pattern not found" )
+        print("Switch to first Success reference selection")
+        for index in range(len(self.tabPidRep)):
+            pid,rep=self.tabPidRep[index]
+            status=statusReader(pid,rep)
+            success=status.getStatus()
+            if success:
+                return index
+        print("Error fail only : cmpToolsCov is ineffective" )
+        sys.exit(42)
+
+    def writeMergedCov(self):
+        """Write merged Cov with correlation indice  between coverage difference and sucess/failure status"""
+
+        #check the presence of success and failure
+        (nbSuccess, nbFail)=self.countStatus()
+        print("NbSuccess: %d \t nbFail %d"%(nbSuccess,nbFail))
+        if nbFail==0 or nbSuccess==0:
+            print("mergeCov need Success/Fail partition")
+            sys.exit()
+
+        #get the refIndex
+        refIndex=self.findRef(pattern="ref", optionalPattern="dd.line/ref")
+
+        covMerged= covMerge(covReader(*self.tabPidRep[refIndex]))
+        #Loop with addMerge to reduce memory peak
+
+        printIndex=[int(float(p) * len(self.tabPidRep) /100.)  for p in (list(range(0,100,10))+[1,5])]
+        printIndex +=[1,  len(self.tabPidRep)-1]
+
+        for i in range(len(self.tabPidRep)):
+            if i==refIndex:
+                continue
+            pid,rep=self.tabPidRep[i]
+            covMerged.addMerge(covReader(pid,rep), statusReader(pid,rep).getStatus())
+            if i in printIndex:
+                pourcent=float(i+1)/ float(len(self.tabPidRep)-1)
+                if i >=refIndex:
+                    pourcent=float(i)/ float(len(self.tabPidRep)-1)
+                print( "%.1f"%(pourcent*100)    +"% of coverage data merged")
+        #covMerged.writePartialCover(typeIndicator="standard")
+        covMerged.writePartialCover(typeIndicator="biased")
+
+
+
+def extractPidRep(fileName):
+    """extract the pid d'un fichier de la form trace_bb_cov.log-PID[.gz]"""
+    rep=os.path.dirname(fileName)
+    if rep=="":
+        rep="."
+    baseFile=os.path.basename(fileName)
+    begin="trace_bb_cov.log-"
+    if baseFile.startswith(begin):
+        pid=int((baseFile.replace(begin,'')).replace(".gz",""))
+        return (pid,rep)
+    return None
+
+def selectPidFromFile(fileNameTab):
+    """Return a list of pid from a list of file by using extractPidRep"""
+    return [extractPidRep(fileName)  for fileName in fileNameTab]
+
+
+
+def usage():
+    print ("Usage: genCov.py [--help] [--genCov] [--genCovCorrelation]  file list of type trace_bb_cov.log* " )
+
+if __name__=="__main__":
+    options=[arg for arg in sys.argv[1:]]
+
+    if "--help" in options:
+        usage()
+        sys.exit()
+
+    #cover
+    genCov=False
+    if "--genCov" in options:
+        options.remove("--genCov")
+        genCov=True
+
+    #cover with correlation
+    genCovCorrelation=False
+    if "--genCovCorrelation" in options:
+        options.remove("--genCovCorrelation")
+        genCovCorrelation=True
+
+    #default configuration : genCov
+    if not (genCovCorrelation or genCov):
+        genCov=True
+
+    #select default
+    optionName="--select-default"
+    selectOptions=list(filter(lambda x: x.startswith(optionName),options))
+    for option in selectOptions:
+        options.remove(option)
+
+    selectOption=False
+    if len(selectOptions)>=1:
+        selectOption=selectOptions[-1]
+        selectOption=selectOption.replace(optionName,"")
+        if selectOption.startswith("="):
+            selectOption=int(selectOption[1:])
+        if selectOption=="":
+            selectOption=True
+
+
+    addFile=[]
+    if selectOption!=False:
+        import pathlib
+        addFile=[str(x) for x in pathlib.Path(".").rglob("trace_bb_cov.log*")]
+
+        if selectOption!=True:
+            addFile=addFile[0:selectOption]
+    for option in options:
+        if option not in addFile:
+            addFile+=[option]
+    listOfFile=addFile
+
+    if len(listOfFile)<1 and genCov:
+        usage()
+        print("--genCov required at least 1 argument")
+        sys.exit()
+    if len(listOfFile)<2 and genCovCorrelation:
+        usage()
+        print("--genCovCorrelation required at least 2 arguments")
+        sys.exit()
+
+    cmp=cmpToolsCov(selectPidFromFile(listOfFile))
+    if genCov:
+        cmp.writePartialCover()
+    if genCovCorrelation:
+        cmp.writeMergedCov()
diff --git a/verrou/pyTools/dd_config.py b/verrou/pyTools/dd_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f8fcc113b13b8aea22d1585a0bb38611f464519
--- /dev/null
+++ b/verrou/pyTools/dd_config.py
@@ -0,0 +1,179 @@
+import os
+import math
+import sys
+
+def exponentialRange(nbRun):
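+    # Example: exponentialRange(5) returns [1, 2, 5] (nbRun repeatedly halved, largest value last).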
+    tab=[int(nbRun / (2**i)) for i in range(1+int(math.floor(math.log(nbRun,2)))) ]
+    tab.reverse()
+    return tab
+
+
+class ddConfig:
+
+    def __init__(self, argv, environ,config_keys=["INTERFLOP"]):
+        self.defaultValue()
+        self.config_keys=config_keys
+        self.parseArgv(argv)
+        for config_key in self.config_keys:
+            self.read_environ(environ, config_key)
+
+    def defaultValue(self):
+        self.nbRUN=5
+        self.maxNbPROC=None
+        self.ddAlgo="rddmin"
+        self.rddminVariant="d"
+        self.param_rddmin_tab="exp"
+        self.param_dicho_tab="half"
+        self.splitGranularity=2
+        self.ddSym=False
+        self.ddQuiet=False
+
+    def parseArgv(self,argv):
+        if "-h" in argv or "--help" in argv:
+            print(self.get_EnvDoc(self.config_keys[-1]))
+            self.failure()
+
+        if len(argv)!=3:
+            self.usageCmd()
+            self.failure()
+
+        self.runScript=self.checkScriptPath(argv[1])
+        self.cmpScript=self.checkScriptPath(argv[2])
+
+    def usageCmd(self):
+        print("Usage: "+ sys.argv[0] + " runScript cmpScript")
+
+    def failure(self):
+        sys.exit(42)
+
+    def checkScriptPath(self,fpath):
+        if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
+            return os.path.abspath(fpath)
+        else:
+            print("Invalid Cmd:"+str(sys.argv))
+            print(fpath + " should be executable")
+            self.usageCmd()
+            self.failure()
+
+    def get_runScript(self):
+        return self.runScript
+
+    def get_cmpScript(self):
+        return self.cmpScript
+
+    def read_environ(self,environ, PREFIX):
+        self.environ=environ #configuration to prepare the call to readOneOption
+        self.PREFIX=PREFIX
+        self.readOneOption("nbRUN", "int", "DD_NRUNS")
+        self.readOneOption("maxNbPROC", "int", "DD_NUM_THREADS")
+        if self.maxNbPROC!=None:
+            if self.maxNbPROC < self.nbRUN:
+                print("Due due implementation limitation (nbRun <=maxNbPROC or maxNbPROC=1): maxNbPROC unset\n")
+                self.maxNbPROC=None
+
+        self.readOneOption("ddAlgo", "string", "DD_ALGO", ["ddmax", "rddmin"])
+
+        self.readOneOption("rddminVariant", "string","DD_RDDMIN", ["s", "stoch", "d", "dicho", "", "strict"])
+        if self.rddminVariant=="stoch":
+            self.rddminVariant="s"
+        if self.rddminVariant=="dicho":
+            self.rddminVariant="d"
+        if self.rddminVariant=="strict":
+            self.rddminVariant=""
+
+        self.readOneOption("param_rddmin_tab", "string", "DD_RDDMIN_TAB", ["exp", "all", "single"])
+        self.readOneOption("param_dicho_tab", "int/string", "DD_DICHO_TAB" , ["exp", "all", "half", "single"])
+        self.readOneOption("splitGranularity", "int", "DD_DICHO_GRANULARITY")
+        self.readOneOption("ddSym", "bool", "DD_SYM")
+        self.readOneOption("ddQuiet", "bool", "DD_QUIET")
+
+    def readOneOption(self,attribut,conv_type ,key_name, acceptedValue=None):
+        value=False
+        try:
+            if conv_type=="int":
+                value = int(self.environ[self.PREFIX+"_"+key_name])
+            else:
+                value = self.environ[self.PREFIX+"_"+key_name]
+
+            if conv_type=="bool":
+                value=True
+
+            if acceptedValue==None :
+                setattr(self, attribut, value)
+            else:
+                if value in acceptedValue:
+                    setattr(self, attribut, value)
+                elif conv_type=="int/string":
+                    try:
+                        setattr(self, attribut, int(value))
+                    except ValueError:
+                        print("Error : "+ self.PREFIX+"_"+key_name+ " should be in "+str(acceptedValue) +" or be an int value")
+                        self.failure()
+                else:
+                    print("Error : "+ self.PREFIX+"_"+key_name+ " should be in "+str(acceptedValue))
+                    self.failure()
+        except KeyError:
+            pass
+
+
+    def get_SymOrLine(self):
+        if self.ddSym:
+            return "sym"
+        else:
+            return "line"
+
+    def get_splitGranularity(self):
+        return self.splitGranularity
+
+    def get_ddAlgo(self):
+        if self.ddAlgo.endswith("rddmin"):
+            return self.rddminVariant+self.ddAlgo
+        return self.ddAlgo
+
+    def get_maxNbPROC(self):
+        return self.maxNbPROC
+
+    def get_nbRUN(self):
+        return self.nbRUN
+
+    def get_quiet(self):
+        return self.ddQuiet
+
+    def get_rddMinTab(self):
+        rddMinTab=None
+        if self.param_rddmin_tab=="exp":
+            rddMinTab=exponentialRange(self.nbRUN)
+        if self.param_rddmin_tab=="all":
+            rddMinTab=range(1,self.nbRUN+1)
+        if self.param_rddmin_tab=="single":
+            rddMinTab=[self.nbRUN]
+        return rddMinTab
+
+    def get_splitTab(self):
+        splitTab=None
+        if self.param_dicho_tab=="exp":
+            splitTab=exponentialRange(self.nbRUN)
+        if self.param_dicho_tab=="all":
+            splitTab=range(self.nbRUN)
+        if self.param_dicho_tab=="single":
+            splitTab=[self.nbRUN]
+        if self.param_dicho_tab=="half":
+            splitTab=[ int(math.ceil(self.nbRUN / 2.))]
+        if self.param_dicho_tab in range(1, self.nbRUN+1):
+            splitTab=[self.param_dicho_tab]
+        return splitTab
+
+
+    def get_EnvDoc(self,PREFIX="INTERFLOP"):
+        doc="""List of en variable :
+        PREFIXENV_DD_NRUNS : int (default:5)
+        PREFIXENV_DD_NUM_THREADS : int (default None)
+        PREFIXENV_DD_ALGO : in ["ddmax", "rddmin"] (default "rddmin")
+        PREFIXENV_DD_RDDMIN : in ["s", "stoch", "dicho" ,"d", "strict",""] (default "d")
+        PREFIXENV_DD_RDDMIN_TAB : in ["exp", "all" ,"single"] (default "exp")
+        PREFIXENV_DD_DICHO_TAB : in ["exp", "all" ,"single", "half"] or int (default "half")
+        PREFIXENV_DD_DICHO_GRANULARITY : int
+        PREFIXENV_DD_QUIET : set or not (default not)
+        PREFIXENV_DD_SYM : set or not (default not)
+        """
+        return doc.replace("PREFIXENV_",PREFIX+"_")
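+
+# Illustrative usage (run.sh and cmp.sh are hypothetical user scripts): the delta-debugging
+# front-ends read these options from the environment, e.g.
+#   INTERFLOP_DD_NRUNS=8 INTERFLOP_DD_ALGO=rddmin verrou_dd_line ./run.sh ./cmp.sh
+# (the verrou_dd_* tools also accept the VERROU_ prefix for the same variables).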
diff --git a/verrou/pyTools/verrou_dd_line b/verrou/pyTools/verrou_dd_line
new file mode 100755
index 0000000000000000000000000000000000000000..e504b09c8c62a14e60c3bda78a7ec03ab9ca1931
--- /dev/null
+++ b/verrou/pyTools/verrou_dd_line
@@ -0,0 +1,43 @@
+#!/bin/sh
+''''exec python3 -u "$0" "$@" #'''
+# This hack is an ugly but portable alternative to #!/usr/bin/env -S python3 -u
+
+
+import sys
+import os
+from valgrind import dd_config
+from valgrind import DD_stoch
+from valgrind import DD_exec_stat
+
+
+class DDline(DD_stoch.DDStoch):
+    def __init__(self, config, prefix="dd.line"):
+        DD_stoch.DDStoch.__init__(self, config, prefix)
+
+    def referenceRunEnv(self):
+        return {"VERROU_ROUNDING_MODE": "nearest",
+                "VERROU_MCA_MODE": "ieee",
+                "VERROU_GEN_SOURCE":   os.path.join(self.ref_,"dd.line.%p")}
+
+    def isFileValidToMerge(self, name):
+        return name.startswith("dd.line.")
+    
+    def getDeltaFileName(self):
+        return "dd.line"
+
+    def sampleRunEnv(self,dirName):
+        return {"VERROU_SOURCE": os.path.join(dirName,self.getDeltaFileName() +".include")}
+
+    def coerce(self, delta_config):
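+        # Illustrative: a dd.line entry of the form "foo.c\t42\tmySym" (hypothetical
+        # source/line/symbol fields) is rendered as "foo.c:42 (mySym)".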
+        return  "\n  " + "\n  ".join(["%s:%d (%s)" % e for e in
+                                      [(col[0], int(col[1]), col[2]) for col in
+                                       [(l.strip()+"\t\t").split("\t") for l in delta_config]]])
+
+
+
+if __name__ == "__main__":
+    et=DD_exec_stat.exec_stat("dd.line")
+    config=dd_config.ddConfig(sys.argv,os.environ, ["INTERFLOP","VERROU"])
+    dd = DDline(config)
+    dd.run()
+    et.terminate()
diff --git a/verrou/pyTools/verrou_dd_sym b/verrou/pyTools/verrou_dd_sym
new file mode 100755
index 0000000000000000000000000000000000000000..68dad2a89007c5f3ba57f5608390cd1568a737fe
--- /dev/null
+++ b/verrou/pyTools/verrou_dd_sym
@@ -0,0 +1,46 @@
+#!/bin/sh
+''''exec python3 -u "$0" "$@" #'''
+# This hack is an ugly but portable alternative to #!/usr/bin/env -S python3 -u
+
+
+import sys
+import os
+from valgrind import dd_config
+from valgrind import DD_stoch
+from valgrind import DD_exec_stat
+
+
+class DDsym(DD_stoch.DDStoch):
+    def __init__(self, config, prefix="dd.sym"):
+        DD_stoch.DDStoch.__init__(self, config, prefix)
+
+    def referenceRunEnv(self):
+        return {"VERROU_ROUNDING_MODE": "nearest",
+                         "VERROU_MCA_MODE": "ieee",
+                         "VERROU_GEN_EXCLUDE":   os.path.join(self.ref_,"dd.sym.%p")}
+
+    def isFileValidToMerge(self, name):
+        return name.startswith("dd.sym.")
+
+    def getDeltaFileName(self):
+        return "dd.sym"
+
+    def sampleRunEnv(self,dirName):
+        return {"VERROU_EXCLUDE": os.path.join(dirName, self.getDeltaFileName() +".exclude")}
+
+    def coerce(self, delta_config):
+        return "\n  " + "  ".join(delta_config)
+
+
+
+
+
+
+
+
+if __name__ == "__main__":
+    et=DD_exec_stat.exec_stat("dd.sym")
+    config=dd_config.ddConfig(sys.argv,os.environ, ["INTERFLOP","VERROU"])
+    dd = DDsym(config)
+    dd.run()
+    et.terminate()
diff --git a/verrou/release.md b/verrou/release.md
new file mode 100644
index 0000000000000000000000000000000000000000..7e2a5c3c5edd866ee28974e8f4bc90513d20c399
--- /dev/null
+++ b/verrou/release.md
@@ -0,0 +1,140 @@
+# Notes about the release process
+
+```
+VERSION=2.2.0
+```
+
+## Update the valgrind patch
+
+```
+perl -pni -e "s/verrou-dev/verrou-${VERSION}/" valgrind.diff
+```
+
+## Update README.md
+
+> **NB:** This is released version X.Y.Z of Verrou, based on Valgrind
+> vX.Y.Z. The development version of Verrou can always be found in
+> the [`master`](https://github.com/edf-hpc/verrou/) branch. For other versions,
+> please consult the list
+> of [releases](https://github.com/edf-hpc/verrou/releases).
+
+
+## Update release notes
+
+- Valgrind version
+- Added features
+- Other changes
+
+- update the Verrou version + release date
+
+    ```
+    perl -pni -e "s/\[UNRELEASED\]/v${VERSION} - $(date +%Y-%m-%d)/" CHANGELOG.md
+    ```
+
+## Update CLO
+
+```
+(cd .. && ./verrou/docs/update-vr-clo)
+git add vr_clo.txt
+```
+
+## Commit in a release branch
+
+1. Prepare the commit
+
+    ```
+    git checkout -b release
+    git add -- CHANGELOG.md README.md valgrind.diff
+    git commit -m "Release v${VERSION}
+      $(perl -n                        \
+        -e '$q=1 if m/^\-\-\-/;'       \
+        -e 'print if ($p and not $q);' \
+        -e '$p=1 if m/^## /;'          \
+        CHANGELOG.md)"
+    git push origin release
+    ```
+
+2. Wait for Travis tests to run
+
+3. Tag the release (the `release` branch is deleted after the merge below)
+
+    ```
+    git tag v${VERSION}
+    git push origin v${VERSION}
+    ```
+
+
+## Merge the released version back in the master branch
+
+
+1. Prepare a merge in the `master` branch
+
+    ```
+    git checkout master
+    git merge --no-ff --no-commit v${VERSION}
+    git reset HEAD README.md valgrind.diff
+    git checkout -- README.md valgrind.diff
+    ```
+
+2. Add an `[UNRELEASED]` section in `CHANGELOG.md`:
+
+    > ## [UNRELEASED]
+    > 
+    > This version is based on Valgrind-3.15.0.
+    > 
+    > ### Added
+    > 
+    > 
+    > ### Changed
+    > 
+    > 
+    > ---
+    > 
+
+3. Commit the merge
+
+    ```
+    git commit -m "Post-release v${VERSION}"
+    git push origin master
+    git branch -d release
+    git push origin :release
+    ```
+
+## Add a release in github
+
+
+- Add a release message extracted from CHANGELOG.md
+
+- Build a tgz archive for the full valgrind+verrou release
+    
+    ```
+    VALGRIND=valgrind-3.15.0
+    cd /tmp
+    wget https://github.com/edf-hpc/verrou/releases/download/valgrind/${VALGRIND}.tar.bz2
+    tar xvpf ${VALGRIND}.tar.bz2
+    mv ${VALGRIND} ${VALGRIND}+verrou-${VERSION}
+    cd ${VALGRIND}+verrou-${VERSION}
+    git clone --branch=v${VERSION} --single-branch https://github.com/edf-hpc/verrou
+    rm -rf verrou/.git
+    patch -p1 <verrou/valgrind.diff
+    cd ..
+    tar cvzf ${VALGRIND}_verrou-${VERSION}.tar.gz ${VALGRIND}+verrou-${VERSION}
+    ```
+
+- Test the archive
+
+    ```
+    cd ${VALGRIND}+verrou-${VERSION}
+    ./autogen.sh
+    ./configure --enable-only64bit --enable-verrou-fma --prefix=$PWD/install
+    make -j4 install
+    source install/env.sh && valgrind --version
+    make -C tests check && make -C verrou check && perl tests/vg_regtest verrou
+    make -C verrou/unitTest
+    ```
+
+## Update the documentation
+
+```
+./verrou/docs/update-docs
+```
diff --git a/verrou/tests/Makefile.am b/verrou/tests/Makefile.am
new file mode 100644
index 0000000000000000000000000000000000000000..ad879997e7302c53d3483433fa0fca724c70ca77
--- /dev/null
+++ b/verrou/tests/Makefile.am
@@ -0,0 +1,25 @@
+
+include $(top_srcdir)/Makefile.tool-tests.am
+
+SUBDIRS = .
+DIST_SUBDIRS = .
+
+dist_noinst_SCRIPTS = filter_stderr
+
+EXTRA_DIST = \
+	clreq.vgtest      clreq.stderr.exp      clreq.stdout.exp \
+	sum1.vgtest       sum1.stderr.exp       sum1.stdout.exp  \
+	sum2.vgtest       sum2.stderr.exp       sum2.stdout.exp  \
+	sum3.vgtest       sum3.stderr.exp       sum3.stdout.exp  \
+	seed.vgtest       seed.stderr.exp       seed.stdout.exp  \
+	naninf-rnd.vgtest naninf-rnd.stderr.exp naninf-rnd.stdout.exp  \
+	accuClreq1.vgtest accuClreq1.stderr.exp accuClreq1.stdout.exp  \
+	accuClreq2.vgtest accuClreq2.stderr.exp accuClreq2.stdout.exp
+
+check_PROGRAMS = clreq accuClreq sum
+
+check_PROGRAMS += naninf
+naninf_SOURCES = naninf.cxx
+
+AM_CFLAGS   += $(AM_FLAG_M3264_PRI)
+AM_CXXFLAGS += $(AM_FLAG_M3264_PRI) -std=c++11
diff --git a/verrou/tests/accuClreq.c b/verrou/tests/accuClreq.c
new file mode 100644
index 0000000000000000000000000000000000000000..486617e35d99c3e20c5ac9b8d4f598d95bff992f
--- /dev/null
+++ b/verrou/tests/accuClreq.c
@@ -0,0 +1,38 @@
+#include "../verrou.h"
+#include <stdio.h>
+
+float compute () {
+  float res=0;
+  int i;
+  for(i=0; i< 1000; i++){
+    res+=(float)0.1;
+  }
+  return res-100 ;
+}
+
+int main () {
+  
+
+  float res_init=compute();
+  // Uninstrumented part
+  VERROU_STOP_INSTRUMENTATION;
+  float res_uninst=compute();
+  VERROU_START_INSTRUMENTATION;
+
+  float res_end=compute();
+  
+  printf ("%f %f %f\n", res_init, res_uninst, res_end);
+
+    
+  if (res_init == res_uninst || res_init==res_end ){    
+    printf ("OK\n");
+  }else{
+    printf( "KO\n");
+  }
+  if(res_end != res_uninst){
+    printf("OK\n");
+  }else{
+    printf( "KO\n");
+  }
+  return RUNNING_ON_VALGRIND;
+}
diff --git a/verrou/tests/accuClreq1.stderr.exp b/verrou/tests/accuClreq1.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..d3ec9ac2114fd0231f9d8abc629c99584e6ec576
--- /dev/null
+++ b/verrou/tests/accuClreq1.stderr.exp
@@ -0,0 +1,34 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating UPWARD rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                     3000                     2000          ( 67%)
+  `- flt                     3000                     2000      ( 67%)
+      `- llo                     3000                     2000  ( 67%)
+ ---------------------------------------------------------------------
+ sub                        3                        2          ( 67%)
+  `- flt                        3                        2      ( 67%)
+      `- llo                        3                        2  ( 67%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/accuClreq1.stdout.exp b/verrou/tests/accuClreq1.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..66e1c2dcf5e2c3b21c9171bc698a0421b47c01be
--- /dev/null
+++ b/verrou/tests/accuClreq1.stdout.exp
@@ -0,0 +1,3 @@
+0.003044 -0.000954 0.003044
+OK
+OK
diff --git a/verrou/tests/accuClreq1.vgtest b/verrou/tests/accuClreq1.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..707a99ec168e874544ada30cab31c950fe0af631
--- /dev/null
+++ b/verrou/tests/accuClreq1.vgtest
@@ -0,0 +1,4 @@
+prog: accuClreq
+args: 1
+vgopts: --rounding-mode=upward
+stderr_filter_args: -seed -cnt-cmp-conv -backend-version
diff --git a/verrou/tests/accuClreq2.stderr.exp b/verrou/tests/accuClreq2.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..dd6cebee9704d77895f6a85a11d57dbefd5f0c3b
--- /dev/null
+++ b/verrou/tests/accuClreq2.stderr.exp
@@ -0,0 +1,34 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating UPWARD rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                     3000                     1000          ( 33%)
+  `- flt                     3000                     1000      ( 33%)
+      `- llo                     3000                     1000  ( 33%)
+ ---------------------------------------------------------------------
+ sub                        3                        1          ( 33%)
+  `- flt                        3                        1      ( 33%)
+      `- llo                        3                        1  ( 33%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/accuClreq2.stdout.exp b/verrou/tests/accuClreq2.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..deae326a84d13695d018b963d792020fff8468f8
--- /dev/null
+++ b/verrou/tests/accuClreq2.stdout.exp
@@ -0,0 +1,3 @@
+-0.000954 -0.000954 0.003044
+OK
+OK
diff --git a/verrou/tests/accuClreq2.vgtest b/verrou/tests/accuClreq2.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..b5d768ab4fdb361333430baacb3af139e75e24ec
--- /dev/null
+++ b/verrou/tests/accuClreq2.vgtest
@@ -0,0 +1,4 @@
+prog: accuClreq
+args: 2 
+vgopts: --rounding-mode=upward --instr-atstart=no
+stderr_filter_args: -seed -cnt-cmp-conv -backend-version
diff --git a/verrou/tests/clreq.c b/verrou/tests/clreq.c
new file mode 100644
index 0000000000000000000000000000000000000000..e3e7f31a8641bb54dc4ce4cb043ad7303aa1e9f9
--- /dev/null
+++ b/verrou/tests/clreq.c
@@ -0,0 +1,27 @@
+#include "../verrou.h"
+#include <stdio.h>
+
+float compute () {
+  int i;
+  float sum = 0;
+  for (i = 0 ; i<100 ; ++i) {
+    sum += (float)i;
+  }
+  return sum;
+}
+
+int main () {
+  if (compute() == (float)4950.)
+    printf ("OK\n");
+
+  // Uninstrumented part
+  VERROU_STOP_INSTRUMENTATION;
+  if (compute() == (float)4950.)
+    printf ("OK\n");
+  VERROU_START_INSTRUMENTATION;
+
+  if (compute() == (float)4950.)
+    printf ("OK\n");
+
+  return RUNNING_ON_VALGRIND;
+}
diff --git a/verrou/tests/clreq.stderr.exp b/verrou/tests/clreq.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..2369cde41bc0918abb77c9c97a7477950395c618
--- /dev/null
+++ b/verrou/tests/clreq.stderr.exp
@@ -0,0 +1,30 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating NEAREST rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                      300                      200          ( 67%)
+  `- flt                      300                      200      ( 67%)
+      `- llo                      300                      200  ( 67%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/clreq.stdout.exp b/verrou/tests/clreq.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..0eabe367130066eaea19b3fba16ff3178789af56
--- /dev/null
+++ b/verrou/tests/clreq.stdout.exp
@@ -0,0 +1,3 @@
+OK
+OK
+OK
diff --git a/verrou/tests/clreq.vgtest b/verrou/tests/clreq.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..04da66603068eb30272ea00ce6aa472d416d428d
--- /dev/null
+++ b/verrou/tests/clreq.vgtest
@@ -0,0 +1,3 @@
+prog: clreq
+vgopts:
+stderr_filter_args: -seed -cnt-cmp-conv -backend-version
\ No newline at end of file
diff --git a/verrou/tests/filter_stderr b/verrou/tests/filter_stderr
new file mode 100755
index 0000000000000000000000000000000000000000..b5fa65709b2506d9731066be673fd04fa94ae9a4
--- /dev/null
+++ b/verrou/tests/filter_stderr
@@ -0,0 +1,31 @@
+#! /bin/sh
+
+dir=`dirname $0`
+
+cmd="$dir/../../tests/filter_stderr_basic"
+cmd="${cmd} |perl -pne 's/For lists of detected and suppressed errors, rerun with: -s\n//'"
+while [ ! -z "$1" ]; do
+    case "$1" in
+        "-seed")
+            cmd="${cmd} | perl -pne 's/^(First seed : )\d+/\$1XXX/;'"
+            ;;
+        "-cnt-cmp-conv")
+            cmd="${cmd} | perl -n"
+            cmd="${cmd} -e 'BEGIN{\$remove=0}'"
+            cmd="${cmd} -e '\$remove=1 if m/^ cmp/;'"
+            cmd="${cmd} -e '\$remove=1 if m/^ conv/;'"
+            cmd="${cmd} -e 'print unless \$remove;'"
+            cmd="${cmd} -e '\$remove=0 if m/^ -----/;'"
+            ;;
+	"-backend-version")
+            cmd="${cmd} | perl -pne 's/^(Backend verrou : )[^\s]*/\$1test-version/;'"
+	    cmd="${cmd} | perl -pne 's/^(Backend mcaquad : )[^\s]*/\$1test-version/;'"
+            ;;
+	"-addr-error")
+            cmd="${cmd} | perl -pne 's/   at 0x[0123456789ABCDEF]*: ([^\s]*) ([^s]*)/   at 0xFFFFFF: \$1 \$2/;'"
+            ;;
+    esac
+    shift
+done
+
+eval ${cmd}
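
filter_stderr chains the generic filter_stderr_basic script with small perl one-liners selected by the arguments listed in each .vgtest file, so that run-dependent output (the random seed, the backend version strings, the cmp/conv counters, error addresses) is normalised before comparison with the .exp files. As an illustration only, here is the -seed substitution re-expressed in Python; the harness itself relies solely on the shell script above:

    import re, sys

    for line in sys.stdin:
        # mask the run-dependent seed, as the "-seed" perl filter does
        sys.stdout.write(re.sub(r'^(First seed : )\d+', r'\1XXX', line))
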
diff --git a/verrou/tests/filter_stdout b/verrou/tests/filter_stdout
new file mode 100755
index 0000000000000000000000000000000000000000..97a310f0755458ea9a60b5e417c17fac8d9e4115
--- /dev/null
+++ b/verrou/tests/filter_stdout
@@ -0,0 +1,15 @@
+#! /bin/sh
+
+dir=`dirname $0`
+
+cmd="perl -pne ''"
+while [ ! -z "$1" ]; do
+    case "$1" in
+	"-nan-commut")
+            cmd="${cmd} | perl -pne 's/^nan1 \+ nan2 = nan \(7ff800000000babe\)/nan1 + nan2 = nan (7ff800000000dead)/;'"
+	    cmd="${cmd} | perl -pne 's/^nan2 \+ nan1 = nan \(7ff800000000dead\)/nan2 + nan1 = nan (7ff800000000babe)/;'"
+            ;;
+    esac
+    shift
+done
+eval ${cmd}
diff --git a/verrou/tests/naninf-rnd.stderr.exp b/verrou/tests/naninf-rnd.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..e02b1d8109af92c690518432217f148e541410d0
--- /dev/null
+++ b/verrou/tests/naninf-rnd.stderr.exp
@@ -0,0 +1,65 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating RANDOM rounding mode
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:34)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:35)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:36)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:37)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:38)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:48)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:49)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:50)
+
+NaN: 
+   at 0xFFFFFF: main (naninf.cxx:51)
+
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                        8                        8          (100%)
+  `- dbl                        8                        8      (100%)
+      `- llo                        8                        8  (100%)
+ ---------------------------------------------------------------------
+ sub                        2                        2          (100%)
+  `- dbl                        2                        2      (100%)
+      `- llo                        2                        2  (100%)
+ ---------------------------------------------------------------------
+ div                        3                        3          (100%)
+  `- dbl                        3                        3      (100%)
+      `- llo                        3                        3  (100%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 9 errors from 9 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/naninf-rnd.stdout.exp b/verrou/tests/naninf-rnd.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..01b7d9d40b7f75d7b75bbc541d12b3c06944ee51
--- /dev/null
+++ b/verrou/tests/naninf-rnd.stdout.exp
@@ -0,0 +1,22 @@
+d           = 42
+infP        = inf
+infN        = -inf
+
+d / 0       = inf
+-d / 0      = -inf
+infP + d    = inf
+infN + d    = -inf
+
+0 / 0       = -nan (fff8000000000000)
+infP + infN = -nan (fff8000000000000)
+infN + infP = -nan (fff8000000000000)
+infP - infP = -nan (fff8000000000000)
+infN - infN = -nan (fff8000000000000)
+
+nan1        = nan (7ff800000000dead)
+nan2        = nan (7ff800000000babe)
+
+nan1 + d    = nan (7ff800000000dead)
+d + nan1    = nan (7ff800000000dead)
+nan1 + nan2 = nan (7ff800000000dead)
+nan2 + nan1 = nan (7ff800000000babe)
diff --git a/verrou/tests/naninf-rnd.vgtest b/verrou/tests/naninf-rnd.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..924233153e1a64e550af149cf54e925d5bc13560
--- /dev/null
+++ b/verrou/tests/naninf-rnd.vgtest
@@ -0,0 +1,5 @@
+prog: naninf
+vgopts: --rounding-mode=random
+stderr_filter_args: -seed -cnt-cmp-conv -backend-version -addr-error
+stdout_filter: filter_stdout
+stdout_filter_args: -nan-commut
\ No newline at end of file
diff --git a/verrou/tests/naninf.cxx b/verrou/tests/naninf.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..4b3f5a47db4dec3df8c3eea1340dbebc338c16cf
--- /dev/null
+++ b/verrou/tests/naninf.cxx
@@ -0,0 +1,52 @@
+#include <iostream>
+#include <cmath>
+#include <cstdint>
+#include <cstring>
+#include <limits>
+
+void printNaN(double n) {
+  std::uint64_t hex;
+  std::memcpy(&hex, &n, sizeof n);
+  std::cout << n
+            << " (" << std::hex << hex << ")"
+            << std::endl;
+}
+
+int main()
+{
+  double d = 42.;
+  std::cout << "d           = " << d << std::endl;
+
+  double infP = std::numeric_limits<double>::infinity();
+  std::cout << "infP        = " << infP << std::endl;
+
+  double infN = -std::numeric_limits<double>::infinity();
+  std::cout << "infN        = " << infN << std::endl;
+  std::cout << std::endl;
+
+  std::cout << "d / 0       = " << (d / 0.) << std::endl;
+  std::cout << "-d / 0      = " << (-d / 0.) << std::endl;
+  std::cout << "infP + d    = " << (infP + d) << std::endl;
+  std::cout << "infN + d    = " << (infN + d) << std::endl;
+  std::cout << std::endl;
+
+
+  std::cout << "0 / 0       = "; printNaN (0./0.);
+  std::cout << "infP + infN = "; printNaN (infP + infN);
+  std::cout << "infN + infP = "; printNaN (infN + infP);
+  std::cout << "infP - infP = "; printNaN (infP - infP);
+  std::cout << "infN - infN = "; printNaN (infN - infN);
+  std::cout << std::endl;
+
+  double nan1 = std::nan("0xdead");
+  std::cout << "nan1        = "; printNaN (nan1);
+
+  double nan2 = std::nan("0xbabe");
+  std::cout << "nan2        = "; printNaN (nan2);
+  std::cout << std::endl;
+
+  std::cout << "nan1 + d    = "; printNaN (nan1 + d);
+  std::cout << "d + nan1    = "; printNaN (d + nan1);
+  std::cout << "nan1 + nan2 = "; printNaN (nan1 + nan2);
+  std::cout << "nan2 + nan1 = "; printNaN (nan2 + nan1);
+}
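
printNaN exposes the payload of a NaN by reinterpreting the 64-bit pattern of the double, and the expected outputs rely on the payloads 0x...dead and 0x...babe surviving the additions. The same bit-level inspection can be sketched in Python with the standard struct module (illustration only; payload propagation through + is what typical x86-64 hardware does, not something IEEE-754 guarantees):

    import struct

    def bits(x):
        # reinterpret a double as its 64-bit pattern, like std::memcpy in printNaN
        return struct.unpack('<Q', struct.pack('<d', x))[0]

    nan1 = struct.unpack('<d', struct.pack('<Q', 0x7ff800000000dead))[0]
    print(hex(bits(nan1)))         # 0x7ff800000000dead
    print(hex(bits(nan1 + 42.0)))  # usually the same payload, quietly propagated
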
diff --git a/verrou/tests/seed.stderr.exp b/verrou/tests/seed.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..f111c9931a4e685a1fb34ecd787cda7274c229f5
--- /dev/null
+++ b/verrou/tests/seed.stderr.exp
@@ -0,0 +1,34 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : 42
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating RANDOM rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                   100001                   100000          (100%)
+  `- flt                   100001                   100000      (100%)
+      `- llo                   100001                   100000  (100%)
+ ---------------------------------------------------------------------
+ mul                        1                        0          (  0%)
+  `- flt                        1                        0      (  0%)
+      `- llo                        1                        0  (  0%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/seed.stdout.exp b/verrou/tests/seed.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..fbb7895bd5a2dc32c84b41a85aa94c7ec76bc795
--- /dev/null
+++ b/verrou/tests/seed.stdout.exp
@@ -0,0 +1 @@
+1.0059524775
diff --git a/verrou/tests/seed.vgtest b/verrou/tests/seed.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..f5ddb82ffa4ffc7590b4075b3a12e4b4adededc7
--- /dev/null
+++ b/verrou/tests/seed.vgtest
@@ -0,0 +1,4 @@
+prog: sum
+args: 0
+vgopts: --instr-atstart=no --rounding-mode=random --vr-seed=42
+stderr_filter_args: -cnt-cmp-conv -backend-version
diff --git a/verrou/tests/sum.c b/verrou/tests/sum.c
new file mode 100644
index 0000000000000000000000000000000000000000..0d48cb28858d875339ab2f07d8fe1c21aae1ea1f
--- /dev/null
+++ b/verrou/tests/sum.c
@@ -0,0 +1,72 @@
+#include "../verrou.h"
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+
+int N = 100000;
+float epsilon = 1e-9;
+
+float compute () {
+  float sum = 1;
+  int i;
+  VERROU_START_INSTRUMENTATION;
+  for (i = 0 ; i < N ; ++i) {
+    sum += epsilon;
+  }
+  VERROU_STOP_INSTRUMENTATION;
+
+  return sum;
+}
+
+int main (int argc, char **argv) {
+  float res = compute();
+  float ref = 1 + N*epsilon;
+
+  if (!strcmp (argv[1], "0")) {
+    // Just print the result
+    printf ("%.10f\n", res);
+  }
+  else if (!strcmp (argv[1], "1")) {
+    // sum1.vgtest
+    // CL switches:
+    //   --instr-atstart=no
+    //
+    // res should be 1
+    if (res == 1) {
+      printf ("OK\n");
+    } else {
+      printf ("%.10f\n", res);
+    }
+  }
+  else if (!strcmp (argv[1], "2")) {
+    // sum2.vgtest
+    // CL switches:
+    //   --instr-atstart=no
+    //   --rounding-mode=random
+    //
+    // res should be significantly different from 1,
+    // since there are floating point errors
+    float threshold = 20*N*epsilon;
+    if (fabs(res - 1) > threshold) {
+      printf ("OK\n");
+    } else {
+      printf ("error: |%.10e| < %.10e\n", res-1, threshold);
+    }
+  }
+  else if (!strcmp (argv[1], "3")) {
+    // sum3.vgtest
+    // CL switches:
+    //   --instr-atstart=no
+    //   --rounding-mode=average
+    //
+    // res should be close to ref
+    float threshold = 0.2f * (float)N * epsilon;
+    if (fabs(res-ref) < threshold) {
+      printf ("OK\n");
+    } else {
+      printf ("|%.10e| > %.10e \n", res-ref, threshold);
+    }
+  }
+
+  return RUNNING_ON_VALGRIND;
+}
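
The expected results encoded above are a direct consequence of the magnitudes involved: epsilon = 1e-9 is far below half an ulp of 1.0f (2^-24, about 6e-8), so with nearest rounding every single-precision addition rounds straight back to 1 and case 1 prints OK, while perturbed rounding modes make the accumulated error visible. A quick check of those magnitudes in Python, for illustration:

    half_ulp_of_one_f32 = 2.0 ** -24      # ~5.96e-08
    epsilon = 1e-9
    print(epsilon < half_ulp_of_one_f32)  # True: 1.0f + 1e-9f rounds back to 1.0f in nearest mode
    print(1 + 100000 * epsilon)           # 1.0001: the exact-arithmetic reference 'ref' in main()
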
diff --git a/verrou/tests/sum1.stderr.exp b/verrou/tests/sum1.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..6d63c16aaac9e504a0b0a2aabd24aa3d7bb1ea21
--- /dev/null
+++ b/verrou/tests/sum1.stderr.exp
@@ -0,0 +1,44 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating NEAREST rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                   100001                   100000          (100%)
+  `- flt                   100001                   100000      (100%)
+      `- llo                   100001                   100000  (100%)
+ ---------------------------------------------------------------------
+ mul                        1                        0          (  0%)
+  `- flt                        1                        0      (  0%)
+      `- llo                        1                        0  (  0%)
+ ---------------------------------------------------------------------
+ cmp                        2                        0          (  0%)
+  `- dbl                        2                        0      (  0%)
+      `- scal                       2                        0  (  0%)
+ ---------------------------------------------------------------------
+ conv                       5                        0          (  0%)
+  `- dbl=>flt                   1                        0      (  0%)
+      `- scal                       1                        0  (  0%)
+  `- flt=>dbl                   4                        0      (  0%)
+      `- scal                       4                        0  (  0%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/sum1.stdout.exp b/verrou/tests/sum1.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..d86bac9de59abcc26bc7956c1e842237c7581859
--- /dev/null
+++ b/verrou/tests/sum1.stdout.exp
@@ -0,0 +1 @@
+OK
diff --git a/verrou/tests/sum1.vgtest b/verrou/tests/sum1.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..7cb5ee8889235c391f1ef6543a38d29cad7d7da4
--- /dev/null
+++ b/verrou/tests/sum1.vgtest
@@ -0,0 +1,4 @@
+prog: sum
+args: 1
+vgopts: --instr-atstart=no
+stderr_filter_args: -seed -backend-version
diff --git a/verrou/tests/sum2.stderr.exp b/verrou/tests/sum2.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..ced96adc1f7109e6b8d94e52aaa94cae54813ff6
--- /dev/null
+++ b/verrou/tests/sum2.stderr.exp
@@ -0,0 +1,48 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating RANDOM rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                   100001                   100000          (100%)
+  `- flt                   100001                   100000      (100%)
+      `- llo                   100001                   100000  (100%)
+ ---------------------------------------------------------------------
+ sub                        1                        0          (  0%)
+  `- flt                        1                        0      (  0%)
+      `- llo                        1                        0  (  0%)
+ ---------------------------------------------------------------------
+ mul                        2                        0          (  0%)
+  `- flt                        2                        0      (  0%)
+      `- llo                        2                        0  (  0%)
+ ---------------------------------------------------------------------
+ cmp                        1                        0          (  0%)
+  `- dbl                        1                        0      (  0%)
+      `- scal                       1                        0  (  0%)
+ ---------------------------------------------------------------------
+ conv                       4                        0          (  0%)
+  `- dbl=>flt                   2                        0      (  0%)
+      `- scal                       2                        0  (  0%)
+  `- flt=>dbl                   2                        0      (  0%)
+      `- scal                       2                        0  (  0%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/sum2.stdout.exp b/verrou/tests/sum2.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..d86bac9de59abcc26bc7956c1e842237c7581859
--- /dev/null
+++ b/verrou/tests/sum2.stdout.exp
@@ -0,0 +1 @@
+OK
diff --git a/verrou/tests/sum2.vgtest b/verrou/tests/sum2.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..35b1d907db5d67a91cd1a30946a2b5f0fb7344c3
--- /dev/null
+++ b/verrou/tests/sum2.vgtest
@@ -0,0 +1,4 @@
+prog: sum
+args: 2
+vgopts: --instr-atstart=no --rounding-mode=random
+stderr_filter_args: -seed -backend-version
diff --git a/verrou/tests/sum3.stderr.exp b/verrou/tests/sum3.stderr.exp
new file mode 100644
index 0000000000000000000000000000000000000000..3b8702aaeace1d67480c3549b350a4928bb78785
--- /dev/null
+++ b/verrou/tests/sum3.stderr.exp
@@ -0,0 +1,48 @@
+Verrou, Check floating-point rounding errors
+Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.
+
+First seed : XXX
+Backend verrou : test-version
+Backend mcaquad : test-version
+Instrumented operations :
+	add : yes
+	sub : yes
+	mul : yes
+	div : yes
+	mAdd : yes
+	mSub : yes
+	cmp : no
+	conv : yes
+	max : no
+	min : no
+Instrumented scalar operations : no
+Backend verrou simulating AVERAGE rounding mode
+
+ ---------------------------------------------------------------------
+ Operation                            Instruction count
+  `- Precision
+      `- Vectorization          Total             Instrumented
+ ---------------------------------------------------------------------
+ add                   100001                   100000          (100%)
+  `- flt                   100001                   100000      (100%)
+      `- llo                   100001                   100000  (100%)
+ ---------------------------------------------------------------------
+ sub                        1                        0          (  0%)
+  `- flt                        1                        0      (  0%)
+      `- llo                        1                        0  (  0%)
+ ---------------------------------------------------------------------
+ mul                        3                        0          (  0%)
+  `- flt                        3                        0      (  0%)
+      `- llo                        3                        0  (  0%)
+ ---------------------------------------------------------------------
+ cmp                        1                        0          (  0%)
+  `- dbl                        1                        0      (  0%)
+      `- scal                       1                        0  (  0%)
+ ---------------------------------------------------------------------
+ conv                       4                        0          (  0%)
+  `- dbl=>flt                   2                        0      (  0%)
+      `- scal                       2                        0  (  0%)
+  `- flt=>dbl                   2                        0      (  0%)
+      `- scal                       2                        0  (  0%)
+ ---------------------------------------------------------------------
+ERROR SUMMARY: 0 errors from 0 contexts (suppressed: 0 from 0)
diff --git a/verrou/tests/sum3.stdout.exp b/verrou/tests/sum3.stdout.exp
new file mode 100644
index 0000000000000000000000000000000000000000..d86bac9de59abcc26bc7956c1e842237c7581859
--- /dev/null
+++ b/verrou/tests/sum3.stdout.exp
@@ -0,0 +1 @@
+OK
diff --git a/verrou/tests/sum3.vgtest b/verrou/tests/sum3.vgtest
new file mode 100644
index 0000000000000000000000000000000000000000..27b9cbbd0dd38112daaea8d2977f42849f3d8b2f
--- /dev/null
+++ b/verrou/tests/sum3.vgtest
@@ -0,0 +1,4 @@
+prog: sum
+args: 3
+vgopts: --instr-atstart=no --rounding-mode=average
+stderr_filter_args: -seed -backend-version
diff --git a/verrou/travis.mk b/verrou/travis.mk
new file mode 100644
index 0000000000000000000000000000000000000000..b8970072dfdab055a44889ee2b8ae6cd88c903d8
--- /dev/null
+++ b/verrou/travis.mk
@@ -0,0 +1,49 @@
+SHELL = /bin/bash
+
+download-valgrind:
+	cd .. && git clone --branch=$(VALGRIND_VERSION) --single-branch git://sourceware.org/git/valgrind.git valgrind+verrou >/dev/null
+
+patch-valgrind:
+	cd ../valgrind+verrou && cp -a $(PWD) verrou
+	cd ../valgrind+verrou && patch -p1 <verrou/valgrind.diff
+patch-error:
+	cd ../valgrind+verrou && find . -name '*.rej' | xargs tail -n+1
+	# keep going and try to build verrou anyway when testing against the Valgrind development version (master)
+	test "$(VALGRIND_VERSION)" = "master"
+
+configure:
+	@echo "*** AUTOGEN ***"
+	cd ../valgrind+verrou && ./autogen.sh
+
+	@echo "*** CONFIGURE ***"
+	cd ../valgrind+verrou && ./configure --enable-only64bit --enable-verrou-fma=yes --prefix=$${PWD}/install
+
+build:
+	@echo "*** MAKE ***"
+	cd ../valgrind+verrou && make
+
+	@echo "*** MAKE INSTALL ***"
+	cd ../valgrind+verrou && make install
+
+check-install:
+	@echo "*** CHECK VERSION ***"
+	source ../valgrind+verrou/install/env.sh && valgrind --version
+
+	@echo "*** CHECK HELP ***"
+	source ../valgrind+verrou/install/env.sh && valgrind --tool=verrou --help
+
+check:
+	@echo "*** BUILD TESTS ***"
+	cd ../valgrind+verrou && make -C tests  check
+	cd ../valgrind+verrou && make -C verrou check
+
+	@echo "*** VALGRIND TESTS ***"
+	cd ../valgrind+verrou && perl tests/vg_regtest verrou
+
+check-error:
+	cd ../valgrind+verrou/verrou/tests && tail -n+1 *.stdout.diff *.stdout.out *.stderr.diff *.stderr.out
+	@false
+
+unit-test:
+	@echo "*** UNIT TESTS ***"
+	cd ../valgrind+verrou/verrou/unitTest && make
diff --git a/verrou/unitTest/check-libM/genTab.py b/verrou/unitTest/check-libM/genTab.py
new file mode 100755
index 0000000000000000000000000000000000000000..6991f3e5bf13f7cf8ef51991fc39357868713e6b
--- /dev/null
+++ b/verrou/unitTest/check-libM/genTab.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+import sys
+import math
+
+def readFile(fileName):
+    data=(open(fileName).readlines()) 
+    keyData=data[0].split()
+    brutData=[line.split() for line in data[1:]]
+
+    res={}
+    for index in range(len(keyData)):
+        # convert values to float so that min/max are numeric, not lexicographic
+        dataIndex=[float(line[index]) for line in brutData]
+        res[keyData[index]]=(min(dataIndex),max(dataIndex))
+
+    return res
+
+def computeEvalError(dataNative, data):
+    res={}
+    for key in dataNative.keys():
+        resIEEE=float(dataNative[key][0])        
+        evalError=  - math.log2(max(abs(float(data[key][1]) - resIEEE),
+                                    abs(float(data[key][0]) - resIEEE)) / resIEEE)
+        res[key]=evalError
+    return res
+
+def loadRef(fileName, num=2):
+    res={}
+    for line in open(fileName):
+        spline=line.split(":")
+        typeRealtype=spline[0].split()[0]
+        correction=spline[0].split()[1]
+        nbBitStr=spline[1].strip()
+
+        if nbBitStr in ["24","53"]:
+            res[(typeRealtype, correction)]=float(nbBitStr)
+            continue
+        [valueLow,valueUp]=nbBitStr[1:-1].split(",")
+        if(float(valueUp)!=float(valueLow)):
+            print("Please Increase the mpfi precision")
+            sys.exit()
+        value=float(valueUp)
+        res[(typeRealtype, correction)]=value
+    return res
+
+
+def main(reference=None):
+    
+
+    output=open("tabAster.tex","w")
+    outputReg=open("testReg","w")
+    
+    keys=["Native", "Randominterlibm", "Randomverrou", "Randomverrou+interlibm"]
+
+    data={}
+    strLatex=""
+    for i in range(len(keys)):
+        key=keys[i]
+        data[key]=readFile("res"+key+".dat")
+
+#        for key in sorted(keys[1:]):
+    for i in range(1,len(keys)):
+        key=keys[i]
+        outputReg.write(key+"\n")
+        evalError=computeEvalError(data["Native"], data[key])
+        for keyCase in sorted(evalError.keys()):
+            outputReg.write(keyCase +" "+str(evalError[keyCase])+"\n")
+        
+    output.write(r"\begin{table}" +" \n")
+    output.write(r"\begin{center}" +" \n")
+    output.write(r"\begin{tabular}{l@{~}lccccc}\toprule" +" \n")
+    output.write(r"&  & \multicolumn{2}{c}{single precision}& \multicolumn{2}{c}{double precision}\\"+"\n"+
+                 r"&& first & second & first & second \\ \midrule"+"\n")
+
+    if reference!=None:
+        output.write("&IEEE Error & %.2f & %.2f & %.2f & %.2f"%(
+                     reference[("Float","Before")],reference[("Float","After")],
+                     reference[("Double","Before")], reference[("Double","After")])
+                     + r"\\\midrule"+"\n")
+                
+        
+    
+    for i in range(1,len(keys)):
+        key=keys[i]            
+        evalError=computeEvalError(data["Native"], data[key])
+        keyConvert={"Randominterlibm": r"\textit{(i)}&interlibm",
+                    "Randomverrou":    r"\textit{(ii)}&verrou",
+                    "Randomverrou+interlibm":r"\textit{(iii)}&verrou+interlib"}
+
+        lineStr=keyConvert[key]+ " "
+        for typeFP in ["Float","Double"]:
+            lineStr+=r"&%.2f &  %.2f  "%(evalError["BeforeCorrection_"+typeFP], evalError["AfterCorrection_"+typeFP]) 
+        lineStr+=r"\\"+"\n"
+        output.write(lineStr)
+    output.write(r"\bottomrule"+"\n")    
+    output.write(r"\end{tabular}"+"\n")
+    output.write(r"\end{center}" +" \n")
+    output.write(r"\caption{Number of significant bits for 4~implementations of function $f(a, a+6.ulp(a))$, as assessed by 3~techniques.}"+"\n")
+    output.write(r"\label{sdAster}"+"\n")
+    output.write(r"\end{table}"+"\n")
+
+    
+    
+
+
+if __name__=="__main__":
+    reference=loadRef("reference.dat")
+    if len(reference)!=4:
+        reference=None
+    main(reference)
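
computeEvalError turns the spread of the perturbed runs into a number of significant bits: it takes the worst deviation of the min/max interval from the IEEE result and returns -log2(|deviation| / reference). A minimal sketch of that formula in Python, with made-up values and a hypothetical function name, for illustration only:

    import math

    def significant_bits(reference, lo, hi):
        # worst relative deviation over the [min, max] interval of perturbed results
        dev = max(abs(lo - reference), abs(hi - reference))
        return -math.log2(dev / reference)

    print(significant_bits(1.0, 1.0 - 2**-30, 1.0 + 2**-28))  # 28.0
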
diff --git a/verrou/unitTest/check-libM/hist.sh b/verrou/unitTest/check-libM/hist.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8939ae30b16be5b8f45ec89e60386c3e6d2f0356
--- /dev/null
+++ b/verrou/unitTest/check-libM/hist.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+FILE=$1
+NUM=$2
+
+index=$(cat $FILE | cut -f $NUM |head -n 1)
+echo $index
+
+values=$(cat $FILE | cut -f $NUM | sort -u |grep -v $index)
+allvalues=$(cat $FILE | cut -f $NUM | grep -v $index)
+
+
+for value in $values ;
+do
+    count=$(cat $FILE | cut -f $NUM | grep -v $index |grep $value |wc -l)
+    echo $value , $count
+done;
+	     
+
diff --git a/verrou/unitTest/check-libM/latex/article.tex b/verrou/unitTest/check-libM/latex/article.tex
new file mode 100644
index 0000000000000000000000000000000000000000..00539a6e75d0666d35684c012c1df94bc4954d47
--- /dev/null
+++ b/verrou/unitTest/check-libM/latex/article.tex
@@ -0,0 +1,6 @@
+\documentclass{article}
+\usepackage{booktabs}
+\begin{document}
+
+\input{../tabAster.tex}
+\end{document}
diff --git a/verrou/unitTest/check-libM/testAster.cxx b/verrou/unitTest/check-libM/testAster.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..17a0af3a4c5ab3febb6c85c6170d82943be11257
--- /dev/null
+++ b/verrou/unitTest/check-libM/testAster.cxx
@@ -0,0 +1,129 @@
+#include<iostream>
+#include<iomanip>
+#include<cstdlib>
+
+#include <cfloat>
+#include <cmath>
+#include <limits>
+
+#ifdef WITH_REFERENCE
+#include <boost/multiprecision/mpfi.hpp> 
+using namespace boost::multiprecision;
+typedef number<mpfi_float_backend<1000> > mpfi_realtype;
+
+namespace std{
+  mpfi_realtype log(mpfi_realtype& a){
+    return boost::multiprecision::log(a);
+  };
+
+  mpfi_realtype abs(mpfi_realtype& a){
+    return boost::multiprecision::abs(a);
+  };
+
+};
+
+template<class FUNCTOR>
+struct computeError{
+  
+ 
+  template<class REALTYPE >
+  static mpfi_realtype apply(const REALTYPE& a1, const REALTYPE& a2){
+    mpfi_realtype interA1(a1);
+    mpfi_realtype interA2(a2);
+    REALTYPE resIEEE= FUNCTOR::apply(a1,a2);
+    mpfi_realtype resIEEE_MPFI(resIEEE);
+    mpfi_realtype resMPFI=FUNCTOR::apply(interA1, interA2);
+    
+    mpfi_realtype nbBit= -log(abs((resIEEE_MPFI - resMPFI) / resMPFI))/log(mpfi_realtype(2.) );
+
+    int implicitMantissaSize=std::numeric_limits<REALTYPE>::digits;
+    if( lower(nbBit) >  implicitMantissaSize){
+      nbBit=(mpfi_realtype) implicitMantissaSize;
+    }
+
+    return nbBit;
+  }
+};
+
+#endif
+
+
+struct areaInstable{
+  template<class REALTYPE>
+  static REALTYPE apply(REALTYPE a1, REALTYPE a2){
+    if(a1==a2){
+      return a1;
+    }else{
+      return (a2-a1) / (std::log(a2) -std::log(a1));
+    }
+  }
+};
+
+
+
+struct areaCorrected{
+  template<class REALTYPE>
+  static REALTYPE apply(REALTYPE a1, REALTYPE a2){
+    REALTYPE c=a2/a1;
+    REALTYPE n=c-1;
+    if(std::abs(n) < 5*std::numeric_limits<REALTYPE>::epsilon()){
+      return a1;
+    }else{
+      REALTYPE l=std::log(c);
+      REALTYPE f=n /l;
+      REALTYPE p=a1*f;
+      return p;
+    }
+  }
+};
+
+
+int main(int argc, char** argv){
+  int numberSample=1;
+  bool ref=false;
+
+  if(argc==2){
+    numberSample=atoi(argv[1]);   
+  }
+  if(numberSample==-1){
+    ref=true;
+    numberSample=0;
+  }
+  
+
+  double a= 4.2080034963016440E-005;
+  float af= 4.2080034963016440E-005;
+  int numberEpsilon=6;
+
+  double a1double= a, a2double=a+ numberEpsilon* DBL_EPSILON ;
+  float a1float= af, a2float=af+ numberEpsilon* FLT_EPSILON ;
+  if(!ref){
+    std::cout << "BeforeCorrection_Double"
+	      << "\t" << "AfterCorrection_Double"
+	      << "\t" << "BeforeCorrection_Float"
+	      << "\t" << "AfterCorrection_Float"<<std::endl;
+  }
+  std::cout << std::setprecision(42);
+  for(int i=0; i< numberSample ; i++){
+
+    double resInstabledouble  = areaInstable::apply<double> (a1double, a2double);
+    double resCorrecteddouble = areaCorrected::apply<double> (a1double, a2double);
+
+    float resInstablefloat  = areaInstable::apply<float> (a1float, a2float);
+    float resCorrectedfloat = areaCorrected::apply<float> (a1float, a2float);
+
+    //std::cout << std::setprecision(16);
+    std::cout << resInstabledouble << "\t"<<  resCorrecteddouble<<"\t";
+    //    std::cout << std::setprecision(8);
+    std::cout << resInstablefloat << "\t"<<  resCorrectedfloat<<std::endl;
+      
+  }
+#ifdef WITH_REFERENCE
+  if(ref){
+    std::cout << "Double Before : " << computeError<areaInstable>::apply  (a1double,a2double)  <<std::endl;
+    std::cout << "Double After : "  << computeError<areaCorrected>::apply (a1double,a2double)  <<std::endl;
+    std::cout << "Float Before : "  << computeError<areaInstable>::apply  (a1float,a2float)  <<std::endl;
+    std::cout << "Float After : "   << computeError<areaCorrected>::apply (a1float,a2float)  <<std::endl;  
+  }
+#endif
+}
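
areaInstable evaluates (a2-a1)/(log(a2)-log(a1)); when a2 is very close to a1 the two logarithms almost cancel, so ulp-level perturbations of log(a1) and log(a2) turn into large relative errors of the quotient, which is the instability the Verrou runs are meant to expose (compare the Before/After columns in testRegRef). areaCorrected rewrites the expression through the ratio c = a2/a1 and evaluates a1*(c-1)/log(c), with an early exit when c is within a few machine epsilons of 1, which keeps the intermediate quantities well conditioned. The two formulations transcribed in Python, for illustration only (Python floats are doubles, so this mirrors only the <double> case):

    import math, sys

    def area_instable(a1, a2):
        return a1 if a1 == a2 else (a2 - a1) / (math.log(a2) - math.log(a1))

    def area_corrected(a1, a2):
        c = a2 / a1
        n = c - 1.0
        if abs(n) < 5 * sys.float_info.epsilon:
            return a1
        return a1 * (n / math.log(c))

    a1 = 4.2080034963016440e-05
    a2 = a1 + 6 * sys.float_info.epsilon
    print(area_instable(a1, a2), area_corrected(a1, a2))
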
diff --git a/verrou/unitTest/check-libM/testRegRef b/verrou/unitTest/check-libM/testRegRef
new file mode 100644
index 0000000000000000000000000000000000000000..5e574eb620a6c3aca4f8d103a8c328ec15008426
--- /dev/null
+++ b/verrou/unitTest/check-libM/testRegRef
@@ -0,0 +1,15 @@
+Randominterlibm
+AfterCorrection_Double 52.463495428908004
+AfterCorrection_Float 23.475670480078016
+BeforeCorrection_Double 14.121371625961734
+BeforeCorrection_Float 13.109299288388522
+Randomverrou
+AfterCorrection_Double 51.463495428908004
+AfterCorrection_Float 22.475670480078016
+BeforeCorrection_Double 52.46350992680084
+BeforeCorrection_Float 23.47562150263434
+Randomverrou+interlibm
+AfterCorrection_Double 50.87853292818685
+AfterCorrection_Float 22.475670480078016
+BeforeCorrection_Double 14.121371625957593
+BeforeCorrection_Float 13.108206751387511
diff --git a/verrou/unitTest/checkRounding/checkRounding.cxx b/verrou/unitTest/checkRounding/checkRounding.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..4fa81f29c963f4a89f9cc37aed7117ebef57f2f7
--- /dev/null
+++ b/verrou/unitTest/checkRounding/checkRounding.cxx
@@ -0,0 +1,744 @@
+
+#include <iostream>
+#include <cstdlib>
+#include <math.h>
+#include <string.h>
+
+#include <fenv.h>
+#include <limits>
+
+#include "verrou.h"
+
+#ifdef  TEST_FMA
+#include  <immintrin.h>
+#include  <fmaintrin.h>
+#endif
+#ifdef TEST_SSE
+#include  <immintrin.h>
+#endif
+
+void usage(char** argv){
+  std::cout << "usage : "<< argv[0]<< " ENV ROUNDING_MODE  avec "<<std::endl;
+  std::cout << "ROUNDING_MODE in [upward, toward_zero, downward, nearest]  random, average are not valid"<<std::endl;
+  std::cout << "ENV in [valgrind fenv]"<<std::endl;  
+}
+int roundingMode=-2;
+bool fenv;
+
+void stopInst(bool fenv, int roundingMode){
+  if(fenv){
+    fesetround(FE_TONEAREST);
+  }else{
+    VERROU_STOP_INSTRUMENTATION;
+  }
+}
+
+void startInst(bool fenv, int roundingMode){
+  if(fenv){
+    fesetround(roundingMode);
+  }else{
+    VERROU_START_INSTRUMENTATION;
+  }
+}
+
+
+template<class REAL> std::string typeName(){
+  return std::string("unknown");
+}
+
+template<>
+std::string typeName<long double>(){
+  return std::string("long double");
+}
+
+template<>
+std::string typeName<double>(){
+  return std::string("double");
+}
+template<>
+std::string typeName<float>(){
+  return std::string("float");
+}
+
+
+
+template<class REALTYPE, class REALTYPEREF=REALTYPE>
+class test{
+public:
+  test(REALTYPEREF a):expectedResult(a){
+  }
+  
+  REALTYPEREF res;
+  REALTYPEREF expectedResult;
+  void check(){
+    std::cout.precision(16);
+    std::cout << name()<<"<"<< typeName<REALTYPE>()<<">" <<":\tres: " << res
+	      << "\ttheo: "<< expectedResult
+	      << "\tdiff: "<<  res-expectedResult<<std::endl;
+    
+  }
+
+  void run(){
+    startInst(fenv,roundingMode);    
+    res=(REALTYPEREF)compute();
+    stopInst(fenv,roundingMode);
+    check();
+  }
+
+
+  virtual REALTYPE compute()=0;
+  virtual std::string name()=0;
+};
+
+
+
+template<class REALTYPE>
+class testInc0d1: public test<REALTYPE>{
+ public:
+  testInc0d1():test<REALTYPE>(100001.),
+	  size(1000000),
+	  step(0.1),
+	  init(1.)
+    {
+    }
+
+  std::string name(){
+    return std::string("testInc0d1");
+  }
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=step;
+    }
+    return acc;
+  }
+
+  
+ private:
+  const int size;
+  const REALTYPE step;
+  const REALTYPE init;
+};
+
+
+template<class REALTYPE>
+class testInc0d1m: public test<REALTYPE>{
+ public:
+  testInc0d1m():test<REALTYPE>(-100001.),
+    size(1000000),
+    step(-0.1),
+    init(-1.)
+    {
+    }
+
+  std::string name(){
+    return std::string("testInc0d1m");
+  }
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=step;
+    }
+    return acc;
+  }
+
+  
+ private:
+  const int size;
+  const REALTYPE step;
+  const REALTYPE init;
+};
+
+
+
+template<class REALTYPE>
+class testIncSquare0d1: public test<REALTYPE>{
+ public:
+  testIncSquare0d1():test<REALTYPE>(10000),
+	  size(1000000),
+	  step(0.1),
+	  init(0.)
+    {
+
+    }
+
+  std::string name(){
+    return std::string("testIncSquare0d1");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=step*step;
+    }  
+    return acc;
+  }
+  
+ private:
+  const int size;
+  const REALTYPE step;
+  const REALTYPE init;
+};
+
+
+template<class REALTYPE>
+class testIncSquare0d1m: public test<REALTYPE>{
+ public:
+  testIncSquare0d1m():test<REALTYPE>(-10001),
+	  size(1000000),
+	  step(-0.1),
+	  init(-1.)
+    {
+
+    }
+
+  std::string name(){
+    return std::string("testIncSquare0d1m");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=(-1.0*step)*step;
+    }  
+    return acc;
+  }
+  
+ private:
+  const int size;
+  const REALTYPE step;
+  const REALTYPE init;
+};
+
+
+
+template<class REALTYPE>
+class testIncDiv10:public test<REALTYPE>{
+ public:
+  testIncDiv10():test<REALTYPE>(100000),
+	  size(1000000),
+	  stepDiv(10.),
+	  init(0.)
+    {
+    }
+
+  std::string name(){
+    return std::string("testIncDiv10");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=(1/stepDiv);
+    }  
+    return acc;
+  }
+
+  
+ private:
+  const int size;
+  const REALTYPE stepDiv;
+  const REALTYPE init;
+};
+
+
+template<class REALTYPE>
+class testIncDiv10m:public test<REALTYPE>{
+ public:
+  testIncDiv10m():test<REALTYPE>(-100000),
+    size(1000000),
+    stepDiv(-10.),
+    init(0.)
+  {
+  }
+
+  std::string name(){
+    return std::string("testIncDiv10m");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+    for(int i=0; i<size; i++){
+      acc+=(1/stepDiv);
+    }  
+    return acc;
+  }
+
+  
+ private:
+  const int size;
+  const REALTYPE stepDiv;
+  const REALTYPE init;
+};
+
+
+
+
+
+template<class REALTYPE>
+class testInvariantProdDiv:public test<REALTYPE>{
+ public:
+
+  //The sizes are adapted to avoid inf
+  static int getSize(long double a){ return 150;};
+  static int getSize(double a){ return 150;};
+  static int getSize(float a){ return 34;} ;
+  static int getSize(){return getSize((REALTYPE)0.);};
+  
+  testInvariantProdDiv():test<REALTYPE>(1.),
+	  size(getSize()),
+	  init(1.){}
+
+  std::string name(){
+    return std::string("testInvariantProdDiv");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE prod=init;
+    for(int i=1; i<size; i++){
+      prod=prod*i;    
+    } 
+    for(int i=1; i<size; i++){
+      prod=prod/i;
+    }  
+
+    return prod;
+  }
+  
+ private:
+  const int size;
+
+  const REALTYPE init;
+};
+
+
+
+template<class REALTYPE>
+class testInvariantProdDivm:public test<REALTYPE>{
+ public:
+
+  //The sizes are adapted to avoid inf
+  static int getSize(long double a){ return 150;};
+  static int getSize(double a){ return 150;};
+  static int getSize(float a){ return 34;} ;
+  static int getSize(){return getSize((REALTYPE)0.);};
+  
+  testInvariantProdDivm():test<REALTYPE>(-1.),
+	  size(getSize()),
+	  init(-1.){}
+
+  std::string name(){
+    return std::string("testInvariantProdDivm");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE prod=init;
+    for(int i=1; i<size; i++){
+      prod=prod*i;    
+    } 
+    for(int i=1; i<size; i++){
+      prod=prod/i;
+    }  
+
+    return prod;
+  }
+  
+ private:
+  const int size;
+
+  const REALTYPE init;
+};
+
+
+
+
+
+  inline double myFma(const double& a, const double& b, const double& c){
+    double d;
+#ifdef TEST_FMA
+    __m128d ai, bi,ci,di;
+    ai = _mm_load_sd(&a);
+    bi = _mm_load_sd(&b);
+    ci = _mm_load_sd(&c);
+    di=_mm_fmadd_sd(ai,bi,ci);
+    d=_mm_cvtsd_f64(di);
+#else
+    d=a*b+c;
+#endif
+    return d;
+  }
+
+
+  inline float myFma(const float& a, const float& b, const float& c){
+    float d;
+#ifdef TEST_FMA
+    __m128 ai, bi,ci,di;
+    ai = _mm_load_ss(&a);
+    bi = _mm_load_ss(&b);
+    ci = _mm_load_ss(&c);
+    di=_mm_fmadd_ss(ai,bi,ci);
+    d=_mm_cvtss_f32(di);
+#else
+    d=a*b+c;
+#endif
+    return d;
+  }
+
+
+
+template<class REALTYPE>
+class testFma:public test<REALTYPE>{
+ public:
+  
+  testFma():test<REALTYPE>(10000),
+    size(1000000),
+    value(0.1),
+    init(0.){}
+
+  std::string name(){
+    return std::string("testFma");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+
+    for(int i=0; i<size; i++){
+      acc=myFma(value,value,acc);
+    } 
+    return acc;
+  }
+  
+ private:
+  const int size;
+  const REALTYPE value;  
+  const REALTYPE init;
+
+};
+
+
+template<class REALTYPE>
+class testFmam:public test<REALTYPE>{
+ public:
+  
+  testFmam():test<REALTYPE>(-10000),
+    size(1000000),
+    value(-0.1),
+    init(0.){}
+
+  std::string name(){
+    return std::string("testFmam");
+  }
+
+
+  REALTYPE compute(){    
+    REALTYPE acc=init;
+
+    for(int i=0; i<size; i++){
+      acc=myFma(-value,value,acc);
+    } 
+    return acc;
+  }
+  
+ private:
+  const int size;
+  const REALTYPE value;  
+  const REALTYPE init;
+
+};
+
+
+
+template<class REALTYPE>
+class testMixSseLlo:public test<REALTYPE>{
+public:
+  testMixSseLlo():test<REALTYPE>(1/0.){
+  }
+
+};
+
+template<>
+class testMixSseLlo<double>:public test<double>{
+ public:
+
+  testMixSseLlo():test<double>(17){
+  }
+
+  std::string name(){
+    return std::string("testMixSseLlo");
+  }
+
+
+  double compute(){
+    const double a[]={1,2};
+    const double b[]={3,4};
+    const double c[]={5,6};
+#ifndef TEST_SSE
+    return a[0]+a[1]+b[0]+c[0]+c[1];
+#else
+    double res[2];
+    __m128d bi,ci,ri;
+    ri = _mm_loadu_pd(a);
+    bi = _mm_loadu_pd(b);
+    ri = _mm_add_sd(ri,bi);
+    ci = _mm_loadu_pd(c);
+    ri = _mm_add_pd(ri,ci);
+    _mm_storeu_pd(res,ri);
+    return res[0]+res[1];
+#endif
+  }
+};
+
+
+template<>
+class testMixSseLlo<float>:public test<float>{
+ public:
+
+  testMixSseLlo():test<float>(57){
+  }
+
+  std::string name(){
+    return std::string("testMixSseLlo");
+  }
+
+
+  float compute(){
+    const float a[]={1,2,3,4};//Sum 10
+    const float b[]={5,6,7,8};//Sum 5 because 6 7 8 will be ignored
+    const float c[]={9,10,11,12};//Sum 42
+#ifndef TEST_SSE
+    float res;
+    return a[0]+a[1]+ a[2]+a[3]+ b[0] + c[0]+c[1]+ c[2]+c[3];
+#else
+    float res[4];
+    __m128 bi,ci,ri;
+    ri = _mm_loadu_ps(a);
+    bi = _mm_loadu_ps(b);
+    ri=_mm_add_ss(ri,bi);
+    ci = _mm_loadu_ps(c);
+    ri=_mm_add_ps(ri,ci);
+    _mm_storeu_ps(res,ri);
+    return res[0]+res[1]+res[2]+res[3];
+#endif
+  }
+};
+
+template<class REALTYPE>
+class testMixSseLlom:public test<REALTYPE>{
+public:
+  testMixSseLlom():test<REALTYPE>(1/0.){
+  }
+
+};
+
+template<>
+class testMixSseLlom<double>:public test<double>{
+ public:
+
+  testMixSseLlom():test<double>(-17){
+  }
+
+  std::string name(){
+    return std::string("testMixSseLlo");
+  }
+
+
+  double compute(){
+    const double a[]={-1,-2};
+    const double b[]={-3,-4};
+    const double c[]={-5,-6};
+#ifndef TEST_SSE
+    return a[0]+a[1]+b[0]+c[0]+c[1];
+#else
+    double res[2];
+    __m128d bi,ci,ri;
+    ri = _mm_loadu_pd(a);
+    bi = _mm_loadu_pd(b);
+    ri = _mm_add_sd(ri,bi);
+    ci = _mm_loadu_pd(c);
+    ri = _mm_add_pd(ri,ci);
+    _mm_storeu_pd(res,ri);
+    return res[0]+res[1];
+#endif
+  }
+};
+
+
+
+template<>
+class testMixSseLlom<float>:public test<float>{
+ public:
+
+  testMixSseLlom():test<float>(-57){
+  }
+
+  std::string name(){
+    return std::string("testMixSseLlom");
+  }
+
+
+  float compute(){
+    const float a[]={-1,-2,-3,-4};//Sum -10
+    const float b[]={-5,-6,-7,-8};//Sum -5 because -6 -7 -8 will be ignored
+    const float c[]={-9,-10,-11,-12};//Sum -42
+#ifndef TEST_SSE
+    float res;
+    return a[0]+a[1]+ a[2]+a[3]+ b[0] + c[0]+c[1]+ c[2]+c[3];
+#else
+    float res[4];
+    __m128 bi,ci,ri;
+    ri = _mm_loadu_ps(a);
+    bi = _mm_loadu_ps(b);
+    ri=_mm_add_ss(ri,bi);
+    ci = _mm_loadu_ps(c);
+    ri=_mm_add_ps(ri,ci);
+    _mm_storeu_ps(res,ri);
+    return res[0]+res[1]+res[2]+res[3];
+#endif
+  }
+};
+
+
+
+
+template<class REALTYPE,class REALTYPEREF>
+class testCast:public test<REALTYPE,REALTYPEREF>{
+  //test cast
+public:
+  testCast():test<REALTYPE,REALTYPEREF>(0.1){}
+
+  std::string name(){
+    return std::string("testCast");
+  }
+
+  REALTYPE compute(){
+    REALTYPEREF ref=0.1;
+    return ((REALTYPE)ref);
+  }
+};
+
+template<class REALTYPE,class REALTYPEREF>
+class testCastm:public test<REALTYPE,REALTYPEREF>{
+  //test cast -
+public:
+  testCastm():test<REALTYPE,REALTYPEREF>(-0.1){}
+
+  std::string name(){
+    return std::string("testCastm");
+  }
+
+  REALTYPE compute(){
+    REALTYPEREF ref=-0.1;
+    return ((REALTYPE)ref);
+  }
+};
+
+
+
+int main(int argc, char** argv){
+  std::string roundingModeStr;
+  std::string env;
+
+  
+
+  if(argc==3){
+    env=argv[1];
+    roundingModeStr=argv[2];    
+  }else{
+    if(argc==2){
+      env=argv[1];
+      roundingModeStr=std::string("unknown");
+      roundingMode=-1;
+    }else{
+      usage(argv);
+      return EXIT_FAILURE;
+    }
+  }
+
+  //  std::cout << "env: "<<env<<std::endl;
+  //  std::cout << "roundingMode: "<<roundingModeStr<<std::endl;
+  
+  //Parse ENV
+
+  if(env==std::string("fenv")){
+    fenv=true;
+  }else{
+    if(env==std::string("valgrind")){
+      fenv=false;
+    }else{
+      usage(argv);
+      return EXIT_FAILURE;
+    }
+  }
+  
+  
+  //Parse ROUNDING_MODE
+  if(roundingModeStr==std::string("upward")) roundingMode=FE_UPWARD;
+  if(roundingModeStr==std::string("downward")) roundingMode=FE_DOWNWARD;
+  if(roundingModeStr==std::string("nearest")) roundingMode=FE_TONEAREST;
+  if(roundingModeStr==std::string("toward_zero")) roundingMode=FE_TOWARDZERO;
+  
+  if(roundingMode==-2){
+    usage(argv); 
+    return EXIT_FAILURE;
+  }
+  
+  {
+    typedef double RealType;
+    testInc0d1 <RealType> t1; t1.run();
+    testInc0d1m<RealType> t1m; t1m.run();
+    testIncSquare0d1<RealType> t2; t2.run();
+    testIncSquare0d1m<RealType> t2m; t2m.run();
+    testIncDiv10<RealType> t3; t3.run();
+    testIncDiv10m<RealType> t3m; t3m.run();
+    testInvariantProdDiv<RealType> t4; t4.run();
+    testInvariantProdDivm<RealType> t4m; t4m.run();
+    testFma<RealType> t5; t5.run();
+    testFmam<RealType> t5m; t5m.run();
+    testMixSseLlo<RealType> t6; t6.run();
+    testMixSseLlom<RealType> t6m; t6m.run();
+    testCast<RealType,double> t7; t7.run();
+    testCastm<RealType,double> t7m; t7m.run();
+  }
+  
+  {
+    typedef float RealType;
+    testInc0d1 <RealType> t1; t1.run();
+    testInc0d1m<RealType> t1m; t1m.run();
+    testIncSquare0d1<RealType> t2; t2.run();
+    testIncSquare0d1m<RealType> t2m; t2m.run();
+    testIncDiv10<RealType> t3; t3.run();
+    testIncDiv10m<RealType> t3m; t3m.run();  
+    testInvariantProdDiv<RealType> t4; t4.run();
+    testInvariantProdDivm<RealType> t4m; t4m.run();
+    testFma<RealType> t5; t5.run();
+    testFmam<RealType> t5m; t5m.run();
+    testMixSseLlo<RealType> t6; t6.run();
+    testMixSseLlom<RealType> t6m; t6m.run();
+    testCast<RealType,double> t7; t7.run();
+    testCastm<RealType,double> t7m; t7m.run();
+  }
+
+  /*    {
+    typedef long double RealType;
+    test1<RealType> t1; t1.run();
+    test2<RealType> t2; t2.run();
+    test3<RealType> t3; t3.run();
+    test4<RealType> t4; t4.run();
+    //test5<RealType> t5; t5.run();
+    }*/
+
+
+  return EXIT_SUCCESS;
+}
+
+
+
diff --git a/verrou/unitTest/checkRounding/makefile b/verrou/unitTest/checkRounding/makefile
new file mode 100644
index 0000000000000000000000000000000000000000..c1c61c57a293ad9581df0e27555718ce8b2a3bf5
--- /dev/null
+++ b/verrou/unitTest/checkRounding/makefile
@@ -0,0 +1,41 @@
+SOURCE=checkRounding.cxx
+EXEC1=checkRounding
+EXEC2=checkRoundingNative
+
+export INSTALLPATH = $(shell bash ../installpath)
+FLAGS += -I$(INSTALLPATH)/include/valgrind
+FLAGS += -Wall -Wno-unused
+
+FMA_FLAG=
+HAS_FMA = $(shell grep VERROU_COMPILED_WITH_FMA= $(INSTALLPATH)/env.sh)
+ifeq ($(HAS_FMA), export VERROU_COMPILED_WITH_FMA=yes)
+	FMA_FLAG=-DTEST_FMA
+endif
+ifeq ($(HAS_FMA), export VERROU_COMPILED_WITH_FMA=no)
+	FMA_FLAG=-DTEST_NOFMA
+endif
+
+include ../flag.mk
+
+FLAGS1 = $(FLAGS) $(DEBUGFLAGS)
+FLAGS2 = $(FLAGS) $(NATIVEFLAGS) -mfma $(FMA_FLAG) -DTEST_SSE
+
+.PHONY: run run1 run2
+
+
+run: run1 run2
+run1: $(EXEC1)
+	python3 runCheck.py $(EXEC1)
+
+run2: $(EXEC2)
+	python3 runCheck.py $(EXEC2)
+
+
+$(EXEC1): $(SOURCE)
+	$(CXX) $(FLAGS1) -o $(EXEC1) $(SOURCE)
+
+$(EXEC2): $(SOURCE)
+	$(CXX) $(FLAGS2) -o $(EXEC2) $(SOURCE)
+
+clean:
+	rm $(EXEC1) $(EXEC2)
diff --git a/verrou/unitTest/checkRounding/runCheck.py b/verrou/unitTest/checkRounding/runCheck.py
new file mode 100644
index 0000000000000000000000000000000000000000..97226b2080d26f5d334a536905ba76c5c990869f
--- /dev/null
+++ b/verrou/unitTest/checkRounding/runCheck.py
@@ -0,0 +1,579 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+import subprocess as sp
+import shlex
+import re
+
+stdRounding=["nearest", "toward_zero", "downward", "upward" ]
+valgrindRounding=stdRounding + ["random", "average", "float", "farthest", "memcheck"]
+
+
+def printRes(res):
+    print("stdout:")
+    for line in res[0]:
+        print(line[0:-1])
+    print("cerr  :")
+    for line in res[1]:
+        print(line[0:-1])
+
+
+def runCmd(cmd,expectedResult=0, printCmd=True, printCwd=True):    
+    if printCmd:
+        print("Cmd:", cmd)
+
+    if printCwd:
+        print("Cwd:", os.getcwd())
+    # launch the command
+    
+    process=sp.Popen(args=shlex.split(cmd),
+                     stdout=sp.PIPE,
+                     stderr=sp.PIPE)
+
+    (resStdStr, resErrStr)=process.communicate()
+
+    resStd=resStdStr.decode('utf8').splitlines()
+    resErr=resErrStr.decode('utf8').splitlines()
+     
+    error=process.wait()
+
+    # error handling
+    if error !=expectedResult: 
+        msg = "Error with the execution of : " +cmd+"\n"
+        msg+= "\t error is " +str(error) +"\n"
+        msg+= "\t expectedResult is " +str(expectedResult)        
+        print(msg)
+        printRes((resStd, resErr))
+
+        
+        sys.exit(42)
+    return (resStd, resErr)
+
+
+        
+
+class cmdPrepare:
+    def __init__(self, arg):
+        self.valgrindPath=os.path.join(os.environ["INSTALLPATH"], "bin", "valgrind")
+        self.execPath=arg
+    
+    def run(self,env="fenv", rounding="nearest"):
+        self.checkRounding(env, rounding)
+        cmd=None
+        if env=="fenv":
+            cmd=self.execPath + " fenv "+ rounding
+            
+        if env=="valgrind":
+            if rounding=="memcheck":
+                cmd=self.valgrindPath + " --tool=memcheck " +self.execPath +" valgrind"
+            else:
+                cmd=self.valgrindPath + " --tool=verrou --vr-verbose=no --rounding-mode=" + rounding+ " " +self.execPath +" valgrind"
+
+        return runCmd(cmd)
+        # print cmd
+            
+    def checkRounding(self, env, rounding):
+        
+        if env=="fenv" and rounding in stdRounding:
+            return True
+        if env=="valgrind" and rounding in valgrindRounding:
+            return True
+        print("Failure in checkRounding")
+        sys.exit(-1)
+
+
+
+def generatePairOfAvailableComputation():
+    res=[]
+    for i in stdRounding:
+        res+=[("fenv", i)]
+    for i in valgrindRounding:
+        res+=[("valgrind", i)]
+    return res
+        
+
+def verrouCerrFilter(res):
+    pidStr=(res[0].split())[0]
+    # pidStr="==2958=="
+    newRes=[]
+    for line in res:
+        newLine=line.replace(pidStr, "")
+        if newLine.startswith(" Backend verrou simulating ") and newLine.endswith(" rounding mode"):
+            continue
+        if newLine.startswith(" First seed : "):
+            continue
+        newRes+=[newLine]
+    return newRes
+
+def getDiff(outPut, testName):
+    for line in outPut[0]:
+        if line.startswith(testName+":"):
+            return float(line.split()[-1])
+    print("unknown testName: ", testName)    
+    return None
+
+
+
+        
+class errorCounter:
+
+    def __init__(self,ok=0,ko=0,warn=0):
+        self.ok=ok
+        self.ko=ko
+        self.warn=warn
+
+
+    def incOK(self, v):
+        self.ok+=v
+    def incKO(self, v):
+        self.ko+=v
+    def incWarn(self, v):
+        self.warn+=v
+    def add(self, tupleV):
+        self.ok  =tupleV[0]
+        self.ko  =tupleV[1]
+        self.warn=tupleV[2]
+    def __add__(self, v):
+        self.ok  += v.ok
+        self.ko  += v.ko
+        self.warn+= v.warn
+        return self
+    def printSummary(self):
+        print("error summary")
+        print("\tOK : "+str(self.ok))
+        print("\tKO : "+str(self.ko))
+        print("\tWarning : "+str(self.warn))
+        
+def checkVerrouInvariant(allResult):
+    ref=allResult[("valgrind", "nearest")][1]
+    ko=0
+    ok=0
+    for rounding in valgrindRounding:        
+        if rounding in ["nearest", "memcheck"]:
+            #nearest : because it is the ref
+            #memcheck : because it is not verrou
+            continue
+        (cout, cerr)=allResult[("valgrind", rounding)]
+        if cerr!=ref:
+            for i in range(len(ref)):
+                if cerr[i]!=ref[i]:
+                    print("cerr:", cerr[i])
+                    print("ref:", ref[i])
+            print("KO : incoherent number of operation ("+rounding+")")
+            ko+=1
+        else:
+            print("OK : coherent number of operation ("+rounding+")")
+            ok+=1
+    return errorCounter(ok, ko, 0)
+
+def diffRes(res1, res2):    
+    if len(res1)!=len(res2):
+        print("Wrong number of line")
+        print("fenv", res1)
+        print("val", res2)
+        sys.exit(-1)
+    else:
+        acc=0
+        for i in range(len(res1)):
+            line1=res1[i]
+            line2=res2[i]
+            if  line1 !=line2:
+                print("\tfenv: "+line1.strip())
+                print("\tfval: "+line2.strip()+"\n")
+                acc+=1
+        return acc
+
+def checkRoundingInvariant(allResult):
+    ok=0
+    ko=0
+    for rounding in stdRounding:
+        fenvRes=(allResult["fenv", rounding])[0]
+        valRes=(allResult["valgrind", rounding])[0]
+        if fenvRes!=valRes:
+            print("KO : incoherent comparison between fenv and valgrind ("+rounding+")")            
+            ko+=diffRes(fenvRes, valRes)
+        else:
+            ok+=1
+            print("OK : coherent comparison between fenv and valgrind ("+rounding+")")
+    return errorCounter(ok, ko, 0)
+
+
+
+# def checkOrder(testName, *args):
+#     tabValue=[x[0] for x in args]
+#     nameValue=[x[0] for x in args]
+
+#     for i in range(len(tabValue)-1):
+
+class assertRounding:
+    def __init__(self, testName):
+        self.testName=testName
+        self.diff_nearestMemcheck=getDiff(allResult[("valgrind", "memcheck")], testName)
+        self.diff_nearestNative=getDiff(allResult[("fenv", "nearest")], testName)
+        self.diff_toward_zeroNative  =getDiff(allResult[("fenv", "toward_zero")], testName)
+        self.diff_downwardNative     =getDiff(allResult[("fenv", "downward")], testName)
+        self.diff_upwardNative       =getDiff(allResult[("fenv", "upward")], testName)
+        
+        self.diff_nearest      =getDiff(allResult[("valgrind", "nearest")], testName)
+        self.diff_toward_zero  =getDiff(allResult[("valgrind", "toward_zero")], testName)
+        self.diff_downward     =getDiff(allResult[("valgrind", "downward")], testName)
+        self.diff_upward       =getDiff(allResult[("valgrind", "upward")], testName)
+        self.diff_float        =getDiff(allResult[("valgrind", "float")], testName)
+        self.diff_farthest     =getDiff(allResult[("valgrind", "farthest")], testName)
+
+
+        self.diff_random       =getDiff(allResult[("valgrind", "random")], testName)      
+        self.diff_average      =getDiff(allResult[("valgrind", "average")], testName)     
+
+        self.KoStr="Warning"
+        self.warnBool=True
+        self.ok=0
+        self.warn=0
+        self.ko=0
+
+        self.assertEqual("nearestNative", "nearestMemcheck")
+        if self.ok!=0:
+            self.KoStr="KO"
+            self.warnBool=False
+
+    def getValue(self, str1):
+        return getattr(self, "diff_"+str1)
+
+    def printKo(self, msg):
+        print(self.KoStr+" : "+self.testName+" "+msg)
+        if self.warnBool:
+            self.warn+=1
+        else:
+            self.ko+=1
+
+    def printOk(self, msg):
+        print("OK : "+self.testName+" "+msg)
+        self.ok+=1
+
+    def assertEqValue(self, str1, value):
+        value1=self.getValue(str1)
+        if value1!=value:
+            self.printKo(str1+"!="+str(value)+" "+str(value1))
+        else:
+            self.printOk(str1+"="+str(value))
+
+    def assertEqual(self, str1, str2):
+        value1=self.getValue(str1)
+        value2=self.getValue(str2)
+        if value1!=value2:
+            self.printKo(str1+"!="+str2+" "+str(value1)+" "+str(value2))
+        else:
+            self.printOk(str1+"="+str2)
+
+    def assertLeq(self, str1, str2):
+        value1=self.getValue(str1)
+        value2=self.getValue(str2)
+        if value1<=value2:
+            self.printOk(str1+"<="+str2)
+        else:
+            self.printKo(str1+">"+str2+" "+str(value1)+" "+str(value2))
+
+    def assertLess(self, str1, str2):
+        value1=self.getValue(str1)
+        value2=self.getValue(str2)
+        if value1<value2:
+            self.printOk(str1+"<"+str2)
+        else:
+            self.printKo(str1+">="+str2+" "+str(value1)+" "+str(value2))
+
+    def assertAbsLess(self, str1, str2):
+        value1=abs(self.getValue(str1))
+        value2=abs(self.getValue(str2))
+        if value1<value2:
+            self.printOk("|"+str1+"| < |"+str2+"|")
+        else:
+            self.printKo("|"+str1+"| >= |"+str2+"|"+" "+str(value1)+" "+str(value2))
+
+
+    def assertNative(self):
+        for rd in ["nearest", "toward_zero", "downward", "upward"]:
+            self.assertEqual(rd, rd+"Native")
+
+
+    
+    
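+# Tests whose rounding error accumulates with a constant positive sign:
+# toward_zero must equal downward, downward/upward must bracket nearest,
+# farthest, random and average (strictly for random/average), and average is
+# expected to show the smallest absolute deviation ("optimist" random checks).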
+def checkTestPositiveAndOptimistRandomVerrou(allResult,testList,typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+        for RealType in typeTab:
+            testName=test+RealType
+
+            testCheck=assertRounding(testName)
+            testCheck.assertNative()
+            testCheck.assertEqual("toward_zero", "downward")
+            testCheck.assertLess("downward", "upward")
+            testCheck.assertLeq("downward", "nearest")
+            testCheck.assertLeq("downward", "farthest")
+            testCheck.assertLeq("farthest", "upward")
+            testCheck.assertLeq("nearest", "upward")
+
+            testCheck.assertLess("downward", "random")
+            testCheck.assertLess("downward", "average")
+
+            testCheck.assertLess("random", "upward")
+            testCheck.assertLess("average", "upward")
+
+            testCheck.assertAbsLess("average", "random")
+            testCheck.assertAbsLess("average", "upward")
+            testCheck.assertAbsLess("average", "downward")
+            testCheck.assertAbsLess("average", "nearest")
+
+            ok+=testCheck.ok
+            ko+=testCheck.ko
+            warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
+
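+# checkFloat: the "float" backend must leave <float> tests unchanged (equal to
+# nearest) and must make the <double> tests reproduce the <float> nearest result.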
+def checkFloat(allResult, testList):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+        testCheckFloat=assertRounding(test+"<float>")
+        testCheckDouble=assertRounding(test+"<double>")
+        testCheckFloat.assertEqual("nearest", "float")
+        testCheckDouble.assertEqValue("float", testCheckFloat.getValue("nearest"))
+        ok+=testCheckFloat.ok
+        ko+=testCheckFloat.ko
+        warn+=testCheckFloat.warn
+        ok+=testCheckDouble.ok
+        ko+=testCheckDouble.ko
+        warn+=testCheckDouble.warn
+    return errorCounter(ok, ko, warn)
+
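+# Mirror of the previous check for tests whose error accumulates with a
+# negative sign: toward_zero now equals upward.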
+def checkTestNegativeAndOptimistRandomVerrou(allResult,testList,typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+        for RealType in typeTab:
+            testName=test+RealType
+
+            testCheck=assertRounding(testName)
+            testCheck.assertNative()
+            testCheck.assertEqual("toward_zero", "upward")
+            testCheck.assertLess("downward", "upward")
+            testCheck.assertLeq("downward", "nearest")
+            testCheck.assertLeq("nearest", "upward")
+
+            testCheck.assertLeq("downward", "farthest")
+            testCheck.assertLeq("farthest", "upward")
+
+            testCheck.assertLess("downward", "random")
+            testCheck.assertLess("downward", "average")
+            
+            testCheck.assertLess("random", "upward")
+            testCheck.assertLess("average", "upward")
+
+            testCheck.assertAbsLess("average", "random")
+            testCheck.assertAbsLess("average", "upward")
+            testCheck.assertAbsLess("average", "downward")
+            testCheck.assertAbsLess("average", "nearest")
+
+            ok+=testCheck.ok
+            ko+=testCheck.ko
+            warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
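+# checkTestPositive / checkTestNegative: same bracketing checks as above, but
+# without the assertions on the absolute deviation of the average backend.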
+def checkTestPositive(allResult,testList, typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+       for RealType in typeTab:
+        testName=test+RealType
+        testCheck=assertRounding(testName)
+        testCheck.assertNative()
+        testCheck.assertEqual("toward_zero", "downward")
+        testCheck.assertLess("downward", "upward")
+        testCheck.assertLeq("downward", "nearest")
+        testCheck.assertLeq("nearest", "upward")
+
+        testCheck.assertLeq("downward", "farthest")
+        testCheck.assertLeq("farthest", "upward")
+
+        testCheck.assertLess("downward", "random")
+        testCheck.assertLess("downward", "average")
+
+        testCheck.assertLess("random", "upward")
+        testCheck.assertLess("average", "upward")
+
+        ok+=testCheck.ok
+        ko+=testCheck.ko
+        warn+=testCheck.warn
+        
+    return errorCounter(ok, ko, warn)
+        
+def checkTestNegative(allResult,testList,typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+        for RealType in typeTab:
+            testName=test+RealType
+
+            testCheck=assertRounding(testName)
+            testCheck.assertNative()
+            testCheck.assertEqual("toward_zero", "upward")
+            testCheck.assertLess("downward", "upward")
+            testCheck.assertLeq("downward", "nearest")
+            testCheck.assertLeq("nearest", "upward")
+
+            testCheck.assertLeq("downward", "farthest")
+            testCheck.assertLeq("farthest", "upward")
+
+            testCheck.assertLess("downward", "random")
+            testCheck.assertLess("downward", "average")
+
+            testCheck.assertLess("random", "upward")
+            testCheck.assertLess("average", "upward")
+
+            ok+=testCheck.ok
+            ko+=testCheck.ko
+            warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
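+# "BetweenTwoValues" variants: random and average only have to stay within the
+# [downward, upward] bracket (<= instead of <), as they may reach either bound.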
+def checkTestPositiveBetweenTwoValues(allResult,testList, typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+       for RealType in typeTab:
+        testName=test+RealType
+        testCheck=assertRounding(testName)
+        testCheck.assertNative()
+        testCheck.assertEqual("toward_zero", "downward")
+        testCheck.assertLess("downward", "upward")
+        testCheck.assertLeq("downward", "nearest")
+        testCheck.assertLeq("nearest", "upward")
+
+        testCheck.assertLeq("downward", "farthest")
+        testCheck.assertLeq("farthest", "upward")
+
+        testCheck.assertLeq("downward", "random")
+        testCheck.assertLeq("downward", "average")
+
+        testCheck.assertLeq("random", "upward")
+        testCheck.assertLeq("average", "upward")
+
+        ok+=testCheck.ok
+        ko+=testCheck.ko
+        warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
+def checkTestNegativeBetweenTwoValues(allResult,testList, typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+       for RealType in typeTab:
+        testName=test+RealType
+        testCheck=assertRounding(testName)
+        testCheck.assertNative()
+        testCheck.assertEqual("toward_zero", "upward")
+        testCheck.assertLess("downward", "upward")
+        testCheck.assertLeq("downward", "nearest")
+        testCheck.assertLeq("nearest", "upward")
+
+        testCheck.assertLeq("downward", "farthest")
+        testCheck.assertLeq("farthest", "upward")
+
+        testCheck.assertLeq("downward", "random")
+        testCheck.assertLeq("downward", "average")
+
+        testCheck.assertLeq("random", "upward")
+        testCheck.assertLeq("average", "upward")
+
+        ok+=testCheck.ok
+        ko+=testCheck.ko
+        warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
+
+
+
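+# checkExact: the computation is exactly representable, so every rounding mode
+# (native or verrou) must give exactly the same result.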
+def checkExact(allResult,testList,typeTab=["<double>", "<float>"]):
+    ok=0
+    warn=0
+    ko=0
+    for test in testList:
+        for RealType in typeTab:
+            testName=test+RealType
+
+            testCheck=assertRounding(testName)
+            testCheck.assertNative()
+            testCheck.assertEqual("toward_zero", "downward")
+            testCheck.assertEqual("downward", "upward")
+            testCheck.assertEqual("upward", "nearest")
+            testCheck.assertEqual("nearest", "upward")
+
+            testCheck.assertEqual("nearest", "farthest")
+
+            testCheck.assertEqual("downward", "random")
+            testCheck.assertEqual("downward", "average")
+
+            ok+=testCheck.ok
+            ko+=testCheck.ko
+            warn+=testCheck.warn
+
+    return errorCounter(ok, ko, warn)
+
+
+
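+# Driver: run the test binary for every available (environment, rounding)
+# pair, filter verrou's stderr, apply the checks above and use the number of
+# KO as exit status.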
+if __name__=='__main__':
+    cmdHandler=cmdPrepare(os.path.join(os.curdir,sys.argv[1]))
+    allResult={}
+    for (env, rounding) in generatePairOfAvailableComputation():
+        (cout, cerr)=cmdHandler.run(env, rounding)
+        if env=="valgrind":
+            allResult[(env, rounding)]=(cout, verrouCerrFilter(cerr))
+        else:
+            allResult[(env, rounding)]=(cout, cerr)
+            
+    # printRes(allResult[("fenv" ,"toward_zero")])
+    # printRes(allResult[("valgrind" ,"toward_zero")])
+    typeTab=["<double>", "<float>"]#,"<long double>"]
+
+    eCount=errorCounter()
+
+    eCount+=checkVerrouInvariant(allResult)
+
+
+    eCount+=checkTestPositiveAndOptimistRandomVerrou(allResult, testList=["testInc0d1", "testIncSquare0d1", "testIncDiv10"], typeTab=typeTab)
+    eCount+=checkTestNegativeAndOptimistRandomVerrou(allResult, testList=["testInc0d1m", "testIncSquare0d1m", "testIncDiv10m"], typeTab=typeTab)
+    eCount+=checkTestPositive(allResult, testList=["testInvariantProdDiv"], typeTab=typeTab)
+    eCount+=checkTestNegative(allResult, testList=["testInvariantProdDivm"], typeTab=typeTab)
+    eCount+=checkTestPositiveAndOptimistRandomVerrou(allResult, testList=["testFma"], typeTab=["<double>", "<float>"])
+    eCount+=checkTestNegativeAndOptimistRandomVerrou(allResult, testList=["testFmam"], typeTab=["<double>", "<float>"])
+        
+    eCount+=checkExact(allResult, testList=["testMixSseLlo"], typeTab=["<double>", "<float>"])
+
+    eCount+=checkExact(allResult, testList=["testCast", "testCastm"], typeTab=["<double>"])
+    eCount+=checkTestPositiveBetweenTwoValues(allResult, testList=["testCast"], typeTab=["<float>"])
+    eCount+=checkTestNegativeBetweenTwoValues(allResult, testList=["testCastm"], typeTab=["<float>"])
+
+    eCount+=checkFloat(allResult, ["testInc0d1", "testIncSquare0d1", "testIncDiv10", "testInc0d1m", "testIncSquare0d1m", "testIncDiv10m", "testFma", "testFmam", "testMixSseLlo"])
+    eCount.printSummary()
+    sys.exit(eCount.ko)
diff --git a/verrou/unitTest/checkUCB-vecto/README b/verrou/unitTest/checkUCB-vecto/README
new file mode 100644
index 0000000000000000000000000000000000000000..fed2603684e500237822cb72ed5dd0a0a8a958fd
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/README
@@ -0,0 +1,4 @@
+
+The input data comes from the UCB test suite (ucbtest): http://www.netlib.org/fp/ucbtest.tgz
+
+
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/addd.input b/verrou/unitTest/checkUCB-vecto/inputData/addd.input
new file mode 100644
index 0000000000000000000000000000000000000000..b727d173dde98016d26fc20d785f185d297367e8
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/addd.input
@@ -0,0 +1,1432 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+addd p eq - 3ff00000 00000000 3ff00000 00000000 40000000 00000000
+addd z eq - 3ff00000 00000000 3ff00000 00000000 40000000 00000000
+addd n eq - 3ff00000 00000000 40000000 00000000 40080000 00000000
+addd m eq - 3ff00000 00000000 40000000 00000000 40080000 00000000
+addd p eq - 3ff00000 00000000 40000000 00000000 40080000 00000000
+addd z eq - 3ff00000 00000000 40000000 00000000 40080000 00000000
+addd n eq - 40000000 00000000 3ff00000 00000000 40080000 00000000
+addd m eq - 40000000 00000000 3ff00000 00000000 40080000 00000000
+addd p eq - 40000000 00000000 3ff00000 00000000 40080000 00000000
+addd z eq - 40000000 00000000 3ff00000 00000000 40080000 00000000
+addd n eq - 40000000 00000000 40000000 00000000 40100000 00000000
+addd m eq - 40000000 00000000 40000000 00000000 40100000 00000000
+addd p eq - 40000000 00000000 40000000 00000000 40100000 00000000
+addd z eq - 40000000 00000000 40000000 00000000 40100000 00000000
+addd n eq - 40000000 00000000 c0000000 00000000 00000000 00000000
+addd z eq - 40000000 00000000 c0000000 00000000 00000000 00000000
+addd p eq - 40000000 00000000 c0000000 00000000 00000000 00000000
+addd m eq - 40000000 00000000 c0000000 00000000 80000000 00000000
+addd n eq - 40140000 00000000 c0140000 00000000 00000000 00000000
+addd z eq - 40140000 00000000 c0140000 00000000 00000000 00000000
+addd p eq - 40140000 00000000 c0140000 00000000 00000000 00000000
+addd m eq - 40140000 00000000 c0140000 00000000 80000000 00000000
+addd n eq - 3ff00000 00000000 401c0000 00000000 40200000 00000000
+addd m eq - 3ff00000 00000000 401c0000 00000000 40200000 00000000
+addd p eq - 3ff00000 00000000 401c0000 00000000 40200000 00000000
+addd z eq - 3ff00000 00000000 401c0000 00000000 40200000 00000000
+addd n eq - 40140000 00000000 bff00000 00000000 40100000 00000000
+addd m eq - 40140000 00000000 bff00000 00000000 40100000 00000000
+addd p eq - 40140000 00000000 bff00000 00000000 40100000 00000000
+addd z eq - 40140000 00000000 bff00000 00000000 40100000 00000000
+addd n eq - 40000000 00000000 c0140000 00000000 c0080000 00000000
+addd m eq - 40000000 00000000 c0140000 00000000 c0080000 00000000
+addd p eq - 40000000 00000000 c0140000 00000000 c0080000 00000000
+addd z eq - 40000000 00000000 c0140000 00000000 c0080000 00000000
+addd n eq - 40140000 00000000 80000000 00000000 40140000 00000000
+addd m eq - 40140000 00000000 80000000 00000000 40140000 00000000
+addd p eq - 40140000 00000000 80000000 00000000 40140000 00000000
+addd z eq - 40140000 00000000 80000000 00000000 40140000 00000000
+addd n eq - 40140000 00000000 00000000 00000000 40140000 00000000
+addd m eq - 40140000 00000000 00000000 00000000 40140000 00000000
+addd p eq - 40140000 00000000 00000000 00000000 40140000 00000000
+addd z eq - 40140000 00000000 00000000 00000000 40140000 00000000
+addd n eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd m eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd p eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd z eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd n eq - fff00000 00000000 fff00000 00000000 fff00000 00000000
+addd m eq - fff00000 00000000 fff00000 00000000 fff00000 00000000
+addd p eq - fff00000 00000000 fff00000 00000000 fff00000 00000000
+addd z eq - fff00000 00000000 fff00000 00000000 fff00000 00000000
+addd n uo v fff00000 00000000 7ff00000 00000000 7fffe000 00000000
+addd m uo v fff00000 00000000 7ff00000 00000000 7fffe000 00000000
+addd p uo v fff00000 00000000 7ff00000 00000000 7fffe000 00000000
+addd z uo v fff00000 00000000 7ff00000 00000000 7fffe000 00000000
+addd n uo v 7ff00000 00000000 fff00000 00000000 7fffe000 00000000
+addd m uo v 7ff00000 00000000 fff00000 00000000 7fffe000 00000000
+addd p uo v 7ff00000 00000000 fff00000 00000000 7fffe000 00000000
+addd z uo v 7ff00000 00000000 fff00000 00000000 7fffe000 00000000
+addd n eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd m eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd p eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd z eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd n eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+addd m eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+addd p eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+addd z eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+addd n eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+addd m eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+addd p eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+addd z eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+addd n eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+addd m eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+addd p eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+addd z eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+addd n eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd m eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd p eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd z eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd n eq - 7fe00000 00000000 fff00000 00000000 fff00000 00000000
+addd m eq - 7fe00000 00000000 fff00000 00000000 fff00000 00000000
+addd p eq - 7fe00000 00000000 fff00000 00000000 fff00000 00000000
+addd z eq - 7fe00000 00000000 fff00000 00000000 fff00000 00000000
+addd n eq - ffe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd m eq - ffe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd p eq - ffe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd z eq - ffe00000 00000000 7ff00000 00000000 7ff00000 00000000
+addd n eq - ffe00000 00000000 fff00000 00000000 fff00000 00000000
+addd m eq - ffe00000 00000000 fff00000 00000000 fff00000 00000000
+addd p eq - ffe00000 00000000 fff00000 00000000 fff00000 00000000
+addd z eq - ffe00000 00000000 fff00000 00000000 fff00000 00000000
+addd n eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+addd m eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+addd p eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+addd z eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+addd n eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+addd m eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+addd p eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+addd z eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+addd n eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+addd m eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+addd p eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+addd z eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+addd n eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+addd m eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+addd p eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+addd z eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+addd n eq - 00000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd m eq - 00000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd p eq - 00000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd z eq - 00000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd n eq - 80000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd m eq - 80000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd p eq - 80000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd z eq - 80000000 00000000 7ff00000 00000000 7ff00000 00000000
+addd n eq - 00000000 00000000 fff00000 00000000 fff00000 00000000
+addd m eq - 00000000 00000000 fff00000 00000000 fff00000 00000000
+addd p eq - 00000000 00000000 fff00000 00000000 fff00000 00000000
+addd z eq - 00000000 00000000 fff00000 00000000 fff00000 00000000
+addd n eq - 80000000 00000000 fff00000 00000000 fff00000 00000000
+addd m eq - 80000000 00000000 fff00000 00000000 fff00000 00000000
+addd p eq - 80000000 00000000 fff00000 00000000 fff00000 00000000
+addd z eq - 80000000 00000000 fff00000 00000000 fff00000 00000000
+addd n eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+addd m eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+addd p eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+addd z eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+addd n eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+addd m eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+addd p eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+addd z eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+addd n eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+addd m eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+addd p eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+addd z eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+addd n eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+addd m eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+addd p eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+addd z eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+addd n eq - 00000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd m eq - 00000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd p eq - 00000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd z eq - 00000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd n eq - 00000000 00000003 fff00000 00000000 fff00000 00000000
+addd m eq - 00000000 00000003 fff00000 00000000 fff00000 00000000
+addd p eq - 00000000 00000003 fff00000 00000000 fff00000 00000000
+addd z eq - 00000000 00000003 fff00000 00000000 fff00000 00000000
+addd n eq - 80000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd m eq - 80000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd p eq - 80000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd z eq - 80000000 00000003 7ff00000 00000000 7ff00000 00000000
+addd n eq - 80000000 00000003 fff00000 00000000 fff00000 00000000
+addd m eq - 80000000 00000003 fff00000 00000000 fff00000 00000000
+addd p eq - 80000000 00000003 fff00000 00000000 fff00000 00000000
+addd z eq - 80000000 00000003 fff00000 00000000 fff00000 00000000
+addd n eq - 00000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd m eq - 00000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd p eq - 00000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd z eq - 00000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd n eq - 80000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd m eq - 80000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd p eq - 80000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd z eq - 80000000 00000000 7fe00000 00000000 7fe00000 00000000
+addd n eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+addd m eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+addd p eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+addd z eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+addd n eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+addd m eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+addd p eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+addd z eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+addd n eq - 3ff00000 00000000 80000000 00000000 3ff00000 00000000
+addd m eq - 3ff00000 00000000 80000000 00000000 3ff00000 00000000
+addd p eq - 3ff00000 00000000 80000000 00000000 3ff00000 00000000
+addd z eq - 3ff00000 00000000 80000000 00000000 3ff00000 00000000
+addd n eq - bff00000 00000000 80000000 00000000 bff00000 00000000
+addd m eq - bff00000 00000000 80000000 00000000 bff00000 00000000
+addd p eq - bff00000 00000000 80000000 00000000 bff00000 00000000
+addd z eq - bff00000 00000000 80000000 00000000 bff00000 00000000
+addd n eq - 00000000 00000000 3ff00000 00000000 3ff00000 00000000
+addd m eq - 00000000 00000000 3ff00000 00000000 3ff00000 00000000
+addd p eq - 00000000 00000000 3ff00000 00000000 3ff00000 00000000
+addd z eq - 00000000 00000000 3ff00000 00000000 3ff00000 00000000
+addd n eq - 80000000 00000000 bff00000 00000000 bff00000 00000000
+addd m eq - 80000000 00000000 bff00000 00000000 bff00000 00000000
+addd p eq - 80000000 00000000 bff00000 00000000 bff00000 00000000
+addd z eq - 80000000 00000000 bff00000 00000000 bff00000 00000000
+addd n eq - 00000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd m eq - 00000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd p eq - 00000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd z eq - 00000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd n eq - 80000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd m eq - 80000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd p eq - 80000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd z eq - 80000000 00000000 000fffff ffffffff 000fffff ffffffff
+addd n eq - 00000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd m eq - 00000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd p eq - 00000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd z eq - 00000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd n eq - 80000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd m eq - 80000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd p eq - 80000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd z eq - 80000000 00000000 800fffff ffffffff 800fffff ffffffff
+addd n eq - 00000000 00000003 00000000 00000000 00000000 00000003
+addd m eq - 00000000 00000003 00000000 00000000 00000000 00000003
+addd p eq - 00000000 00000003 00000000 00000000 00000000 00000003
+addd z eq - 00000000 00000003 00000000 00000000 00000000 00000003
+addd n eq - 00000000 00000003 80000000 00000000 00000000 00000003
+addd m eq - 00000000 00000003 80000000 00000000 00000000 00000003
+addd p eq - 00000000 00000003 80000000 00000000 00000000 00000003
+addd z eq - 00000000 00000003 80000000 00000000 00000000 00000003
+addd n eq - 80000000 00000003 00000000 00000000 80000000 00000003
+addd m eq - 80000000 00000003 00000000 00000000 80000000 00000003
+addd p eq - 80000000 00000003 00000000 00000000 80000000 00000003
+addd z eq - 80000000 00000003 00000000 00000000 80000000 00000003
+addd n eq - 80000000 00000003 80000000 00000000 80000000 00000003
+addd m eq - 80000000 00000003 80000000 00000000 80000000 00000003
+addd p eq - 80000000 00000003 80000000 00000000 80000000 00000003
+addd z eq - 80000000 00000003 80000000 00000000 80000000 00000003
+addd n eq - 80000000 00000000 80100000 00000000 80100000 00000000
+addd m eq - 80000000 00000000 80100000 00000000 80100000 00000000
+addd p eq - 80000000 00000000 80100000 00000000 80100000 00000000
+addd z eq - 80000000 00000000 80100000 00000000 80100000 00000000
+addd n eq - 00100000 00000000 00000000 00000000 00100000 00000000
+addd m eq - 00100000 00000000 00000000 00000000 00100000 00000000
+addd p eq - 00100000 00000000 00000000 00000000 00100000 00000000
+addd z eq - 00100000 00000000 00000000 00000000 00100000 00000000
+addd n eq - 00000000 00000000 80100000 00000000 80100000 00000000
+addd m eq - 00000000 00000000 80100000 00000000 80100000 00000000
+addd p eq - 00000000 00000000 80100000 00000000 80100000 00000000
+addd z eq - 00000000 00000000 80100000 00000000 80100000 00000000
+addd n eq - 80100000 00000000 00000000 00000000 80100000 00000000
+addd m eq - 80100000 00000000 00000000 00000000 80100000 00000000
+addd p eq - 80100000 00000000 00000000 00000000 80100000 00000000
+addd z eq - 80100000 00000000 00000000 00000000 80100000 00000000
+addd n eq - 00000000 00000000 80000000 00000000 00000000 00000000
+addd z eq - 00000000 00000000 80000000 00000000 00000000 00000000
+addd p eq - 00000000 00000000 80000000 00000000 00000000 00000000
+addd n eq - 80000000 00000000 00000000 00000000 00000000 00000000
+addd z eq - 80000000 00000000 00000000 00000000 00000000 00000000
+addd p eq - 80000000 00000000 00000000 00000000 00000000 00000000
+addd m eq - 00000000 00000000 80000000 00000000 80000000 00000000
+addd m eq - 80000000 00000000 00000000 00000000 80000000 00000000
+addd n eq - 00000000 00000000 00000000 00000000 00000000 00000000
+addd m eq - 00000000 00000000 00000000 00000000 00000000 00000000
+addd p eq - 00000000 00000000 00000000 00000000 00000000 00000000
+addd z eq - 00000000 00000000 00000000 00000000 00000000 00000000
+addd n eq - 80000000 00000000 80000000 00000000 80000000 00000000
+addd m eq - 80000000 00000000 80000000 00000000 80000000 00000000
+addd p eq - 80000000 00000000 80000000 00000000 80000000 00000000
+addd z eq - 80000000 00000000 80000000 00000000 80000000 00000000
+addd n eq xo 7fe00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd p eq xo 7fe00000 00000000 7fe00000 00000000 7ff00000 00000000
+addd z eq xo 7fe00000 00000000 7fe00000 00000000 7fefffff ffffffff
+addd m eq xo 7fe00000 00000000 7fe00000 00000000 7fefffff ffffffff
+addd n eq xo ffe00000 00000000 ffe00000 00000000 fff00000 00000000
+addd m eq xo ffe00000 00000000 ffe00000 00000000 fff00000 00000000
+addd z eq xo ffe00000 00000000 ffe00000 00000000 ffefffff ffffffff
+addd p eq xo ffe00000 00000000 ffe00000 00000000 ffefffff ffffffff
+addd n eq - 7fdfffff fffffffe 7fdfffff fffffffe 7fefffff fffffffe
+addd m eq - 7fdfffff fffffffe 7fdfffff fffffffe 7fefffff fffffffe
+addd p eq - 7fdfffff fffffffe 7fdfffff fffffffe 7fefffff fffffffe
+addd z eq - 7fdfffff fffffffe 7fdfffff fffffffe 7fefffff fffffffe
+addd n eq - ffdfffff fffffffe ffdfffff fffffffe ffefffff fffffffe
+addd m eq - ffdfffff fffffffe ffdfffff fffffffe ffefffff fffffffe
+addd p eq - ffdfffff fffffffe ffdfffff fffffffe ffefffff fffffffe
+addd z eq - ffdfffff fffffffe ffdfffff fffffffe ffefffff fffffffe
+addd n eq xo 7fefffff fffffffe 7fefffff fffffffe 7ff00000 00000000
+addd p eq xo 7fefffff fffffffe 7fefffff fffffffe 7ff00000 00000000
+addd z eq xo 7fefffff fffffffe 7fefffff fffffffe 7fefffff ffffffff
+addd m eq xo 7fefffff fffffffe 7fefffff fffffffe 7fefffff ffffffff
+addd n eq xo ffefffff fffffffe ffefffff fffffffe fff00000 00000000
+addd m eq xo ffefffff fffffffe ffefffff fffffffe fff00000 00000000
+addd z eq xo ffefffff fffffffe ffefffff fffffffe ffefffff ffffffff
+addd p eq xo ffefffff fffffffe ffefffff fffffffe ffefffff ffffffff
+addd n eq - 40080000 00000000 40080000 00000000 40180000 00000000
+addd m eq - 40080000 00000000 40080000 00000000 40180000 00000000
+addd p eq - 40080000 00000000 40080000 00000000 40180000 00000000
+addd z eq - 40080000 00000000 40080000 00000000 40180000 00000000
+addd n eq - 00100000 00000000 00100000 00000000 00200000 00000000
+addd m eq - 00100000 00000000 00100000 00000000 00200000 00000000
+addd p eq - 00100000 00000000 00100000 00000000 00200000 00000000
+addd z eq - 00100000 00000000 00100000 00000000 00200000 00000000
+addd n eq - 7fd00000 00000000 7fd00000 00000000 7fe00000 00000000
+addd m eq - 7fd00000 00000000 7fd00000 00000000 7fe00000 00000000
+addd p eq - 7fd00000 00000000 7fd00000 00000000 7fe00000 00000000
+addd z eq - 7fd00000 00000000 7fd00000 00000000 7fe00000 00000000
+addd n eq - 000fffff ffffffff 000fffff ffffffff 001fffff fffffffe
+addd m eq - 000fffff ffffffff 000fffff ffffffff 001fffff fffffffe
+addd p eq - 000fffff ffffffff 000fffff ffffffff 001fffff fffffffe
+addd z eq - 000fffff ffffffff 000fffff ffffffff 001fffff fffffffe
+addd n eq - 800fffff ffffffff 800fffff ffffffff 801fffff fffffffe
+addd m eq - 800fffff ffffffff 800fffff ffffffff 801fffff fffffffe
+addd p eq - 800fffff ffffffff 800fffff ffffffff 801fffff fffffffe
+addd z eq - 800fffff ffffffff 800fffff ffffffff 801fffff fffffffe
+addd n eq - 00000000 00000004 00000000 00000004 00000000 00000008
+addd m eq - 00000000 00000004 00000000 00000004 00000000 00000008
+addd p eq - 00000000 00000004 00000000 00000004 00000000 00000008
+addd z eq - 00000000 00000004 00000000 00000004 00000000 00000008
+addd n eq - 80000000 00000004 80000000 00000004 80000000 00000008
+addd m eq - 80000000 00000004 80000000 00000004 80000000 00000008
+addd p eq - 80000000 00000004 80000000 00000004 80000000 00000008
+addd z eq - 80000000 00000004 80000000 00000004 80000000 00000008
+addd n eq - 00000000 00000001 00000000 00000001 00000000 00000002
+addd m eq - 00000000 00000001 00000000 00000001 00000000 00000002
+addd p eq - 00000000 00000001 00000000 00000001 00000000 00000002
+addd z eq - 00000000 00000001 00000000 00000001 00000000 00000002
+addd n eq - 80000000 00000001 80000000 00000001 80000000 00000002
+addd m eq - 80000000 00000001 80000000 00000001 80000000 00000002
+addd p eq - 80000000 00000001 80000000 00000001 80000000 00000002
+addd z eq - 80000000 00000001 80000000 00000001 80000000 00000002
+addd n eq - 7fe00000 00000000 ffe00000 00000000 00000000 00000000
+addd z eq - 7fe00000 00000000 ffe00000 00000000 00000000 00000000
+addd p eq - 7fe00000 00000000 ffe00000 00000000 00000000 00000000
+addd n eq - ffdfffff fffffffe 7fdfffff fffffffe 00000000 00000000
+addd z eq - ffdfffff fffffffe 7fdfffff fffffffe 00000000 00000000
+addd p eq - ffdfffff fffffffe 7fdfffff fffffffe 00000000 00000000
+addd n eq - 3ff00000 00000000 bff00000 00000000 00000000 00000000
+addd z eq - 3ff00000 00000000 bff00000 00000000 00000000 00000000
+addd p eq - 3ff00000 00000000 bff00000 00000000 00000000 00000000
+addd n eq - c0080000 00000000 40080000 00000000 00000000 00000000
+addd z eq - c0080000 00000000 40080000 00000000 00000000 00000000
+addd p eq - c0080000 00000000 40080000 00000000 00000000 00000000
+addd n eq - 00100000 00000000 80100000 00000000 00000000 00000000
+addd z eq - 00100000 00000000 80100000 00000000 00000000 00000000
+addd p eq - 00100000 00000000 80100000 00000000 00000000 00000000
+addd n eq - 80100000 00000000 00100000 00000000 00000000 00000000
+addd z eq - 80100000 00000000 00100000 00000000 00000000 00000000
+addd p eq - 80100000 00000000 00100000 00000000 00000000 00000000
+addd n eq - 000fffff fffffffc 800fffff fffffffc 00000000 00000000
+addd z eq - 000fffff fffffffc 800fffff fffffffc 00000000 00000000
+addd p eq - 000fffff fffffffc 800fffff fffffffc 00000000 00000000
+addd n eq - 800fffff ffffffff 000fffff ffffffff 00000000 00000000
+addd z eq - 800fffff ffffffff 000fffff ffffffff 00000000 00000000
+addd p eq - 800fffff ffffffff 000fffff ffffffff 00000000 00000000
+addd n eq - 00000000 00000001 80000000 00000001 00000000 00000000
+addd z eq - 00000000 00000001 80000000 00000001 00000000 00000000
+addd p eq - 00000000 00000001 80000000 00000001 00000000 00000000
+addd n eq - 80000000 00000001 00000000 00000001 00000000 00000000
+addd z eq - 80000000 00000001 00000000 00000001 00000000 00000000
+addd p eq - 80000000 00000001 00000000 00000001 00000000 00000000
+addd n eq - 7fefffff ffffffff ffefffff ffffffff 00000000 00000000
+addd z eq - 7fefffff ffffffff ffefffff ffffffff 00000000 00000000
+addd p eq - 7fefffff ffffffff ffefffff ffffffff 00000000 00000000
+addd m eq - 7fe00000 00000000 ffe00000 00000000 80000000 00000000
+addd m eq - ffdfffff fffffffe 7fdfffff fffffffe 80000000 00000000
+addd m eq - 3ff00000 00000000 bff00000 00000000 80000000 00000000
+addd m eq - c0080000 00000000 40080000 00000000 80000000 00000000
+addd m eq - 00100000 00000000 80100000 00000000 80000000 00000000
+addd m eq - 80100000 00000000 00100000 00000000 80000000 00000000
+addd m eq - 000fffff fffffffc 800fffff fffffffc 80000000 00000000
+addd m eq - 800fffff ffffffff 000fffff ffffffff 80000000 00000000
+addd m eq - 00000000 00000001 80000000 00000001 80000000 00000000
+addd m eq - 80000000 00000001 00000000 00000001 80000000 00000000
+addd m eq - 7fefffff ffffffff ffefffff ffffffff 80000000 00000000
+addd n eq - 3ff00000 00000001 bff00000 00000000 3cb00000 00000000
+addd m eq - 3ff00000 00000001 bff00000 00000000 3cb00000 00000000
+addd p eq - 3ff00000 00000001 bff00000 00000000 3cb00000 00000000
+addd z eq - 3ff00000 00000001 bff00000 00000000 3cb00000 00000000
+addd n eq - bff00000 00000001 3ff00000 00000000 bcb00000 00000000
+addd m eq - bff00000 00000001 3ff00000 00000000 bcb00000 00000000
+addd p eq - bff00000 00000001 3ff00000 00000000 bcb00000 00000000
+addd z eq - bff00000 00000001 3ff00000 00000000 bcb00000 00000000
+addd n eq - 3ff00000 00000001 bff00000 00000002 bcb00000 00000000
+addd m eq - 3ff00000 00000001 bff00000 00000002 bcb00000 00000000
+addd p eq - 3ff00000 00000001 bff00000 00000002 bcb00000 00000000
+addd z eq - 3ff00000 00000001 bff00000 00000002 bcb00000 00000000
+addd n eq - bff00000 00000001 3ff00000 00000002 3cb00000 00000000
+addd m eq - bff00000 00000001 3ff00000 00000002 3cb00000 00000000
+addd p eq - bff00000 00000001 3ff00000 00000002 3cb00000 00000000
+addd z eq - bff00000 00000001 3ff00000 00000002 3cb00000 00000000
+addd n eq - 40000000 00000000 c0000000 00000001 bcc00000 00000000
+addd m eq - 40000000 00000000 c0000000 00000001 bcc00000 00000000
+addd p eq - 40000000 00000000 c0000000 00000001 bcc00000 00000000
+addd z eq - 40000000 00000000 c0000000 00000001 bcc00000 00000000
+addd n eq - c0000000 00000000 40000000 00000001 3cc00000 00000000
+addd m eq - c0000000 00000000 40000000 00000001 3cc00000 00000000
+addd p eq - c0000000 00000000 40000000 00000001 3cc00000 00000000
+addd z eq - c0000000 00000000 40000000 00000001 3cc00000 00000000
+addd n eq - 40000000 00000004 c0000000 00000003 3cc00000 00000000
+addd m eq - 40000000 00000004 c0000000 00000003 3cc00000 00000000
+addd p eq - 40000000 00000004 c0000000 00000003 3cc00000 00000000
+addd z eq - 40000000 00000004 c0000000 00000003 3cc00000 00000000
+addd n eq - c0000000 00000004 40000000 00000003 bcc00000 00000000
+addd m eq - c0000000 00000004 40000000 00000003 bcc00000 00000000
+addd p eq - c0000000 00000004 40000000 00000003 bcc00000 00000000
+addd z eq - c0000000 00000004 40000000 00000003 bcc00000 00000000
+addd n eq - 400fffff ffffffff c00fffff fffffffe 3cc00000 00000000
+addd m eq - 400fffff ffffffff c00fffff fffffffe 3cc00000 00000000
+addd p eq - 400fffff ffffffff c00fffff fffffffe 3cc00000 00000000
+addd z eq - 400fffff ffffffff c00fffff fffffffe 3cc00000 00000000
+addd n eq - c00fffff ffffffff 400fffff fffffffe bcc00000 00000000
+addd m eq - c00fffff ffffffff 400fffff fffffffe bcc00000 00000000
+addd p eq - c00fffff ffffffff 400fffff fffffffe bcc00000 00000000
+addd z eq - c00fffff ffffffff 400fffff fffffffe bcc00000 00000000
+addd n eq - 3fffffff fffffffc bfffffff fffffffd bcb00000 00000000
+addd m eq - 3fffffff fffffffc bfffffff fffffffd bcb00000 00000000
+addd p eq - 3fffffff fffffffc bfffffff fffffffd bcb00000 00000000
+addd z eq - 3fffffff fffffffc bfffffff fffffffd bcb00000 00000000
+addd n eq - bfffffff fffffffc 3fffffff fffffffd 3cb00000 00000000
+addd m eq - bfffffff fffffffc 3fffffff fffffffd 3cb00000 00000000
+addd p eq - bfffffff fffffffc 3fffffff fffffffd 3cb00000 00000000
+addd z eq - bfffffff fffffffc 3fffffff fffffffd 3cb00000 00000000
+addd n eq - 7fe00000 00000001 ffe00000 00000000 7ca00000 00000000
+addd m eq - 7fe00000 00000001 ffe00000 00000000 7ca00000 00000000
+addd p eq - 7fe00000 00000001 ffe00000 00000000 7ca00000 00000000
+addd z eq - 7fe00000 00000001 ffe00000 00000000 7ca00000 00000000
+addd n eq - ffe00000 00000001 7fe00000 00000000 fca00000 00000000
+addd m eq - ffe00000 00000001 7fe00000 00000000 fca00000 00000000
+addd p eq - ffe00000 00000001 7fe00000 00000000 fca00000 00000000
+addd z eq - ffe00000 00000001 7fe00000 00000000 fca00000 00000000
+addd n eq - 7fe00000 00000001 ffe00000 00000002 fca00000 00000000
+addd m eq - 7fe00000 00000001 ffe00000 00000002 fca00000 00000000
+addd p eq - 7fe00000 00000001 ffe00000 00000002 fca00000 00000000
+addd z eq - 7fe00000 00000001 ffe00000 00000002 fca00000 00000000
+addd n eq - ffe00000 00000001 7fe00000 00000002 7ca00000 00000000
+addd m eq - ffe00000 00000001 7fe00000 00000002 7ca00000 00000000
+addd p eq - ffe00000 00000001 7fe00000 00000002 7ca00000 00000000
+addd z eq - ffe00000 00000001 7fe00000 00000002 7ca00000 00000000
+addd n eq - 7fd00000 00000000 ffd00000 00000001 fc900000 00000000
+addd m eq - 7fd00000 00000000 ffd00000 00000001 fc900000 00000000
+addd p eq - 7fd00000 00000000 ffd00000 00000001 fc900000 00000000
+addd z eq - 7fd00000 00000000 ffd00000 00000001 fc900000 00000000
+addd n eq - ffd00000 00000000 7fd00000 00000001 7c900000 00000000
+addd m eq - ffd00000 00000000 7fd00000 00000001 7c900000 00000000
+addd p eq - ffd00000 00000000 7fd00000 00000001 7c900000 00000000
+addd z eq - ffd00000 00000000 7fd00000 00000001 7c900000 00000000
+addd n eq - 7fd00000 00000004 ffd00000 00000003 7c900000 00000000
+addd m eq - 7fd00000 00000004 ffd00000 00000003 7c900000 00000000
+addd p eq - 7fd00000 00000004 ffd00000 00000003 7c900000 00000000
+addd z eq - 7fd00000 00000004 ffd00000 00000003 7c900000 00000000
+addd n eq - ffd00000 00000004 7fd00000 00000003 fc900000 00000000
+addd m eq - ffd00000 00000004 7fd00000 00000003 fc900000 00000000
+addd p eq - ffd00000 00000004 7fd00000 00000003 fc900000 00000000
+addd z eq - ffd00000 00000004 7fd00000 00000003 fc900000 00000000
+addd n eq - 7fcfffff ffffffff ffcfffff fffffffe 7c800000 00000000
+addd m eq - 7fcfffff ffffffff ffcfffff fffffffe 7c800000 00000000
+addd p eq - 7fcfffff ffffffff ffcfffff fffffffe 7c800000 00000000
+addd z eq - 7fcfffff ffffffff ffcfffff fffffffe 7c800000 00000000
+addd n eq - ffcfffff ffffffff 7fcfffff fffffffe fc800000 00000000
+addd m eq - ffcfffff ffffffff 7fcfffff fffffffe fc800000 00000000
+addd p eq - ffcfffff ffffffff 7fcfffff fffffffe fc800000 00000000
+addd z eq - ffcfffff ffffffff 7fcfffff fffffffe fc800000 00000000
+addd n eq - ffefffff fffffffe 7fefffff ffffffff 7ca00000 00000000
+addd m eq - ffefffff fffffffe 7fefffff ffffffff 7ca00000 00000000
+addd p eq - ffefffff fffffffe 7fefffff ffffffff 7ca00000 00000000
+addd z eq - ffefffff fffffffe 7fefffff ffffffff 7ca00000 00000000
+addd n eq - 7fefffff fffffffe ffefffff ffffffff fca00000 00000000
+addd m eq - 7fefffff fffffffe ffefffff ffffffff fca00000 00000000
+addd p eq - 7fefffff fffffffe ffefffff ffffffff fca00000 00000000
+addd z eq - 7fefffff fffffffe ffefffff ffffffff fca00000 00000000
+addd n eq - 80100000 00000001 00100000 00000000 80000000 00000001
+addd m eq - 80100000 00000001 00100000 00000000 80000000 00000001
+addd p eq - 80100000 00000001 00100000 00000000 80000000 00000001
+addd z eq - 80100000 00000001 00100000 00000000 80000000 00000001
+addd n eq - 00100000 00000001 80100000 00000000 00000000 00000001
+addd m eq - 00100000 00000001 80100000 00000000 00000000 00000001
+addd p eq - 00100000 00000001 80100000 00000000 00000000 00000001
+addd z eq - 00100000 00000001 80100000 00000000 00000000 00000001
+addd n eq - 800fffff ffffffff 00100000 00000000 00000000 00000001
+addd m eq - 800fffff ffffffff 00100000 00000000 00000000 00000001
+addd p eq - 800fffff ffffffff 00100000 00000000 00000000 00000001
+addd z eq - 800fffff ffffffff 00100000 00000000 00000000 00000001
+addd n eq - 000fffff ffffffff 80100000 00000000 80000000 00000001
+addd m eq - 000fffff ffffffff 80100000 00000000 80000000 00000001
+addd p eq - 000fffff ffffffff 80100000 00000000 80000000 00000001
+addd z eq - 000fffff ffffffff 80100000 00000000 80000000 00000001
+addd n eq - 00100000 00000001 80100000 00000002 80000000 00000001
+addd m eq - 00100000 00000001 80100000 00000002 80000000 00000001
+addd p eq - 00100000 00000001 80100000 00000002 80000000 00000001
+addd z eq - 00100000 00000001 80100000 00000002 80000000 00000001
+addd n eq - 80100000 00000001 00100000 00000002 00000000 00000001
+addd m eq - 80100000 00000001 00100000 00000002 00000000 00000001
+addd p eq - 80100000 00000001 00100000 00000002 00000000 00000001
+addd z eq - 80100000 00000001 00100000 00000002 00000000 00000001
+addd n eq - 000fffff ffffffff 800fffff fffffffe 00000000 00000001
+addd m eq - 000fffff ffffffff 800fffff fffffffe 00000000 00000001
+addd p eq - 000fffff ffffffff 800fffff fffffffe 00000000 00000001
+addd z eq - 000fffff ffffffff 800fffff fffffffe 00000000 00000001
+addd n eq - 800fffff ffffffff 000fffff fffffffe 80000000 00000001
+addd m eq - 800fffff ffffffff 000fffff fffffffe 80000000 00000001
+addd p eq - 800fffff ffffffff 000fffff fffffffe 80000000 00000001
+addd z eq - 800fffff ffffffff 000fffff fffffffe 80000000 00000001
+addd n eq - 000fffff fffffffd 800fffff fffffffe 80000000 00000001
+addd m eq - 000fffff fffffffd 800fffff fffffffe 80000000 00000001
+addd p eq - 000fffff fffffffd 800fffff fffffffe 80000000 00000001
+addd z eq - 000fffff fffffffd 800fffff fffffffe 80000000 00000001
+addd n eq - 800fffff fffffffd 000fffff fffffffe 00000000 00000001
+addd m eq - 800fffff fffffffd 000fffff fffffffe 00000000 00000001
+addd p eq - 800fffff fffffffd 000fffff fffffffe 00000000 00000001
+addd z eq - 800fffff fffffffd 000fffff fffffffe 00000000 00000001
+addd n eq - 00000000 00000002 80000000 00000001 00000000 00000001
+addd m eq - 00000000 00000002 80000000 00000001 00000000 00000001
+addd p eq - 00000000 00000002 80000000 00000001 00000000 00000001
+addd z eq - 00000000 00000002 80000000 00000001 00000000 00000001
+addd n eq - 80000000 00000002 00000000 00000001 80000000 00000001
+addd m eq - 80000000 00000002 00000000 00000001 80000000 00000001
+addd p eq - 80000000 00000002 00000000 00000001 80000000 00000001
+addd z eq - 80000000 00000002 00000000 00000001 80000000 00000001
+addd n eq - 00000000 00000003 80000000 00000002 00000000 00000001
+addd m eq - 00000000 00000003 80000000 00000002 00000000 00000001
+addd p eq - 00000000 00000003 80000000 00000002 00000000 00000001
+addd z eq - 00000000 00000003 80000000 00000002 00000000 00000001
+addd n eq - 80000000 00000003 00000000 00000002 80000000 00000001
+addd m eq - 80000000 00000003 00000000 00000002 80000000 00000001
+addd p eq - 80000000 00000003 00000000 00000002 80000000 00000001
+addd z eq - 80000000 00000003 00000000 00000002 80000000 00000001
+addd n eq - 40000000 00000000 bfffffff ffffffff 3cb00000 00000000
+addd m eq - 40000000 00000000 bfffffff ffffffff 3cb00000 00000000
+addd p eq - 40000000 00000000 bfffffff ffffffff 3cb00000 00000000
+addd z eq - 40000000 00000000 bfffffff ffffffff 3cb00000 00000000
+addd n eq - c0000000 00000000 3fffffff ffffffff bcb00000 00000000
+addd m eq - c0000000 00000000 3fffffff ffffffff bcb00000 00000000
+addd p eq - c0000000 00000000 3fffffff ffffffff bcb00000 00000000
+addd z eq - c0000000 00000000 3fffffff ffffffff bcb00000 00000000
+addd n eq - bfffffff ffffffff 40000000 00000000 3cb00000 00000000
+addd m eq - bfffffff ffffffff 40000000 00000000 3cb00000 00000000
+addd p eq - bfffffff ffffffff 40000000 00000000 3cb00000 00000000
+addd z eq - bfffffff ffffffff 40000000 00000000 3cb00000 00000000
+addd n eq - 3fffffff ffffffff c0000000 00000000 bcb00000 00000000
+addd m eq - 3fffffff ffffffff c0000000 00000000 bcb00000 00000000
+addd p eq - 3fffffff ffffffff c0000000 00000000 bcb00000 00000000
+addd z eq - 3fffffff ffffffff c0000000 00000000 bcb00000 00000000
+addd n eq - 40100000 00000001 c00fffff ffffffff 3cd80000 00000000
+addd m eq - 40100000 00000001 c00fffff ffffffff 3cd80000 00000000
+addd p eq - 40100000 00000001 c00fffff ffffffff 3cd80000 00000000
+addd z eq - 40100000 00000001 c00fffff ffffffff 3cd80000 00000000
+addd n eq - c0100000 00000001 400fffff ffffffff bcd80000 00000000
+addd m eq - c0100000 00000001 400fffff ffffffff bcd80000 00000000
+addd p eq - c0100000 00000001 400fffff ffffffff bcd80000 00000000
+addd z eq - c0100000 00000001 400fffff ffffffff bcd80000 00000000
+addd n eq - 400fffff ffffffff c0100000 00000002 bce40000 00000000
+addd m eq - 400fffff ffffffff c0100000 00000002 bce40000 00000000
+addd p eq - 400fffff ffffffff c0100000 00000002 bce40000 00000000
+addd z eq - 400fffff ffffffff c0100000 00000002 bce40000 00000000
+addd n eq - c00fffff ffffffff 40100000 00000002 3ce40000 00000000
+addd m eq - c00fffff ffffffff 40100000 00000002 3ce40000 00000000
+addd p eq - c00fffff ffffffff 40100000 00000002 3ce40000 00000000
+addd z eq - c00fffff ffffffff 40100000 00000002 3ce40000 00000000
+addd n eq - 40000000 00000001 bff00000 00000001 3ff00000 00000001
+addd m eq - 40000000 00000001 bff00000 00000001 3ff00000 00000001
+addd p eq - 40000000 00000001 bff00000 00000001 3ff00000 00000001
+addd z eq - 40000000 00000001 bff00000 00000001 3ff00000 00000001
+addd n eq - c0000000 00000001 3ff00000 00000001 bff00000 00000001
+addd m eq - c0000000 00000001 3ff00000 00000001 bff00000 00000001
+addd p eq - c0000000 00000001 3ff00000 00000001 bff00000 00000001
+addd z eq - c0000000 00000001 3ff00000 00000001 bff00000 00000001
+addd n eq - 40000000 00000002 bff00000 00000001 3ff00000 00000003
+addd m eq - 40000000 00000002 bff00000 00000001 3ff00000 00000003
+addd p eq - 40000000 00000002 bff00000 00000001 3ff00000 00000003
+addd z eq - 40000000 00000002 bff00000 00000001 3ff00000 00000003
+addd n eq - c0000000 00000002 3ff00000 00000001 bff00000 00000003
+addd m eq - c0000000 00000002 3ff00000 00000001 bff00000 00000003
+addd p eq - c0000000 00000002 3ff00000 00000001 bff00000 00000003
+addd z eq - c0000000 00000002 3ff00000 00000001 bff00000 00000003
+addd n eq - 40000000 00000002 bff00000 00000003 3ff00000 00000001
+addd m eq - 40000000 00000002 bff00000 00000003 3ff00000 00000001
+addd p eq - 40000000 00000002 bff00000 00000003 3ff00000 00000001
+addd z eq - 40000000 00000002 bff00000 00000003 3ff00000 00000001
+addd n eq - c0000000 00000002 3ff00000 00000003 bff00000 00000001
+addd m eq - c0000000 00000002 3ff00000 00000003 bff00000 00000001
+addd p eq - c0000000 00000002 3ff00000 00000003 bff00000 00000001
+addd z eq - c0000000 00000002 3ff00000 00000003 bff00000 00000001
+addd n eq - 7fd00000 00000000 ffcfffff ffffffff 7c800000 00000000
+addd m eq - 7fd00000 00000000 ffcfffff ffffffff 7c800000 00000000
+addd p eq - 7fd00000 00000000 ffcfffff ffffffff 7c800000 00000000
+addd z eq - 7fd00000 00000000 ffcfffff ffffffff 7c800000 00000000
+addd n eq - ffd00000 00000000 7fcfffff ffffffff fc800000 00000000
+addd m eq - ffd00000 00000000 7fcfffff ffffffff fc800000 00000000
+addd p eq - ffd00000 00000000 7fcfffff ffffffff fc800000 00000000
+addd z eq - ffd00000 00000000 7fcfffff ffffffff fc800000 00000000
+addd n eq - ffdfffff ffffffff 7fe00000 00000000 7c900000 00000000
+addd m eq - ffdfffff ffffffff 7fe00000 00000000 7c900000 00000000
+addd p eq - ffdfffff ffffffff 7fe00000 00000000 7c900000 00000000
+addd z eq - ffdfffff ffffffff 7fe00000 00000000 7c900000 00000000
+addd n eq - 7fdfffff ffffffff ffe00000 00000000 fc900000 00000000
+addd m eq - 7fdfffff ffffffff ffe00000 00000000 fc900000 00000000
+addd p eq - 7fdfffff ffffffff ffe00000 00000000 fc900000 00000000
+addd z eq - 7fdfffff ffffffff ffe00000 00000000 fc900000 00000000
+addd n eq - 7fb00000 00000001 ffafffff ffffffff 7c780000 00000000
+addd m eq - 7fb00000 00000001 ffafffff ffffffff 7c780000 00000000
+addd p eq - 7fb00000 00000001 ffafffff ffffffff 7c780000 00000000
+addd z eq - 7fb00000 00000001 ffafffff ffffffff 7c780000 00000000
+addd n eq - ffb00000 00000001 7fafffff ffffffff fc780000 00000000
+addd m eq - ffb00000 00000001 7fafffff ffffffff fc780000 00000000
+addd p eq - ffb00000 00000001 7fafffff ffffffff fc780000 00000000
+addd z eq - ffb00000 00000001 7fafffff ffffffff fc780000 00000000
+addd n eq - 7fcfffff ffffffff ffd00000 00000002 fca40000 00000000
+addd m eq - 7fcfffff ffffffff ffd00000 00000002 fca40000 00000000
+addd p eq - 7fcfffff ffffffff ffd00000 00000002 fca40000 00000000
+addd z eq - 7fcfffff ffffffff ffd00000 00000002 fca40000 00000000
+addd n eq - ffcfffff ffffffff 7fd00000 00000002 7ca40000 00000000
+addd m eq - ffcfffff ffffffff 7fd00000 00000002 7ca40000 00000000
+addd p eq - ffcfffff ffffffff 7fd00000 00000002 7ca40000 00000000
+addd z eq - ffcfffff ffffffff 7fd00000 00000002 7ca40000 00000000
+addd n eq - 7fd00000 00000001 ffe00000 00000001 ffd00000 00000001
+addd m eq - 7fd00000 00000001 ffe00000 00000001 ffd00000 00000001
+addd p eq - 7fd00000 00000001 ffe00000 00000001 ffd00000 00000001
+addd z eq - 7fd00000 00000001 ffe00000 00000001 ffd00000 00000001
+addd n eq - ffd00000 00000001 7fe00000 00000001 7fd00000 00000001
+addd m eq - ffd00000 00000001 7fe00000 00000001 7fd00000 00000001
+addd p eq - ffd00000 00000001 7fe00000 00000001 7fd00000 00000001
+addd z eq - ffd00000 00000001 7fe00000 00000001 7fd00000 00000001
+addd n eq - 7fe00000 00000002 ffd00000 00000001 7fd00000 00000003
+addd m eq - 7fe00000 00000002 ffd00000 00000001 7fd00000 00000003
+addd p eq - 7fe00000 00000002 ffd00000 00000001 7fd00000 00000003
+addd z eq - 7fe00000 00000002 ffd00000 00000001 7fd00000 00000003
+addd n eq - ffe00000 00000002 7fd00000 00000001 ffd00000 00000003
+addd m eq - ffe00000 00000002 7fd00000 00000001 ffd00000 00000003
+addd p eq - ffe00000 00000002 7fd00000 00000001 ffd00000 00000003
+addd z eq - ffe00000 00000002 7fd00000 00000001 ffd00000 00000003
+addd n eq - 7fd00000 00000002 ffc00000 00000003 7fc00000 00000001
+addd m eq - 7fd00000 00000002 ffc00000 00000003 7fc00000 00000001
+addd p eq - 7fd00000 00000002 ffc00000 00000003 7fc00000 00000001
+addd z eq - 7fd00000 00000002 ffc00000 00000003 7fc00000 00000001
+addd n eq - ffd00000 00000002 7fc00000 00000003 ffc00000 00000001
+addd m eq - ffd00000 00000002 7fc00000 00000003 ffc00000 00000001
+addd p eq - ffd00000 00000002 7fc00000 00000003 ffc00000 00000001
+addd z eq - ffd00000 00000002 7fc00000 00000003 ffc00000 00000001
+addd n eq - 00200000 00000000 801fffff ffffffff 00000000 00000001
+addd m eq - 00200000 00000000 801fffff ffffffff 00000000 00000001
+addd p eq - 00200000 00000000 801fffff ffffffff 00000000 00000001
+addd z eq - 00200000 00000000 801fffff ffffffff 00000000 00000001
+addd n eq - 80200000 00000000 001fffff ffffffff 80000000 00000001
+addd m eq - 80200000 00000000 001fffff ffffffff 80000000 00000001
+addd p eq - 80200000 00000000 001fffff ffffffff 80000000 00000001
+addd z eq - 80200000 00000000 001fffff ffffffff 80000000 00000001
+addd n eq - 801fffff ffffffff 00200000 00000000 00000000 00000001
+addd m eq - 801fffff ffffffff 00200000 00000000 00000000 00000001
+addd p eq - 801fffff ffffffff 00200000 00000000 00000000 00000001
+addd z eq - 801fffff ffffffff 00200000 00000000 00000000 00000001
+addd n eq - 001fffff ffffffff 80200000 00000000 80000000 00000001
+addd m eq - 001fffff ffffffff 80200000 00000000 80000000 00000001
+addd p eq - 001fffff ffffffff 80200000 00000000 80000000 00000001
+addd z eq - 001fffff ffffffff 80200000 00000000 80000000 00000001
+addd n eq - 00200000 00000001 801fffff ffffffff 00000000 00000003
+addd m eq - 00200000 00000001 801fffff ffffffff 00000000 00000003
+addd p eq - 00200000 00000001 801fffff ffffffff 00000000 00000003
+addd z eq - 00200000 00000001 801fffff ffffffff 00000000 00000003
+addd n eq - 80200000 00000001 001fffff ffffffff 80000000 00000003
+addd m eq - 80200000 00000001 001fffff ffffffff 80000000 00000003
+addd p eq - 80200000 00000001 001fffff ffffffff 80000000 00000003
+addd z eq - 80200000 00000001 001fffff ffffffff 80000000 00000003
+addd n eq - 00300000 00000000 802fffff ffffffff 00000000 00000002
+addd m eq - 00300000 00000000 802fffff ffffffff 00000000 00000002
+addd p eq - 00300000 00000000 802fffff ffffffff 00000000 00000002
+addd z eq - 00300000 00000000 802fffff ffffffff 00000000 00000002
+addd n eq - 80300000 00000000 002fffff ffffffff 80000000 00000002
+addd m eq - 80300000 00000000 002fffff ffffffff 80000000 00000002
+addd p eq - 80300000 00000000 002fffff ffffffff 80000000 00000002
+addd z eq - 80300000 00000000 002fffff ffffffff 80000000 00000002
+addd n eq - 802fffff ffffffff 00300000 00000000 00000000 00000002
+addd m eq - 802fffff ffffffff 00300000 00000000 00000000 00000002
+addd p eq - 802fffff ffffffff 00300000 00000000 00000000 00000002
+addd z eq - 802fffff ffffffff 00300000 00000000 00000000 00000002
+addd n eq - 002fffff ffffffff 80300000 00000000 80000000 00000002
+addd m eq - 002fffff ffffffff 80300000 00000000 80000000 00000002
+addd p eq - 002fffff ffffffff 80300000 00000000 80000000 00000002
+addd z eq - 002fffff ffffffff 80300000 00000000 80000000 00000002
+addd n eq - 00300000 00000001 802fffff ffffffff 00000000 00000006
+addd m eq - 00300000 00000001 802fffff ffffffff 00000000 00000006
+addd p eq - 00300000 00000001 802fffff ffffffff 00000000 00000006
+addd z eq - 00300000 00000001 802fffff ffffffff 00000000 00000006
+addd n eq - 80300000 00000001 002fffff ffffffff 80000000 00000006
+addd m eq - 80300000 00000001 002fffff ffffffff 80000000 00000006
+addd p eq - 80300000 00000001 002fffff ffffffff 80000000 00000006
+addd z eq - 80300000 00000001 002fffff ffffffff 80000000 00000006
+addd n eq - 001fffff ffffffff 80200000 00000002 80000000 00000005
+addd m eq - 001fffff ffffffff 80200000 00000002 80000000 00000005
+addd p eq - 001fffff ffffffff 80200000 00000002 80000000 00000005
+addd z eq - 001fffff ffffffff 80200000 00000002 80000000 00000005
+addd n eq - 801fffff ffffffff 00200000 00000002 00000000 00000005
+addd m eq - 801fffff ffffffff 00200000 00000002 00000000 00000005
+addd p eq - 801fffff ffffffff 00200000 00000002 00000000 00000005
+addd z eq - 801fffff ffffffff 00200000 00000002 00000000 00000005
+addd n eq - 001fffff ffffffff 80200000 00000004 80000000 00000009
+addd m eq - 001fffff ffffffff 80200000 00000004 80000000 00000009
+addd p eq - 001fffff ffffffff 80200000 00000004 80000000 00000009
+addd z eq - 001fffff ffffffff 80200000 00000004 80000000 00000009
+addd n eq - 801fffff ffffffff 00200000 00000004 00000000 00000009
+addd m eq - 801fffff ffffffff 00200000 00000004 00000000 00000009
+addd p eq - 801fffff ffffffff 00200000 00000004 00000000 00000009
+addd z eq - 801fffff ffffffff 00200000 00000004 00000000 00000009
+addd n eq - 00200000 00000001 80100000 00000001 00100000 00000001
+addd m eq - 00200000 00000001 80100000 00000001 00100000 00000001
+addd p eq - 00200000 00000001 80100000 00000001 00100000 00000001
+addd z eq - 00200000 00000001 80100000 00000001 00100000 00000001
+addd n eq - 80200000 00000001 00100000 00000001 80100000 00000001
+addd m eq - 80200000 00000001 00100000 00000001 80100000 00000001
+addd p eq - 80200000 00000001 00100000 00000001 80100000 00000001
+addd z eq - 80200000 00000001 00100000 00000001 80100000 00000001
+addd n eq - 00200000 00000002 80100000 00000001 00100000 00000003
+addd m eq - 00200000 00000002 80100000 00000001 00100000 00000003
+addd p eq - 00200000 00000002 80100000 00000001 00100000 00000003
+addd z eq - 00200000 00000002 80100000 00000001 00100000 00000003
+addd n eq - 80200000 00000002 00100000 00000001 80100000 00000003
+addd m eq - 80200000 00000002 00100000 00000001 80100000 00000003
+addd p eq - 80200000 00000002 00100000 00000001 80100000 00000003
+addd z eq - 80200000 00000002 00100000 00000001 80100000 00000003
+addd n eq - 00300000 00000002 80200000 00000003 00200000 00000001
+addd m eq - 00300000 00000002 80200000 00000003 00200000 00000001
+addd p eq - 00300000 00000002 80200000 00000003 00200000 00000001
+addd z eq - 00300000 00000002 80200000 00000003 00200000 00000001
+addd n eq - 80300000 00000002 00200000 00000003 80200000 00000001
+addd m eq - 80300000 00000002 00200000 00000003 80200000 00000001
+addd p eq - 80300000 00000002 00200000 00000003 80200000 00000001
+addd z eq - 80300000 00000002 00200000 00000003 80200000 00000001
+addd n eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000000
+addd z eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000000
+addd m eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000000
+addd p eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000001
+addd n eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000000
+addd z eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000000
+addd p eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000000
+addd m eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000001
+addd n eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff ffffffff
+addd z eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff ffffffff
+addd m eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff ffffffff
+addd p eq x 7fdfffff ffffffff 3ff00000 00000000 7fe00000 00000000
+addd n eq x ffdfffff ffffffff bff00000 00000000 ffdfffff ffffffff
+addd z eq x ffdfffff ffffffff bff00000 00000000 ffdfffff ffffffff
+addd p eq x ffdfffff ffffffff bff00000 00000000 ffdfffff ffffffff
+addd m eq x ffdfffff ffffffff bff00000 00000000 ffe00000 00000000
+addd n eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff ffffffff
+addd z eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff ffffffff
+addd m eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff ffffffff
+addd p eq xo 7fefffff ffffffff 3ff00000 00000000 7ff00000 00000000
+addd n eq x ffefffff ffffffff bff00000 00000000 ffefffff ffffffff
+addd z eq x ffefffff ffffffff bff00000 00000000 ffefffff ffffffff
+addd p eq x ffefffff ffffffff bff00000 00000000 ffefffff ffffffff
+addd m eq xo ffefffff ffffffff bff00000 00000000 fff00000 00000000
+addd n eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffe
+addd z eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffe
+addd m eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffe
+addd p eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff ffffffff
+addd n eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffe
+addd z eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffe
+addd p eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffe
+addd m eq x ffefffff fffffffe bff00000 00000000 ffefffff ffffffff
+addd n eq x 00000000 00000001 7fe00000 00000000 7fe00000 00000000
+addd z eq x 00000000 00000001 7fe00000 00000000 7fe00000 00000000
+addd m eq x 00000000 00000001 7fe00000 00000000 7fe00000 00000000
+addd p eq x 00000000 00000001 7fe00000 00000000 7fe00000 00000001
+addd n eq x 80000000 00000001 ffe00000 00000000 ffe00000 00000000
+addd z eq x 80000000 00000001 ffe00000 00000000 ffe00000 00000000
+addd p eq x 80000000 00000001 ffe00000 00000000 ffe00000 00000000
+addd m eq x 80000000 00000001 ffe00000 00000000 ffe00000 00000001
+addd n eq x 00000000 00000001 7fdfffff ffffffff 7fdfffff ffffffff
+addd z eq x 00000000 00000001 7fdfffff ffffffff 7fdfffff ffffffff
+addd m eq x 00000000 00000001 7fdfffff ffffffff 7fdfffff ffffffff
+addd p eq x 00000000 00000001 7fdfffff ffffffff 7fe00000 00000000
+addd n eq x 80000000 00000001 ffdfffff ffffffff ffdfffff ffffffff
+addd z eq x 80000000 00000001 ffdfffff ffffffff ffdfffff ffffffff
+addd p eq x 80000000 00000001 ffdfffff ffffffff ffdfffff ffffffff
+addd m eq x 80000000 00000001 ffdfffff ffffffff ffe00000 00000000
+addd n eq x 00000000 00000001 7fefffff ffffffff 7fefffff ffffffff
+addd z eq x 00000000 00000001 7fefffff ffffffff 7fefffff ffffffff
+addd m eq x 00000000 00000001 7fefffff ffffffff 7fefffff ffffffff
+addd p eq xo 00000000 00000001 7fefffff ffffffff 7ff00000 00000000
+addd n eq x 80000000 00000001 ffefffff ffffffff ffefffff ffffffff
+addd z eq x 80000000 00000001 ffefffff ffffffff ffefffff ffffffff
+addd p eq x 80000000 00000001 ffefffff ffffffff ffefffff ffffffff
+addd m eq xo 80000000 00000001 ffefffff ffffffff fff00000 00000000
+addd n eq x 00000000 00000001 7fefffff fffffffe 7fefffff fffffffe
+addd z eq x 00000000 00000001 7fefffff fffffffe 7fefffff fffffffe
+addd m eq x 00000000 00000001 7fefffff fffffffe 7fefffff fffffffe
+addd p eq x 00000000 00000001 7fefffff fffffffe 7fefffff ffffffff
+addd n eq x 80000000 00000001 ffefffff fffffffe ffefffff fffffffe
+addd z eq x 80000000 00000001 ffefffff fffffffe ffefffff fffffffe
+addd p eq x 80000000 00000001 ffefffff fffffffe ffefffff fffffffe
+addd m eq x 80000000 00000001 ffefffff fffffffe ffefffff ffffffff
+addd n eq x 00000000 00000001 3ff00000 00000000 3ff00000 00000000
+addd z eq x 00000000 00000001 3ff00000 00000000 3ff00000 00000000
+addd m eq x 00000000 00000001 3ff00000 00000000 3ff00000 00000000
+addd p eq x 00000000 00000001 3ff00000 00000000 3ff00000 00000001
+addd n eq x 80000000 00000001 bff00000 00000000 bff00000 00000000
+addd z eq x 80000000 00000001 bff00000 00000000 bff00000 00000000
+addd p eq x 80000000 00000001 bff00000 00000000 bff00000 00000000
+addd m eq x 80000000 00000001 bff00000 00000000 bff00000 00000001
+addd n eq x 00000000 00000001 3fefffff ffffffff 3fefffff ffffffff
+addd z eq x 00000000 00000001 3fefffff ffffffff 3fefffff ffffffff
+addd m eq x 00000000 00000001 3fefffff ffffffff 3fefffff ffffffff
+addd p eq x 00000000 00000001 3fefffff ffffffff 3ff00000 00000000
+addd n eq x 80000000 00000001 bfefffff ffffffff bfefffff ffffffff
+addd z eq x 80000000 00000001 bfefffff ffffffff bfefffff ffffffff
+addd p eq x 80000000 00000001 bfefffff ffffffff bfefffff ffffffff
+addd m eq x 80000000 00000001 bfefffff ffffffff bff00000 00000000
+addd n eq x 00000000 00000001 3fffffff ffffffff 3fffffff ffffffff
+addd z eq x 00000000 00000001 3fffffff ffffffff 3fffffff ffffffff
+addd m eq x 00000000 00000001 3fffffff ffffffff 3fffffff ffffffff
+addd p eq x 00000000 00000001 3fffffff ffffffff 40000000 00000000
+addd n eq x 80000000 00000001 bfffffff ffffffff bfffffff ffffffff
+addd z eq x 80000000 00000001 bfffffff ffffffff bfffffff ffffffff
+addd p eq x 80000000 00000001 bfffffff ffffffff bfffffff ffffffff
+addd m eq x 80000000 00000001 bfffffff ffffffff c0000000 00000000
+addd n eq x 00000000 00000001 3fffffff fffffffe 3fffffff fffffffe
+addd z eq x 00000000 00000001 3fffffff fffffffe 3fffffff fffffffe
+addd m eq x 00000000 00000001 3fffffff fffffffe 3fffffff fffffffe
+addd p eq x 00000000 00000001 3fffffff fffffffe 3fffffff ffffffff
+addd n eq x 80000000 00000001 bfffffff fffffffe bfffffff fffffffe
+addd z eq x 80000000 00000001 bfffffff fffffffe bfffffff fffffffe
+addd p eq x 80000000 00000001 bfffffff fffffffe bfffffff fffffffe
+addd m eq x 80000000 00000001 bfffffff fffffffe bfffffff ffffffff
+addd n eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000000
+addd p eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000000
+addd z eq x 7fe00000 00000000 bff00000 00000000 7fdfffff ffffffff
+addd m eq x 7fe00000 00000000 bff00000 00000000 7fdfffff ffffffff
+addd n eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000000
+addd m eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000000
+addd z eq x ffe00000 00000000 3ff00000 00000000 ffdfffff ffffffff
+addd p eq x ffe00000 00000000 3ff00000 00000000 ffdfffff ffffffff
+addd n eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff ffffffff
+addd p eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff ffffffff
+addd z eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff fffffffe
+addd m eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff fffffffe
+addd n eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff ffffffff
+addd m eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff ffffffff
+addd z eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff fffffffe
+addd p eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff fffffffe
+addd n eq x 7fefffff ffffffff bff00000 00000000 7fefffff ffffffff
+addd p eq x 7fefffff ffffffff bff00000 00000000 7fefffff ffffffff
+addd z eq x 7fefffff ffffffff bff00000 00000000 7fefffff fffffffe
+addd m eq x 7fefffff ffffffff bff00000 00000000 7fefffff fffffffe
+addd n eq x ffefffff ffffffff 3ff00000 00000000 ffefffff ffffffff
+addd m eq x ffefffff ffffffff 3ff00000 00000000 ffefffff ffffffff
+addd z eq x ffefffff ffffffff 3ff00000 00000000 ffefffff fffffffe
+addd p eq x ffefffff ffffffff 3ff00000 00000000 ffefffff fffffffe
+addd n eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffe
+addd p eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffe
+addd z eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffd
+addd m eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffd
+addd n eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffe
+addd m eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffe
+addd z eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffd
+addd p eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffd
+addd n eq x 7fefffff ffffffff 80000000 00000001 7fefffff ffffffff
+addd p eq x 7fefffff ffffffff 80000000 00000001 7fefffff ffffffff
+addd z eq x 7fefffff ffffffff 80000000 00000001 7fefffff fffffffe
+addd m eq x 7fefffff ffffffff 80000000 00000001 7fefffff fffffffe
+addd n eq x ffefffff ffffffff 00000000 00000001 ffefffff ffffffff
+addd m eq x ffefffff ffffffff 00000000 00000001 ffefffff ffffffff
+addd z eq x ffefffff ffffffff 00000000 00000001 ffefffff fffffffe
+addd p eq x ffefffff ffffffff 00000000 00000001 ffefffff fffffffe
+addd n eq x 80000000 00000003 7fe00000 00000000 7fe00000 00000000
+addd p eq x 80000000 00000003 7fe00000 00000000 7fe00000 00000000
+addd z eq x 80000000 00000003 7fe00000 00000000 7fdfffff ffffffff
+addd m eq x 80000000 00000003 7fe00000 00000000 7fdfffff ffffffff
+addd n eq x 00000000 00000003 ffe00000 00000000 ffe00000 00000000
+addd m eq x 00000000 00000003 ffe00000 00000000 ffe00000 00000000
+addd z eq x 00000000 00000003 ffe00000 00000000 ffdfffff ffffffff
+addd p eq x 00000000 00000003 ffe00000 00000000 ffdfffff ffffffff
+addd n eq x 3fefffff ffffffff 80000000 00000001 3fefffff ffffffff
+addd p eq x 3fefffff ffffffff 80000000 00000001 3fefffff ffffffff
+addd z eq x 3fefffff ffffffff 80000000 00000001 3fefffff fffffffe
+addd m eq x 3fefffff ffffffff 80000000 00000001 3fefffff fffffffe
+addd n eq x bfffffff ffffffff 00000000 00000001 bfffffff ffffffff
+addd m eq x bfffffff ffffffff 00000000 00000001 bfffffff ffffffff
+addd z eq x bfffffff ffffffff 00000000 00000001 bfffffff fffffffe
+addd p eq x bfffffff ffffffff 00000000 00000001 bfffffff fffffffe
+addd n eq x 80000000 00000003 40080000 00000000 40080000 00000000
+addd p eq x 80000000 00000003 40080000 00000000 40080000 00000000
+addd z eq x 80000000 00000003 40080000 00000000 4007ffff ffffffff
+addd m eq x 80000000 00000003 40080000 00000000 4007ffff ffffffff
+addd n eq x 00000000 00000003 c0140000 00000000 c0140000 00000000
+addd m eq x 00000000 00000003 c0140000 00000000 c0140000 00000000
+addd z eq x 00000000 00000003 c0140000 00000000 c013ffff ffffffff
+addd p eq x 00000000 00000003 c0140000 00000000 c013ffff ffffffff
+addd n eq x 3ff00000 00000001 3ff00000 00000000 40000000 00000000
+addd z eq x 3ff00000 00000001 3ff00000 00000000 40000000 00000000
+addd m eq x 3ff00000 00000001 3ff00000 00000000 40000000 00000000
+addd p eq x 3ff00000 00000001 3ff00000 00000000 40000000 00000001
+addd n eq x bff00000 00000001 bff00000 00000000 c0000000 00000000
+addd z eq x bff00000 00000001 bff00000 00000000 c0000000 00000000
+addd p eq x bff00000 00000001 bff00000 00000000 c0000000 00000000
+addd m eq x bff00000 00000001 bff00000 00000000 c0000000 00000001
+addd n eq x c0000000 00000000 c0000000 00000001 c0100000 00000000
+addd z eq x c0000000 00000000 c0000000 00000001 c0100000 00000000
+addd p eq x c0000000 00000000 c0000000 00000001 c0100000 00000000
+addd m eq x c0000000 00000000 c0000000 00000001 c0100000 00000001
+addd n eq x 40000000 00000000 40000000 00000001 40100000 00000000
+addd z eq x 40000000 00000000 40000000 00000001 40100000 00000000
+addd m eq x 40000000 00000000 40000000 00000001 40100000 00000000
+addd p eq x 40000000 00000000 40000000 00000001 40100000 00000001
+addd n eq x 3ff00000 00000000 3ff00000 00000003 40000000 00000002
+addd p eq x 3ff00000 00000000 3ff00000 00000003 40000000 00000002
+addd z eq x 3ff00000 00000000 3ff00000 00000003 40000000 00000001
+addd m eq x 3ff00000 00000000 3ff00000 00000003 40000000 00000001
+addd n eq x bff00000 00000000 bff00000 00000003 c0000000 00000002
+addd m eq x bff00000 00000000 bff00000 00000003 c0000000 00000002
+addd z eq x bff00000 00000000 bff00000 00000003 c0000000 00000001
+addd p eq x bff00000 00000000 bff00000 00000003 c0000000 00000001
+addd n eq x c0000000 00000001 c0000000 00000002 c0100000 00000002
+addd m eq x c0000000 00000001 c0000000 00000002 c0100000 00000002
+addd z eq x c0000000 00000001 c0000000 00000002 c0100000 00000001
+addd p eq x c0000000 00000001 c0000000 00000002 c0100000 00000001
+addd n eq x 40000000 00000001 40000000 00000002 40100000 00000002
+addd p eq x 40000000 00000001 40000000 00000002 40100000 00000002
+addd z eq x 40000000 00000001 40000000 00000002 40100000 00000001
+addd m eq x 40000000 00000001 40000000 00000002 40100000 00000001
+addd n eq xo 7fefffff fffffffe 7fefffff ffffffff 7ff00000 00000000
+addd p eq xo 7fefffff fffffffe 7fefffff ffffffff 7ff00000 00000000
+addd z eq xo 7fefffff fffffffe 7fefffff ffffffff 7fefffff ffffffff
+addd m eq xo 7fefffff fffffffe 7fefffff ffffffff 7fefffff ffffffff
+addd n eq xo ffefffff fffffffe ffefffff ffffffff fff00000 00000000
+addd m eq xo ffefffff fffffffe ffefffff ffffffff fff00000 00000000
+addd z eq xo ffefffff fffffffe ffefffff ffffffff ffefffff ffffffff
+addd p eq xo ffefffff fffffffe ffefffff ffffffff ffefffff ffffffff
+addd n eq xo 7fdfffff ffffffff 7fe00000 00000000 7ff00000 00000000
+addd p eq xo 7fdfffff ffffffff 7fe00000 00000000 7ff00000 00000000
+addd z eq x 7fdfffff ffffffff 7fe00000 00000000 7fefffff ffffffff
+addd m eq x 7fdfffff ffffffff 7fe00000 00000000 7fefffff ffffffff
+addd n eq xo ffdfffff ffffffff ffe00000 00000000 fff00000 00000000
+addd m eq xo ffdfffff ffffffff ffe00000 00000000 fff00000 00000000
+addd z eq x ffdfffff ffffffff ffe00000 00000000 ffefffff ffffffff
+addd p eq x ffdfffff ffffffff ffe00000 00000000 ffefffff ffffffff
+addd n eq xo 7fe00000 00000001 7fe00000 00000000 7ff00000 00000000
+addd p eq xo 7fe00000 00000001 7fe00000 00000000 7ff00000 00000000
+addd z eq xo 7fe00000 00000001 7fe00000 00000000 7fefffff ffffffff
+addd m eq xo 7fe00000 00000001 7fe00000 00000000 7fefffff ffffffff
+addd n eq xo ffe00000 00000001 ffe00000 00000000 fff00000 00000000
+addd m eq xo ffe00000 00000001 ffe00000 00000000 fff00000 00000000
+addd z eq xo ffe00000 00000001 ffe00000 00000000 ffefffff ffffffff
+addd p eq xo ffe00000 00000001 ffe00000 00000000 ffefffff ffffffff
+addd n eq x 7fd00000 00000001 7fd00000 00000000 7fe00000 00000000
+addd z eq x 7fd00000 00000001 7fd00000 00000000 7fe00000 00000000
+addd m eq x 7fd00000 00000001 7fd00000 00000000 7fe00000 00000000
+addd p eq x 7fd00000 00000001 7fd00000 00000000 7fe00000 00000001
+addd n eq x ffd00000 00000001 ffd00000 00000000 ffe00000 00000000
+addd z eq x ffd00000 00000001 ffd00000 00000000 ffe00000 00000000
+addd p eq x ffd00000 00000001 ffd00000 00000000 ffe00000 00000000
+addd m eq x ffd00000 00000001 ffd00000 00000000 ffe00000 00000001
+addd n eq x 7fdfffff fffffffe 7fdfffff ffffffff 7fefffff fffffffe
+addd z eq x 7fdfffff fffffffe 7fdfffff ffffffff 7fefffff fffffffe
+addd m eq x 7fdfffff fffffffe 7fdfffff ffffffff 7fefffff fffffffe
+addd p eq x 7fdfffff fffffffe 7fdfffff ffffffff 7fefffff ffffffff
+addd n eq x ffdfffff fffffffe ffdfffff ffffffff ffefffff fffffffe
+addd z eq x ffdfffff fffffffe ffdfffff ffffffff ffefffff fffffffe
+addd p eq x ffdfffff fffffffe ffdfffff ffffffff ffefffff fffffffe
+addd m eq x ffdfffff fffffffe ffdfffff ffffffff ffefffff ffffffff
+addd p eq x 40000000 00000000 3cb00000 00000000 40000000 00000001
+addd n eq x 40000000 00000000 3cb00000 00000000 40000000 00000000
+addd z eq x 40000000 00000000 3cb00000 00000000 40000000 00000000
+addd m eq x 40000000 00000000 3cb00000 00000000 40000000 00000000
+addd n eq x 40000000 00000001 3cb00000 00000000 40000000 00000002
+addd p eq x 40000000 00000001 3cb00000 00000000 40000000 00000002
+addd z eq x 40000000 00000001 3cb00000 00000000 40000000 00000001
+addd m eq x 40000000 00000001 3cb00000 00000000 40000000 00000001
+addd n eq x 400fffff ffffffff 3cb00000 00000000 40100000 00000000
+addd p eq x 400fffff ffffffff 3cb00000 00000000 40100000 00000000
+addd z eq x 400fffff ffffffff 3cb00000 00000000 400fffff ffffffff
+addd m eq x 400fffff ffffffff 3cb00000 00000000 400fffff ffffffff
+addd p eq x 400fffff ffffffff 3cafffff ffffffff 40100000 00000000
+addd z eq x 400fffff ffffffff 3cafffff ffffffff 400fffff ffffffff
+addd n eq x 400fffff ffffffff 3cafffff ffffffff 400fffff ffffffff
+addd m eq x 400fffff ffffffff 3cafffff ffffffff 400fffff ffffffff
+addd n eq x c00fffff ffffffff bcb00000 00000000 c0100000 00000000
+addd m eq x c00fffff ffffffff bcb00000 00000000 c0100000 00000000
+addd z eq x c00fffff ffffffff bcb00000 00000000 c00fffff ffffffff
+addd p eq x c00fffff ffffffff bcb00000 00000000 c00fffff ffffffff
+addd m eq x c00fffff ffffffff bcafffff ffffffff c0100000 00000000
+addd z eq x c00fffff ffffffff bcafffff ffffffff c00fffff ffffffff
+addd n eq x c00fffff ffffffff bcafffff ffffffff c00fffff ffffffff
+addd p eq x c00fffff ffffffff bcafffff ffffffff c00fffff ffffffff
+addd n uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+addd n uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+addd n uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd m uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd p uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd z uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd n uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd m uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd p uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd z uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+addd m uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+addd p uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+addd z uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+addd n uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+addd m uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+addd p uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+addd z uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+addd n uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+addd m uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+addd p uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+addd z uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+addd n uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+addd m uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+addd p uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+addd z uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+addd n uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd m uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd p uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd z uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd n uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd m uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd p uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd z uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd m uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd p uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd z uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd n uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd m uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd p uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd z uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd n uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd m uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd p uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd z uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd n uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd m uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd p uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd z uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+addd n uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+addd n uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+addd n uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd m uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd p uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd z uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd n uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd m uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd p uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd z uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+addd m uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+addd p uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+addd z uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+addd n uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+addd m uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+addd p uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+addd z uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+addd n uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+addd m uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+addd p uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+addd z uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+addd n uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+addd m uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+addd p uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+addd z uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+addd n uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd m uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd p uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd z uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd n uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd m uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd p uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd z uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd m uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd p uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd z uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+addd n uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd m uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd p uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd z uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+addd n uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd m uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd p uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd z uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd n uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd m uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd p uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd z uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+addd n uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+addd n uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+addd m uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+addd p uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+addd z uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+addd n eq - 41f00000 00004000 c1f00000 00000000 3f900000 00000000
+addd m eq - 41f00000 00004000 c1f00000 00000000 3f900000 00000000
+addd p eq - 41f00000 00004000 c1f00000 00000000 3f900000 00000000
+addd z eq - 41f00000 00004000 c1f00000 00000000 3f900000 00000000
+addd n eq x 3fefffff ffffffff 3cb00800 00000000 3ff00000 00000001
+addd p eq x 3fefffff ffffffff 3cb00800 00000000 3ff00000 00000001
+addd m eq x 3fefffff ffffffff 3cb00800 00000000 3ff00000 00000000
+addd z eq x 3fefffff ffffffff 3cb00800 00000000 3ff00000 00000000
+addd n eq x 43300000 00000000 bfe7ff00 00000000 432fffff ffffffff
+addd p eq x 43300000 00000000 bfe7ff00 00000000 432fffff ffffffff
+addd m eq x 43300000 00000000 bfe7ff00 00000000 432fffff fffffffe
+addd z eq x 43300000 00000000 bfe7ff00 00000000 432fffff fffffffe
+addd n eq - 00040000 00000000 00040000 00000000 00080000 00000000
+addd m eq - 00040000 00000000 00040000 00000000 00080000 00000000
+addd p eq - 00040000 00000000 00040000 00000000 00080000 00000000
+addd z eq - 00040000 00000000 00040000 00000000 00080000 00000000
+addd n eq - 80040000 00000000 80040000 00000000 80080000 00000000
+addd m eq - 80040000 00000000 80040000 00000000 80080000 00000000
+addd p eq - 80040000 00000000 80040000 00000000 80080000 00000000
+addd z eq - 80040000 00000000 80040000 00000000 80080000 00000000
+addd n eq - 00000000 00000000 ffe00000 00000000 ffe00000 00000000
+addd m eq - 00000000 00000000 ffe00000 00000000 ffe00000 00000000
+addd p eq - 00000000 00000000 ffe00000 00000000 ffe00000 00000000
+addd z eq - 00000000 00000000 ffe00000 00000000 ffe00000 00000000
+addd n eq - 00000000 00000000 00000000 00000003 00000000 00000003
+addd m eq - 00000000 00000000 00000000 00000003 00000000 00000003
+addd p eq - 00000000 00000000 00000000 00000003 00000000 00000003
+addd z eq - 00000000 00000000 00000000 00000003 00000000 00000003
+addd n eq - 00000000 00000000 80000000 00000003 80000000 00000003
+addd m eq - 00000000 00000000 80000000 00000003 80000000 00000003
+addd p eq - 00000000 00000000 80000000 00000003 80000000 00000003
+addd z eq - 00000000 00000000 80000000 00000003 80000000 00000003
+addd n eq - 00000000 00000000 00100000 00000000 00100000 00000000
+addd m eq - 00000000 00000000 00100000 00000000 00100000 00000000
+addd p eq - 00000000 00000000 00100000 00000000 00100000 00000000
+addd z eq - 00000000 00000000 00100000 00000000 00100000 00000000
+addd n eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000000
+addd z eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000000
+addd m eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000000
+addd p eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000001
+addd n eq x bff00000 00000000 80000000 00000001 bff00000 00000000
+addd z eq x bff00000 00000000 80000000 00000001 bff00000 00000000
+addd p eq x bff00000 00000000 80000000 00000001 bff00000 00000000
+addd m eq x bff00000 00000000 80000000 00000001 bff00000 00000001
+addd n eq x 40080000 00000000 80000000 00000003 40080000 00000000
+addd p eq x 40080000 00000000 80000000 00000003 40080000 00000000
+addd z eq x 40080000 00000000 80000000 00000003 4007ffff ffffffff
+addd m eq x 40080000 00000000 80000000 00000003 4007ffff ffffffff
+addd n eq x c0140000 00000000 00000000 00000003 c0140000 00000000
+addd m eq x c0140000 00000000 00000000 00000003 c0140000 00000000
+addd z eq x c0140000 00000000 00000000 00000003 c013ffff ffffffff
+addd p eq x c0140000 00000000 00000000 00000003 c013ffff ffffffff
+addd n eq x 3ff00000 00000000 3ff00000 00000001 40000000 00000000
+addd z eq x 3ff00000 00000000 3ff00000 00000001 40000000 00000000
+addd m eq x 3ff00000 00000000 3ff00000 00000001 40000000 00000000
+addd p eq x 3ff00000 00000000 3ff00000 00000001 40000000 00000001
+addd n eq x bff00000 00000000 bff00000 00000001 c0000000 00000000
+addd z eq x bff00000 00000000 bff00000 00000001 c0000000 00000000
+addd p eq x bff00000 00000000 bff00000 00000001 c0000000 00000000
+addd m eq x bff00000 00000000 bff00000 00000001 c0000000 00000001
+addd n eq - bff00000 00000000 3ff00000 00000001 3cb00000 00000000
+addd m eq - bff00000 00000000 3ff00000 00000001 3cb00000 00000000
+addd p eq - bff00000 00000000 3ff00000 00000001 3cb00000 00000000
+addd z eq - bff00000 00000000 3ff00000 00000001 3cb00000 00000000
+addd n eq - 3ff00000 00000000 bff00000 00000001 bcb00000 00000000
+addd m eq - 3ff00000 00000000 bff00000 00000001 bcb00000 00000000
+addd p eq - 3ff00000 00000000 bff00000 00000001 bcb00000 00000000
+addd z eq - 3ff00000 00000000 bff00000 00000001 bcb00000 00000000
+addd n eq x 3ff00000 00000000 7fe00000 00000000 7fe00000 00000000
+addd z eq x 3ff00000 00000000 7fe00000 00000000 7fe00000 00000000
+addd m eq x 3ff00000 00000000 7fe00000 00000000 7fe00000 00000000
+addd p eq x 3ff00000 00000000 7fe00000 00000000 7fe00000 00000001
+addd n eq x bff00000 00000000 ffe00000 00000000 ffe00000 00000000
+addd z eq x bff00000 00000000 ffe00000 00000000 ffe00000 00000000
+addd p eq x bff00000 00000000 ffe00000 00000000 ffe00000 00000000
+addd m eq x bff00000 00000000 ffe00000 00000000 ffe00000 00000001
+addd n eq x 3ff00000 00000000 7fdfffff ffffffff 7fdfffff ffffffff
+addd z eq x 3ff00000 00000000 7fdfffff ffffffff 7fdfffff ffffffff
+addd m eq x 3ff00000 00000000 7fdfffff ffffffff 7fdfffff ffffffff
+addd p eq x 3ff00000 00000000 7fdfffff ffffffff 7fe00000 00000000
+addd n eq x bff00000 00000000 ffdfffff ffffffff ffdfffff ffffffff
+addd z eq x bff00000 00000000 ffdfffff ffffffff ffdfffff ffffffff
+addd p eq x bff00000 00000000 ffdfffff ffffffff ffdfffff ffffffff
+addd m eq x bff00000 00000000 ffdfffff ffffffff ffe00000 00000000
+addd n eq x 3ff00000 00000000 7fefffff ffffffff 7fefffff ffffffff
+addd z eq x 3ff00000 00000000 7fefffff ffffffff 7fefffff ffffffff
+addd m eq x 3ff00000 00000000 7fefffff ffffffff 7fefffff ffffffff
+addd p eq xo 3ff00000 00000000 7fefffff ffffffff 7ff00000 00000000
+addd n eq x bff00000 00000000 ffefffff ffffffff ffefffff ffffffff
+addd z eq x bff00000 00000000 ffefffff ffffffff ffefffff ffffffff
+addd p eq x bff00000 00000000 ffefffff ffffffff ffefffff ffffffff
+addd m eq xo bff00000 00000000 ffefffff ffffffff fff00000 00000000
+addd n eq x 3ff00000 00000000 7fefffff fffffffe 7fefffff fffffffe
+addd z eq x 3ff00000 00000000 7fefffff fffffffe 7fefffff fffffffe
+addd m eq x 3ff00000 00000000 7fefffff fffffffe 7fefffff fffffffe
+addd p eq x 3ff00000 00000000 7fefffff fffffffe 7fefffff ffffffff
+addd n eq x bff00000 00000000 ffefffff fffffffe ffefffff fffffffe
+addd z eq x bff00000 00000000 ffefffff fffffffe ffefffff fffffffe
+addd p eq x bff00000 00000000 ffefffff fffffffe ffefffff fffffffe
+addd m eq x bff00000 00000000 ffefffff fffffffe ffefffff ffffffff
+addd n eq x bff00000 00000000 7fe00000 00000000 7fe00000 00000000
+addd p eq x bff00000 00000000 7fe00000 00000000 7fe00000 00000000
+addd z eq x bff00000 00000000 7fe00000 00000000 7fdfffff ffffffff
+addd m eq x bff00000 00000000 7fe00000 00000000 7fdfffff ffffffff
+addd n eq x 3ff00000 00000000 ffe00000 00000000 ffe00000 00000000
+addd m eq x 3ff00000 00000000 ffe00000 00000000 ffe00000 00000000
+addd z eq x 3ff00000 00000000 ffe00000 00000000 ffdfffff ffffffff
+addd p eq x 3ff00000 00000000 ffe00000 00000000 ffdfffff ffffffff
+addd n eq x bff00000 00000000 7fdfffff ffffffff 7fdfffff ffffffff
+addd p eq x bff00000 00000000 7fdfffff ffffffff 7fdfffff ffffffff
+addd z eq x bff00000 00000000 7fdfffff ffffffff 7fdfffff fffffffe
+addd m eq x bff00000 00000000 7fdfffff ffffffff 7fdfffff fffffffe
+addd n eq x 3ff00000 00000000 ffdfffff ffffffff ffdfffff ffffffff
+addd m eq x 3ff00000 00000000 ffdfffff ffffffff ffdfffff ffffffff
+addd z eq x 3ff00000 00000000 ffdfffff ffffffff ffdfffff fffffffe
+addd p eq x 3ff00000 00000000 ffdfffff ffffffff ffdfffff fffffffe
+addd n eq x bff00000 00000000 7fefffff ffffffff 7fefffff ffffffff
+addd p eq x bff00000 00000000 7fefffff ffffffff 7fefffff ffffffff
+addd z eq x bff00000 00000000 7fefffff ffffffff 7fefffff fffffffe
+addd m eq x bff00000 00000000 7fefffff ffffffff 7fefffff fffffffe
+addd n eq x 3ff00000 00000000 ffefffff ffffffff ffefffff ffffffff
+addd m eq x 3ff00000 00000000 ffefffff ffffffff ffefffff ffffffff
+addd z eq x 3ff00000 00000000 ffefffff ffffffff ffefffff fffffffe
+addd p eq x 3ff00000 00000000 ffefffff ffffffff ffefffff fffffffe
+addd n eq x bff00000 00000000 7fefffff fffffffe 7fefffff fffffffe
+addd p eq x bff00000 00000000 7fefffff fffffffe 7fefffff fffffffe
+addd z eq x bff00000 00000000 7fefffff fffffffe 7fefffff fffffffd
+addd m eq x bff00000 00000000 7fefffff fffffffe 7fefffff fffffffd
+addd n eq x 3ff00000 00000000 ffefffff fffffffe ffefffff fffffffe
+addd m eq x 3ff00000 00000000 ffefffff fffffffe ffefffff fffffffe
+addd z eq x 3ff00000 00000000 ffefffff fffffffe ffefffff fffffffd
+addd p eq x 3ff00000 00000000 ffefffff fffffffe ffefffff fffffffd
+addd n eq - 40b00000 00000000 40b00000 00000000 40c00000 00000000
+addd m eq - 40b00000 00000000 40b00000 00000000 40c00000 00000000
+addd p eq - 40b00000 00000000 40b00000 00000000 40c00000 00000000
+addd z eq - 40b00000 00000000 40b00000 00000000 40c00000 00000000
+addd n eq - 40a00000 00000000 40a00000 00000000 40b00000 00000000
+addd m eq - 40a00000 00000000 40a00000 00000000 40b00000 00000000
+addd p eq - 40a00000 00000000 40a00000 00000000 40b00000 00000000
+addd z eq - 40a00000 00000000 40a00000 00000000 40b00000 00000000
+addd n eq - 40a00000 00000000 40b00000 00000000 40b80000 00000000
+addd m eq - 40a00000 00000000 40b00000 00000000 40b80000 00000000
+addd p eq - 40a00000 00000000 40b00000 00000000 40b80000 00000000
+addd z eq - 40a00000 00000000 40b00000 00000000 40b80000 00000000
+addd n eq - c0b00000 00000000 c0b00000 00000000 c0c00000 00000000
+addd m eq - c0b00000 00000000 c0b00000 00000000 c0c00000 00000000
+addd p eq - c0b00000 00000000 c0b00000 00000000 c0c00000 00000000
+addd z eq - c0b00000 00000000 c0b00000 00000000 c0c00000 00000000
+addd n eq - c0a00000 00000000 c0a00000 00000000 c0b00000 00000000
+addd m eq - c0a00000 00000000 c0a00000 00000000 c0b00000 00000000
+addd p eq - c0a00000 00000000 c0a00000 00000000 c0b00000 00000000
+addd z eq - c0a00000 00000000 c0a00000 00000000 c0b00000 00000000
+addd n eq - c0a00000 00000000 c0b00000 00000000 c0b80000 00000000
+addd m eq - c0a00000 00000000 c0b00000 00000000 c0b80000 00000000
+addd p eq - c0a00000 00000000 c0b00000 00000000 c0b80000 00000000
+addd z eq - c0a00000 00000000 c0b00000 00000000 c0b80000 00000000
+addd n eq - 00000000 00000000 43d00000 00000000 43d00000 00000000
+addd m eq - 00000000 00000000 43d00000 00000000 43d00000 00000000
+addd p eq - 00000000 00000000 43d00000 00000000 43d00000 00000000
+addd z eq - 00000000 00000000 43d00000 00000000 43d00000 00000000
+addd n eq - 40d00000 00000000 00000000 00000000 40d00000 00000000
+addd m eq - 40d00000 00000000 00000000 00000000 40d00000 00000000
+addd p eq - 40d00000 00000000 00000000 00000000 40d00000 00000000
+addd z eq - 40d00000 00000000 00000000 00000000 40d00000 00000000
+addd p eq - c0d00000 00000000 40d00000 00000000 00000000 00000000
+addd n eq - c0d00000 00000000 40d00000 00000000 00000000 00000000
+addd z eq - c0d00000 00000000 40d00000 00000000 00000000 00000000
+addd p eq - 40d00000 00000000 c0d00000 00000000 00000000 00000000
+addd n eq - 40d00000 00000000 c0d00000 00000000 00000000 00000000
+addd z eq - 40d00000 00000000 c0d00000 00000000 00000000 00000000
+addd m eq - c0d00000 00000000 40d00000 00000000 80000000 00000000
+addd m eq - 40d00000 00000000 c0d00000 00000000 80000000 00000000
+addd n eq - 40b00000 00000000 c0a00000 00000000 40a00000 00000000
+addd m eq - 40b00000 00000000 c0a00000 00000000 40a00000 00000000
+addd p eq - 40b00000 00000000 c0a00000 00000000 40a00000 00000000
+addd z eq - 40b00000 00000000 c0a00000 00000000 40a00000 00000000
+addd n eq - c0a00000 00000000 40b00000 00000000 40a00000 00000000
+addd m eq - c0a00000 00000000 40b00000 00000000 40a00000 00000000
+addd p eq - c0a00000 00000000 40b00000 00000000 40a00000 00000000
+addd z eq - c0a00000 00000000 40b00000 00000000 40a00000 00000000
+addd n eq - c0b00000 00000000 40a00000 00000000 c0a00000 00000000
+addd m eq - c0b00000 00000000 40a00000 00000000 c0a00000 00000000
+addd p eq - c0b00000 00000000 40a00000 00000000 c0a00000 00000000
+addd z eq - c0b00000 00000000 40a00000 00000000 c0a00000 00000000
+addd n eq - 40a00000 00000000 c0b00000 00000000 c0a00000 00000000
+addd m eq - 40a00000 00000000 c0b00000 00000000 c0a00000 00000000
+addd p eq - 40a00000 00000000 c0b00000 00000000 c0a00000 00000000
+addd z eq - 40a00000 00000000 c0b00000 00000000 c0a00000 00000000
+addd n eq - 40dfff40 00000000 40000000 00000000 40dfffc0 00000000
+addd m eq - 40dfff40 00000000 40000000 00000000 40dfffc0 00000000
+addd p eq - 40dfff40 00000000 40000000 00000000 40dfffc0 00000000
+addd z eq - 40dfff40 00000000 40000000 00000000 40dfffc0 00000000
+addd n eq - 40dfffc0 00000000 3ff00000 00000000 40e00000 00000000
+addd m eq - 40dfffc0 00000000 3ff00000 00000000 40e00000 00000000
+addd p eq - 40dfffc0 00000000 3ff00000 00000000 40e00000 00000000
+addd z eq - 40dfffc0 00000000 3ff00000 00000000 40e00000 00000000
+addd n eq - 40dffec0 00000000 40000000 00000000 40dfff40 00000000
+addd m eq - 40dffec0 00000000 40000000 00000000 40dfff40 00000000
+addd p eq - 40dffec0 00000000 40000000 00000000 40dfff40 00000000
+addd z eq - 40dffec0 00000000 40000000 00000000 40dfff40 00000000
+addd n eq - 40000000 00000000 40dfff40 00000000 40dfffc0 00000000
+addd m eq - 40000000 00000000 40dfff40 00000000 40dfffc0 00000000
+addd p eq - 40000000 00000000 40dfff40 00000000 40dfffc0 00000000
+addd z eq - 40000000 00000000 40dfff40 00000000 40dfffc0 00000000
+addd n eq - 3ff00000 00000000 40dfffc0 00000000 40e00000 00000000
+addd m eq - 3ff00000 00000000 40dfffc0 00000000 40e00000 00000000
+addd p eq - 3ff00000 00000000 40dfffc0 00000000 40e00000 00000000
+addd z eq - 3ff00000 00000000 40dfffc0 00000000 40e00000 00000000
+addd n eq - 40000000 00000000 40dffec0 00000000 40dfff40 00000000
+addd m eq - 40000000 00000000 40dffec0 00000000 40dfff40 00000000
+addd p eq - 40000000 00000000 40dffec0 00000000 40dfff40 00000000
+addd z eq - 40000000 00000000 40dffec0 00000000 40dfff40 00000000
+addd p eq - c0dfffc0 00000000 40dfffc0 00000000 00000000 00000000
+addd n eq - c0dfffc0 00000000 40dfffc0 00000000 00000000 00000000
+addd z eq - c0dfffc0 00000000 40dfffc0 00000000 00000000 00000000
+addd m eq - 40dfffc0 00000000 c0dfffc0 00000000 80000000 00000000
+addd n eq x 3ff00000 00000000 3ca00000 00000000 3ff00000 00000000
+addd n eq - 3ff00000 00000000 3cb00000 00000000 3ff00000 00000001
+addd n eq x 3ff00000 00000003 3c9fffff ffffffff 3ff00000 00000003
+addd n eq x 3ff00000 00000003 3ca00000 00000000 3ff00000 00000004
+addd n eq x 3ff00000 00000003 3ca00000 00000001 3ff00000 00000004
+addd n eq xo 7fe00000 00000001 7fe70000 00000001 7ff00000 00000000
+addd z eq xo 7fe00000 00000001 7fe70000 00000001 7fefffff ffffffff
+addd p eq xo 7fe00000 00000001 7fe70000 00000001 7ff00000 00000000
+addd m eq xo 7fe00000 00000001 7fe70000 00000001 7fefffff ffffffff
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/adds.input b/verrou/unitTest/checkUCB-vecto/inputData/adds.input
new file mode 100644
index 0000000000000000000000000000000000000000..5566e07e71f666afb7e4716ebd1aa8046f489355
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/adds.input
@@ -0,0 +1,1415 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+adds p eq - 3f800000 3f800000 40000000
+adds z eq - 3f800000 3f800000 40000000
+adds n eq - 3f800000 40000000 40400000
+adds m eq - 3f800000 40000000 40400000
+adds p eq - 3f800000 40000000 40400000
+adds z eq - 3f800000 40000000 40400000
+adds n eq - 40000000 3f800000 40400000
+adds m eq - 40000000 3f800000 40400000
+adds p eq - 40000000 3f800000 40400000
+adds z eq - 40000000 3f800000 40400000
+adds n eq - 40000000 40000000 40800000
+adds m eq - 40000000 40000000 40800000
+adds p eq - 40000000 40000000 40800000
+adds z eq - 40000000 40000000 40800000
+adds n eq - 40000000 c0000000 00000000
+adds z eq - 40000000 c0000000 00000000
+adds p eq - 40000000 c0000000 00000000
+adds m eq - 40000000 c0000000 80000000
+adds n eq - 40a00000 c0a00000 00000000
+adds z eq - 40a00000 c0a00000 00000000
+adds p eq - 40a00000 c0a00000 00000000
+adds m eq - 40a00000 c0a00000 80000000
+adds n eq - 3f800000 40e00000 41000000
+adds m eq - 3f800000 40e00000 41000000
+adds p eq - 3f800000 40e00000 41000000
+adds z eq - 3f800000 40e00000 41000000
+adds n eq - 40a00000 bf800000 40800000
+adds m eq - 40a00000 bf800000 40800000
+adds p eq - 40a00000 bf800000 40800000
+adds z eq - 40a00000 bf800000 40800000
+adds n eq - 40000000 c0a00000 c0400000
+adds m eq - 40000000 c0a00000 c0400000
+adds p eq - 40000000 c0a00000 c0400000
+adds z eq - 40000000 c0a00000 c0400000
+adds n eq - 40a00000 80000000 40a00000
+adds m eq - 40a00000 80000000 40a00000
+adds p eq - 40a00000 80000000 40a00000
+adds z eq - 40a00000 80000000 40a00000
+adds n eq - 40a00000 00000000 40a00000
+adds m eq - 40a00000 00000000 40a00000
+adds p eq - 40a00000 00000000 40a00000
+adds z eq - 40a00000 00000000 40a00000
+adds n eq - 7f800000 7f800000 7f800000
+adds m eq - 7f800000 7f800000 7f800000
+adds p eq - 7f800000 7f800000 7f800000
+adds z eq - 7f800000 7f800000 7f800000
+adds n eq - ff800000 ff800000 ff800000
+adds m eq - ff800000 ff800000 ff800000
+adds p eq - ff800000 ff800000 ff800000
+adds z eq - ff800000 ff800000 ff800000
+adds n uo v ff800000 7f800000 7fff0000
+adds m uo v ff800000 7f800000 7fff0000
+adds p uo v ff800000 7f800000 7fff0000
+adds z uo v ff800000 7f800000 7fff0000
+adds n uo v 7f800000 ff800000 7fff0000
+adds m uo v 7f800000 ff800000 7fff0000
+adds p uo v 7f800000 ff800000 7fff0000
+adds z uo v 7f800000 ff800000 7fff0000
+adds n eq - 7f800000 7f000000 7f800000
+adds m eq - 7f800000 7f000000 7f800000
+adds p eq - 7f800000 7f000000 7f800000
+adds z eq - 7f800000 7f000000 7f800000
+adds n eq - 7f800000 ff000000 7f800000
+adds m eq - 7f800000 ff000000 7f800000
+adds p eq - 7f800000 ff000000 7f800000
+adds z eq - 7f800000 ff000000 7f800000
+adds n eq - ff800000 7f000000 ff800000
+adds m eq - ff800000 7f000000 ff800000
+adds p eq - ff800000 7f000000 ff800000
+adds z eq - ff800000 7f000000 ff800000
+adds n eq - ff800000 ff000000 ff800000
+adds m eq - ff800000 ff000000 ff800000
+adds p eq - ff800000 ff000000 ff800000
+adds z eq - ff800000 ff000000 ff800000
+adds n eq - 7f000000 7f800000 7f800000
+adds m eq - 7f000000 7f800000 7f800000
+adds p eq - 7f000000 7f800000 7f800000
+adds z eq - 7f000000 7f800000 7f800000
+adds n eq - 7f000000 ff800000 ff800000
+adds m eq - 7f000000 ff800000 ff800000
+adds p eq - 7f000000 ff800000 ff800000
+adds z eq - 7f000000 ff800000 ff800000
+adds n eq - ff000000 7f800000 7f800000
+adds m eq - ff000000 7f800000 7f800000
+adds p eq - ff000000 7f800000 7f800000
+adds z eq - ff000000 7f800000 7f800000
+adds n eq - ff000000 ff800000 ff800000
+adds m eq - ff000000 ff800000 ff800000
+adds p eq - ff000000 ff800000 ff800000
+adds z eq - ff000000 ff800000 ff800000
+adds n eq - 7f800000 00000000 7f800000
+adds m eq - 7f800000 00000000 7f800000
+adds p eq - 7f800000 00000000 7f800000
+adds z eq - 7f800000 00000000 7f800000
+adds n eq - 7f800000 80000000 7f800000
+adds m eq - 7f800000 80000000 7f800000
+adds p eq - 7f800000 80000000 7f800000
+adds z eq - 7f800000 80000000 7f800000
+adds n eq - ff800000 00000000 ff800000
+adds m eq - ff800000 00000000 ff800000
+adds p eq - ff800000 00000000 ff800000
+adds z eq - ff800000 00000000 ff800000
+adds n eq - ff800000 80000000 ff800000
+adds m eq - ff800000 80000000 ff800000
+adds p eq - ff800000 80000000 ff800000
+adds z eq - ff800000 80000000 ff800000
+adds n eq - 00000000 7f800000 7f800000
+adds m eq - 00000000 7f800000 7f800000
+adds p eq - 00000000 7f800000 7f800000
+adds z eq - 00000000 7f800000 7f800000
+adds n eq - 80000000 7f800000 7f800000
+adds m eq - 80000000 7f800000 7f800000
+adds p eq - 80000000 7f800000 7f800000
+adds z eq - 80000000 7f800000 7f800000
+adds n eq - 00000000 ff800000 ff800000
+adds m eq - 00000000 ff800000 ff800000
+adds p eq - 00000000 ff800000 ff800000
+adds z eq - 00000000 ff800000 ff800000
+adds n eq - 80000000 ff800000 ff800000
+adds m eq - 80000000 ff800000 ff800000
+adds p eq - 80000000 ff800000 ff800000
+adds z eq - 80000000 ff800000 ff800000
+adds n eq - 7f800000 007fffff 7f800000
+adds m eq - 7f800000 007fffff 7f800000
+adds p eq - 7f800000 007fffff 7f800000
+adds z eq - 7f800000 007fffff 7f800000
+adds n eq - ff800000 007fffff ff800000
+adds m eq - ff800000 007fffff ff800000
+adds p eq - ff800000 007fffff ff800000
+adds z eq - ff800000 007fffff ff800000
+adds n eq - 7f800000 807fffff 7f800000
+adds m eq - 7f800000 807fffff 7f800000
+adds p eq - 7f800000 807fffff 7f800000
+adds z eq - 7f800000 807fffff 7f800000
+adds n eq - ff800000 807fffff ff800000
+adds m eq - ff800000 807fffff ff800000
+adds p eq - ff800000 807fffff ff800000
+adds z eq - ff800000 807fffff ff800000
+adds n eq - 00000003 7f800000 7f800000
+adds m eq - 00000003 7f800000 7f800000
+adds p eq - 00000003 7f800000 7f800000
+adds z eq - 00000003 7f800000 7f800000
+adds n eq - 00000003 ff800000 ff800000
+adds m eq - 00000003 ff800000 ff800000
+adds p eq - 00000003 ff800000 ff800000
+adds z eq - 00000003 ff800000 ff800000
+adds n eq - 80000003 7f800000 7f800000
+adds m eq - 80000003 7f800000 7f800000
+adds p eq - 80000003 7f800000 7f800000
+adds z eq - 80000003 7f800000 7f800000
+adds n eq - 80000003 ff800000 ff800000
+adds m eq - 80000003 ff800000 ff800000
+adds p eq - 80000003 ff800000 ff800000
+adds z eq - 80000003 ff800000 ff800000
+adds n eq - 00000000 7f000000 7f000000
+adds m eq - 00000000 7f000000 7f000000
+adds p eq - 00000000 7f000000 7f000000
+adds z eq - 00000000 7f000000 7f000000
+adds n eq - 80000000 7f000000 7f000000
+adds m eq - 80000000 7f000000 7f000000
+adds p eq - 80000000 7f000000 7f000000
+adds z eq - 80000000 7f000000 7f000000
+adds n eq - ff000000 00000000 ff000000
+adds m eq - ff000000 00000000 ff000000
+adds p eq - ff000000 00000000 ff000000
+adds z eq - ff000000 00000000 ff000000
+adds n eq - ff000000 80000000 ff000000
+adds m eq - ff000000 80000000 ff000000
+adds p eq - ff000000 80000000 ff000000
+adds z eq - ff000000 80000000 ff000000
+adds n eq - 3f800000 80000000 3f800000
+adds m eq - 3f800000 80000000 3f800000
+adds p eq - 3f800000 80000000 3f800000
+adds z eq - 3f800000 80000000 3f800000
+adds n eq - bf800000 80000000 bf800000
+adds m eq - bf800000 80000000 bf800000
+adds p eq - bf800000 80000000 bf800000
+adds z eq - bf800000 80000000 bf800000
+adds n eq - 00000000 3f800000 3f800000
+adds m eq - 00000000 3f800000 3f800000
+adds p eq - 00000000 3f800000 3f800000
+adds z eq - 00000000 3f800000 3f800000
+adds n eq - 80000000 bf800000 bf800000
+adds m eq - 80000000 bf800000 bf800000
+adds p eq - 80000000 bf800000 bf800000
+adds z eq - 80000000 bf800000 bf800000
+adds n eq - 00000000 007fffff 007fffff
+adds m eq - 00000000 007fffff 007fffff
+adds p eq - 00000000 007fffff 007fffff
+adds z eq - 00000000 007fffff 007fffff
+adds n eq - 80000000 007fffff 007fffff
+adds m eq - 80000000 007fffff 007fffff
+adds p eq - 80000000 007fffff 007fffff
+adds z eq - 80000000 007fffff 007fffff
+adds n eq - 00000000 807fffff 807fffff
+adds m eq - 00000000 807fffff 807fffff
+adds p eq - 00000000 807fffff 807fffff
+adds z eq - 00000000 807fffff 807fffff
+adds n eq - 80000000 807fffff 807fffff
+adds m eq - 80000000 807fffff 807fffff
+adds p eq - 80000000 807fffff 807fffff
+adds z eq - 80000000 807fffff 807fffff
+adds n eq - 00000003 00000000 00000003
+adds m eq - 00000003 00000000 00000003
+adds p eq - 00000003 00000000 00000003
+adds z eq - 00000003 00000000 00000003
+adds n eq - 00000003 80000000 00000003
+adds m eq - 00000003 80000000 00000003
+adds p eq - 00000003 80000000 00000003
+adds z eq - 00000003 80000000 00000003
+adds n eq - 80000003 00000000 80000003
+adds m eq - 80000003 00000000 80000003
+adds p eq - 80000003 00000000 80000003
+adds z eq - 80000003 00000000 80000003
+adds n eq - 80000003 80000000 80000003
+adds m eq - 80000003 80000000 80000003
+adds p eq - 80000003 80000000 80000003
+adds z eq - 80000003 80000000 80000003
+adds n eq - 80000000 80800000 80800000
+adds m eq - 80000000 80800000 80800000
+adds p eq - 80000000 80800000 80800000
+adds z eq - 80000000 80800000 80800000
+adds n eq - 00800000 00000000 00800000
+adds m eq - 00800000 00000000 00800000
+adds p eq - 00800000 00000000 00800000
+adds z eq - 00800000 00000000 00800000
+adds n eq - 00000000 80800000 80800000
+adds m eq - 00000000 80800000 80800000
+adds p eq - 00000000 80800000 80800000
+adds z eq - 00000000 80800000 80800000
+adds n eq - 80800000 00000000 80800000
+adds m eq - 80800000 00000000 80800000
+adds p eq - 80800000 00000000 80800000
+adds z eq - 80800000 00000000 80800000
+adds n eq - 00000000 80000000 00000000
+adds z eq - 00000000 80000000 00000000
+adds p eq - 00000000 80000000 00000000
+adds n eq - 80000000 00000000 00000000
+adds z eq - 80000000 00000000 00000000
+adds p eq - 80000000 00000000 00000000
+adds m eq - 00000000 80000000 80000000
+adds m eq - 80000000 00000000 80000000
+adds n eq - 00000000 00000000 00000000
+adds m eq - 00000000 00000000 00000000
+adds p eq - 00000000 00000000 00000000
+adds z eq - 00000000 00000000 00000000
+adds n eq - 80000000 80000000 80000000
+adds m eq - 80000000 80000000 80000000
+adds p eq - 80000000 80000000 80000000
+adds z eq - 80000000 80000000 80000000
+adds n eq xo 7f000000 7f000000 7f800000
+adds p eq xo 7f000000 7f000000 7f800000
+adds z eq xo 7f000000 7f000000 7f7fffff
+adds m eq xo 7f000000 7f000000 7f7fffff
+adds n eq xo ff000000 ff000000 ff800000
+adds m eq xo ff000000 ff000000 ff800000
+adds z eq xo ff000000 ff000000 ff7fffff
+adds p eq xo ff000000 ff000000 ff7fffff
+adds n eq - 7efffffe 7efffffe 7f7ffffe
+adds m eq - 7efffffe 7efffffe 7f7ffffe
+adds p eq - 7efffffe 7efffffe 7f7ffffe
+adds z eq - 7efffffe 7efffffe 7f7ffffe
+adds n eq - fefffffe fefffffe ff7ffffe
+adds m eq - fefffffe fefffffe ff7ffffe
+adds p eq - fefffffe fefffffe ff7ffffe
+adds z eq - fefffffe fefffffe ff7ffffe
+adds n eq xo 7f7ffffe 7f7ffffe 7f800000
+adds p eq xo 7f7ffffe 7f7ffffe 7f800000
+adds z eq xo 7f7ffffe 7f7ffffe 7f7fffff
+adds m eq xo 7f7ffffe 7f7ffffe 7f7fffff
+adds n eq xo ff7ffffe ff7ffffe ff800000
+adds m eq xo ff7ffffe ff7ffffe ff800000
+adds z eq xo ff7ffffe ff7ffffe ff7fffff
+adds p eq xo ff7ffffe ff7ffffe ff7fffff
+adds n eq - 40400000 40400000 40c00000
+adds m eq - 40400000 40400000 40c00000
+adds p eq - 40400000 40400000 40c00000
+adds z eq - 40400000 40400000 40c00000
+adds n eq - 00800000 00800000 01000000
+adds m eq - 00800000 00800000 01000000
+adds p eq - 00800000 00800000 01000000
+adds z eq - 00800000 00800000 01000000
+adds n eq - 7e800000 7e800000 7f000000
+adds m eq - 7e800000 7e800000 7f000000
+adds p eq - 7e800000 7e800000 7f000000
+adds z eq - 7e800000 7e800000 7f000000
+adds n eq - 007fffff 007fffff 00fffffe
+adds m eq - 007fffff 007fffff 00fffffe
+adds p eq - 007fffff 007fffff 00fffffe
+adds z eq - 007fffff 007fffff 00fffffe
+adds n eq - 807fffff 807fffff 80fffffe
+adds m eq - 807fffff 807fffff 80fffffe
+adds p eq - 807fffff 807fffff 80fffffe
+adds z eq - 807fffff 807fffff 80fffffe
+adds n eq - 00000004 00000004 00000008
+adds m eq - 00000004 00000004 00000008
+adds p eq - 00000004 00000004 00000008
+adds z eq - 00000004 00000004 00000008
+adds n eq - 80000004 80000004 80000008
+adds m eq - 80000004 80000004 80000008
+adds p eq - 80000004 80000004 80000008
+adds z eq - 80000004 80000004 80000008
+adds n eq - 00000001 00000001 00000002
+adds m eq - 00000001 00000001 00000002
+adds p eq - 00000001 00000001 00000002
+adds z eq - 00000001 00000001 00000002
+adds n eq - 80000001 80000001 80000002
+adds m eq - 80000001 80000001 80000002
+adds p eq - 80000001 80000001 80000002
+adds z eq - 80000001 80000001 80000002
+adds n eq - 7f000000 ff000000 00000000
+adds z eq - 7f000000 ff000000 00000000
+adds p eq - 7f000000 ff000000 00000000
+adds n eq - fefffffe 7efffffe 00000000
+adds z eq - fefffffe 7efffffe 00000000
+adds p eq - fefffffe 7efffffe 00000000
+adds n eq - 3f800000 bf800000 00000000
+adds z eq - 3f800000 bf800000 00000000
+adds p eq - 3f800000 bf800000 00000000
+adds n eq - c0400000 40400000 00000000
+adds z eq - c0400000 40400000 00000000
+adds p eq - c0400000 40400000 00000000
+adds n eq - 00800000 80800000 00000000
+adds z eq - 00800000 80800000 00000000
+adds p eq - 00800000 80800000 00000000
+adds n eq - 80800000 00800000 00000000
+adds z eq - 80800000 00800000 00000000
+adds p eq - 80800000 00800000 00000000
+adds n eq - 007ffffc 807ffffc 00000000
+adds z eq - 007ffffc 807ffffc 00000000
+adds p eq - 007ffffc 807ffffc 00000000
+adds n eq - 807fffff 007fffff 00000000
+adds z eq - 807fffff 007fffff 00000000
+adds p eq - 807fffff 007fffff 00000000
+adds n eq - 00000001 80000001 00000000
+adds z eq - 00000001 80000001 00000000
+adds p eq - 00000001 80000001 00000000
+adds n eq - 80000001 00000001 00000000
+adds z eq - 80000001 00000001 00000000
+adds p eq - 80000001 00000001 00000000
+adds n eq - 7f7fffff ff7fffff 00000000
+adds z eq - 7f7fffff ff7fffff 00000000
+adds p eq - 7f7fffff ff7fffff 00000000
+adds m eq - 7f000000 ff000000 80000000
+adds m eq - fefffffe 7efffffe 80000000
+adds m eq - 3f800000 bf800000 80000000
+adds m eq - c0400000 40400000 80000000
+adds m eq - 00800000 80800000 80000000
+adds m eq - 80800000 00800000 80000000
+adds m eq - 007ffffc 807ffffc 80000000
+adds m eq - 807fffff 007fffff 80000000
+adds m eq - 00000001 80000001 80000000
+adds m eq - 80000001 00000001 80000000
+adds m eq - 7f7fffff ff7fffff 80000000
+adds n eq - 3f800001 bf800000 34000000
+adds m eq - 3f800001 bf800000 34000000
+adds p eq - 3f800001 bf800000 34000000
+adds z eq - 3f800001 bf800000 34000000
+adds n eq - bf800001 3f800000 b4000000
+adds m eq - bf800001 3f800000 b4000000
+adds p eq - bf800001 3f800000 b4000000
+adds z eq - bf800001 3f800000 b4000000
+adds n eq - 3f800001 bf800002 b4000000
+adds m eq - 3f800001 bf800002 b4000000
+adds p eq - 3f800001 bf800002 b4000000
+adds z eq - 3f800001 bf800002 b4000000
+adds n eq - bf800001 3f800002 34000000
+adds m eq - bf800001 3f800002 34000000
+adds p eq - bf800001 3f800002 34000000
+adds z eq - bf800001 3f800002 34000000
+adds n eq - 40000000 c0000001 b4800000
+adds m eq - 40000000 c0000001 b4800000
+adds p eq - 40000000 c0000001 b4800000
+adds z eq - 40000000 c0000001 b4800000
+adds n eq - c0000000 40000001 34800000
+adds m eq - c0000000 40000001 34800000
+adds p eq - c0000000 40000001 34800000
+adds z eq - c0000000 40000001 34800000
+adds n eq - 40000004 c0000003 34800000
+adds m eq - 40000004 c0000003 34800000
+adds p eq - 40000004 c0000003 34800000
+adds z eq - 40000004 c0000003 34800000
+adds n eq - c0000004 40000003 b4800000
+adds m eq - c0000004 40000003 b4800000
+adds p eq - c0000004 40000003 b4800000
+adds z eq - c0000004 40000003 b4800000
+adds n eq - 407fffff c07ffffe 34800000
+adds m eq - 407fffff c07ffffe 34800000
+adds p eq - 407fffff c07ffffe 34800000
+adds z eq - 407fffff c07ffffe 34800000
+adds n eq - c07fffff 407ffffe b4800000
+adds m eq - c07fffff 407ffffe b4800000
+adds p eq - c07fffff 407ffffe b4800000
+adds z eq - c07fffff 407ffffe b4800000
+adds n eq - 3ffffffc bffffffd b4000000
+adds m eq - 3ffffffc bffffffd b4000000
+adds p eq - 3ffffffc bffffffd b4000000
+adds z eq - 3ffffffc bffffffd b4000000
+adds n eq - bffffffc 3ffffffd 34000000
+adds m eq - bffffffc 3ffffffd 34000000
+adds p eq - bffffffc 3ffffffd 34000000
+adds z eq - bffffffc 3ffffffd 34000000
+adds n eq - 7f000001 ff000000 73800000
+adds m eq - 7f000001 ff000000 73800000
+adds p eq - 7f000001 ff000000 73800000
+adds z eq - 7f000001 ff000000 73800000
+adds n eq - ff000001 7f000000 f3800000
+adds m eq - ff000001 7f000000 f3800000
+adds p eq - ff000001 7f000000 f3800000
+adds z eq - ff000001 7f000000 f3800000
+adds n eq - 7f000001 ff000002 f3800000
+adds m eq - 7f000001 ff000002 f3800000
+adds p eq - 7f000001 ff000002 f3800000
+adds z eq - 7f000001 ff000002 f3800000
+adds n eq - ff000001 7f000002 73800000
+adds m eq - ff000001 7f000002 73800000
+adds p eq - ff000001 7f000002 73800000
+adds z eq - ff000001 7f000002 73800000
+adds n eq - 7e800000 fe800001 f3000000
+adds m eq - 7e800000 fe800001 f3000000
+adds p eq - 7e800000 fe800001 f3000000
+adds z eq - 7e800000 fe800001 f3000000
+adds n eq - fe800000 7e800001 73000000
+adds m eq - fe800000 7e800001 73000000
+adds p eq - fe800000 7e800001 73000000
+adds z eq - fe800000 7e800001 73000000
+adds n eq - 7e800004 fe800003 73000000
+adds m eq - 7e800004 fe800003 73000000
+adds p eq - 7e800004 fe800003 73000000
+adds z eq - 7e800004 fe800003 73000000
+adds n eq - fe800004 7e800003 f3000000
+adds m eq - fe800004 7e800003 f3000000
+adds p eq - fe800004 7e800003 f3000000
+adds z eq - fe800004 7e800003 f3000000
+adds n eq - 7e7fffff fe7ffffe 72800000
+adds m eq - 7e7fffff fe7ffffe 72800000
+adds p eq - 7e7fffff fe7ffffe 72800000
+adds z eq - 7e7fffff fe7ffffe 72800000
+adds n eq - fe7fffff 7e7ffffe f2800000
+adds m eq - fe7fffff 7e7ffffe f2800000
+adds p eq - fe7fffff 7e7ffffe f2800000
+adds z eq - fe7fffff 7e7ffffe f2800000
+adds n eq - ff7ffffe 7f7fffff 73800000
+adds m eq - ff7ffffe 7f7fffff 73800000
+adds p eq - ff7ffffe 7f7fffff 73800000
+adds z eq - ff7ffffe 7f7fffff 73800000
+adds n eq - 7f7ffffe ff7fffff f3800000
+adds m eq - 7f7ffffe ff7fffff f3800000
+adds p eq - 7f7ffffe ff7fffff f3800000
+adds z eq - 7f7ffffe ff7fffff f3800000
+adds n eq - 80800001 00800000 80000001
+adds m eq - 80800001 00800000 80000001
+adds p eq - 80800001 00800000 80000001
+adds z eq - 80800001 00800000 80000001
+adds n eq - 00800001 80800000 00000001
+adds m eq - 00800001 80800000 00000001
+adds p eq - 00800001 80800000 00000001
+adds z eq - 00800001 80800000 00000001
+adds n eq - 807fffff 00800000 00000001
+adds m eq - 807fffff 00800000 00000001
+adds p eq - 807fffff 00800000 00000001
+adds z eq - 807fffff 00800000 00000001
+adds n eq - 007fffff 80800000 80000001
+adds m eq - 007fffff 80800000 80000001
+adds p eq - 007fffff 80800000 80000001
+adds z eq - 007fffff 80800000 80000001
+adds n eq - 00800001 80800002 80000001
+adds m eq - 00800001 80800002 80000001
+adds p eq - 00800001 80800002 80000001
+adds z eq - 00800001 80800002 80000001
+adds n eq - 80800001 00800002 00000001
+adds m eq - 80800001 00800002 00000001
+adds p eq - 80800001 00800002 00000001
+adds z eq - 80800001 00800002 00000001
+adds n eq - 007fffff 807ffffe 00000001
+adds m eq - 007fffff 807ffffe 00000001
+adds p eq - 007fffff 807ffffe 00000001
+adds z eq - 007fffff 807ffffe 00000001
+adds n eq - 807fffff 007ffffe 80000001
+adds m eq - 807fffff 007ffffe 80000001
+adds p eq - 807fffff 007ffffe 80000001
+adds z eq - 807fffff 007ffffe 80000001
+adds n eq - 007ffffd 807ffffe 80000001
+adds m eq - 007ffffd 807ffffe 80000001
+adds p eq - 007ffffd 807ffffe 80000001
+adds z eq - 007ffffd 807ffffe 80000001
+adds n eq - 807ffffd 007ffffe 00000001
+adds m eq - 807ffffd 007ffffe 00000001
+adds p eq - 807ffffd 007ffffe 00000001
+adds z eq - 807ffffd 007ffffe 00000001
+adds n eq - 00000002 80000001 00000001
+adds m eq - 00000002 80000001 00000001
+adds p eq - 00000002 80000001 00000001
+adds z eq - 00000002 80000001 00000001
+adds n eq - 80000002 00000001 80000001
+adds m eq - 80000002 00000001 80000001
+adds p eq - 80000002 00000001 80000001
+adds z eq - 80000002 00000001 80000001
+adds n eq - 00000003 80000002 00000001
+adds m eq - 00000003 80000002 00000001
+adds p eq - 00000003 80000002 00000001
+adds z eq - 00000003 80000002 00000001
+adds n eq - 80000003 00000002 80000001
+adds m eq - 80000003 00000002 80000001
+adds p eq - 80000003 00000002 80000001
+adds z eq - 80000003 00000002 80000001
+adds n eq - 40000000 bfffffff 34000000
+adds m eq - 40000000 bfffffff 34000000
+adds p eq - 40000000 bfffffff 34000000
+adds z eq - 40000000 bfffffff 34000000
+adds n eq - c0000000 3fffffff b4000000
+adds m eq - c0000000 3fffffff b4000000
+adds p eq - c0000000 3fffffff b4000000
+adds z eq - c0000000 3fffffff b4000000
+adds n eq - bfffffff 40000000 34000000
+adds m eq - bfffffff 40000000 34000000
+adds p eq - bfffffff 40000000 34000000
+adds z eq - bfffffff 40000000 34000000
+adds n eq - 3fffffff c0000000 b4000000
+adds m eq - 3fffffff c0000000 b4000000
+adds p eq - 3fffffff c0000000 b4000000
+adds z eq - 3fffffff c0000000 b4000000
+adds n eq - 40800001 c07fffff 35400000
+adds m eq - 40800001 c07fffff 35400000
+adds p eq - 40800001 c07fffff 35400000
+adds z eq - 40800001 c07fffff 35400000
+adds n eq - c0800001 407fffff b5400000
+adds m eq - c0800001 407fffff b5400000
+adds p eq - c0800001 407fffff b5400000
+adds z eq - c0800001 407fffff b5400000
+adds n eq - 407fffff c0800002 b5a00000
+adds m eq - 407fffff c0800002 b5a00000
+adds p eq - 407fffff c0800002 b5a00000
+adds z eq - 407fffff c0800002 b5a00000
+adds n eq - c07fffff 40800002 35a00000
+adds m eq - c07fffff 40800002 35a00000
+adds p eq - c07fffff 40800002 35a00000
+adds z eq - c07fffff 40800002 35a00000
+adds n eq - 40000001 bf800001 3f800001
+adds m eq - 40000001 bf800001 3f800001
+adds p eq - 40000001 bf800001 3f800001
+adds z eq - 40000001 bf800001 3f800001
+adds n eq - c0000001 3f800001 bf800001
+adds m eq - c0000001 3f800001 bf800001
+adds p eq - c0000001 3f800001 bf800001
+adds z eq - c0000001 3f800001 bf800001
+adds n eq - 40000002 bf800001 3f800003
+adds m eq - 40000002 bf800001 3f800003
+adds p eq - 40000002 bf800001 3f800003
+adds z eq - 40000002 bf800001 3f800003
+adds n eq - c0000002 3f800001 bf800003
+adds m eq - c0000002 3f800001 bf800003
+adds p eq - c0000002 3f800001 bf800003
+adds z eq - c0000002 3f800001 bf800003
+adds n eq - 40000002 bf800003 3f800001
+adds m eq - 40000002 bf800003 3f800001
+adds p eq - 40000002 bf800003 3f800001
+adds z eq - 40000002 bf800003 3f800001
+adds n eq - c0000002 3f800003 bf800001
+adds m eq - c0000002 3f800003 bf800001
+adds p eq - c0000002 3f800003 bf800001
+adds z eq - c0000002 3f800003 bf800001
+adds n eq - 7e800000 fe7fffff 72800000
+adds m eq - 7e800000 fe7fffff 72800000
+adds p eq - 7e800000 fe7fffff 72800000
+adds z eq - 7e800000 fe7fffff 72800000
+adds n eq - fe800000 7e7fffff f2800000
+adds m eq - fe800000 7e7fffff f2800000
+adds p eq - fe800000 7e7fffff f2800000
+adds z eq - fe800000 7e7fffff f2800000
+adds n eq - feffffff 7f000000 73000000
+adds m eq - feffffff 7f000000 73000000
+adds p eq - feffffff 7f000000 73000000
+adds z eq - feffffff 7f000000 73000000
+adds n eq - 7effffff ff000000 f3000000
+adds m eq - 7effffff ff000000 f3000000
+adds p eq - 7effffff ff000000 f3000000
+adds z eq - 7effffff ff000000 f3000000
+adds n eq - 7d800001 fd7fffff 72400000
+adds m eq - 7d800001 fd7fffff 72400000
+adds p eq - 7d800001 fd7fffff 72400000
+adds z eq - 7d800001 fd7fffff 72400000
+adds n eq - fd800001 7d7fffff f2400000
+adds m eq - fd800001 7d7fffff f2400000
+adds p eq - fd800001 7d7fffff f2400000
+adds z eq - fd800001 7d7fffff f2400000
+adds n eq - 7e7fffff fe800002 f3a00000
+adds m eq - 7e7fffff fe800002 f3a00000
+adds p eq - 7e7fffff fe800002 f3a00000
+adds z eq - 7e7fffff fe800002 f3a00000
+adds n eq - fe7fffff 7e800002 73a00000
+adds m eq - fe7fffff 7e800002 73a00000
+adds p eq - fe7fffff 7e800002 73a00000
+adds z eq - fe7fffff 7e800002 73a00000
+adds n eq - 7e800001 ff000001 fe800001
+adds m eq - 7e800001 ff000001 fe800001
+adds p eq - 7e800001 ff000001 fe800001
+adds z eq - 7e800001 ff000001 fe800001
+adds n eq - fe800001 7f000001 7e800001
+adds m eq - fe800001 7f000001 7e800001
+adds p eq - fe800001 7f000001 7e800001
+adds z eq - fe800001 7f000001 7e800001
+adds n eq - 7f000002 fe800001 7e800003
+adds m eq - 7f000002 fe800001 7e800003
+adds p eq - 7f000002 fe800001 7e800003
+adds z eq - 7f000002 fe800001 7e800003
+adds n eq - ff000002 7e800001 fe800003
+adds m eq - ff000002 7e800001 fe800003
+adds p eq - ff000002 7e800001 fe800003
+adds z eq - ff000002 7e800001 fe800003
+adds n eq - 7e800002 fe000003 7e000001
+adds m eq - 7e800002 fe000003 7e000001
+adds p eq - 7e800002 fe000003 7e000001
+adds z eq - 7e800002 fe000003 7e000001
+adds n eq - fe800002 7e000003 fe000001
+adds m eq - fe800002 7e000003 fe000001
+adds p eq - fe800002 7e000003 fe000001
+adds z eq - fe800002 7e000003 fe000001
+adds n eq - 01000000 80ffffff 00000001
+adds m eq - 01000000 80ffffff 00000001
+adds p eq - 01000000 80ffffff 00000001
+adds z eq - 01000000 80ffffff 00000001
+adds n eq - 81000000 00ffffff 80000001
+adds m eq - 81000000 00ffffff 80000001
+adds p eq - 81000000 00ffffff 80000001
+adds z eq - 81000000 00ffffff 80000001
+adds n eq - 80ffffff 01000000 00000001
+adds m eq - 80ffffff 01000000 00000001
+adds p eq - 80ffffff 01000000 00000001
+adds z eq - 80ffffff 01000000 00000001
+adds n eq - 00ffffff 81000000 80000001
+adds m eq - 00ffffff 81000000 80000001
+adds p eq - 00ffffff 81000000 80000001
+adds z eq - 00ffffff 81000000 80000001
+adds n eq - 01000001 80ffffff 00000003
+adds m eq - 01000001 80ffffff 00000003
+adds p eq - 01000001 80ffffff 00000003
+adds z eq - 01000001 80ffffff 00000003
+adds n eq - 81000001 00ffffff 80000003
+adds m eq - 81000001 00ffffff 80000003
+adds p eq - 81000001 00ffffff 80000003
+adds z eq - 81000001 00ffffff 80000003
+adds n eq - 01800000 817fffff 00000002
+adds m eq - 01800000 817fffff 00000002
+adds p eq - 01800000 817fffff 00000002
+adds z eq - 01800000 817fffff 00000002
+adds n eq - 81800000 017fffff 80000002
+adds m eq - 81800000 017fffff 80000002
+adds p eq - 81800000 017fffff 80000002
+adds z eq - 81800000 017fffff 80000002
+adds n eq - 817fffff 01800000 00000002
+adds m eq - 817fffff 01800000 00000002
+adds p eq - 817fffff 01800000 00000002
+adds z eq - 817fffff 01800000 00000002
+adds n eq - 017fffff 81800000 80000002
+adds m eq - 017fffff 81800000 80000002
+adds p eq - 017fffff 81800000 80000002
+adds z eq - 017fffff 81800000 80000002
+adds n eq - 01800001 817fffff 00000006
+adds m eq - 01800001 817fffff 00000006
+adds p eq - 01800001 817fffff 00000006
+adds z eq - 01800001 817fffff 00000006
+adds n eq - 81800001 017fffff 80000006
+adds m eq - 81800001 017fffff 80000006
+adds p eq - 81800001 017fffff 80000006
+adds z eq - 81800001 017fffff 80000006
+adds n eq - 00ffffff 81000002 80000005
+adds m eq - 00ffffff 81000002 80000005
+adds p eq - 00ffffff 81000002 80000005
+adds z eq - 00ffffff 81000002 80000005
+adds n eq - 80ffffff 01000002 00000005
+adds m eq - 80ffffff 01000002 00000005
+adds p eq - 80ffffff 01000002 00000005
+adds z eq - 80ffffff 01000002 00000005
+adds n eq - 00ffffff 81000004 80000009
+adds m eq - 00ffffff 81000004 80000009
+adds p eq - 00ffffff 81000004 80000009
+adds z eq - 00ffffff 81000004 80000009
+adds n eq - 80ffffff 01000004 00000009
+adds m eq - 80ffffff 01000004 00000009
+adds p eq - 80ffffff 01000004 00000009
+adds z eq - 80ffffff 01000004 00000009
+adds n eq - 01000001 80800001 00800001
+adds m eq - 01000001 80800001 00800001
+adds p eq - 01000001 80800001 00800001
+adds z eq - 01000001 80800001 00800001
+adds n eq - 81000001 00800001 80800001
+adds m eq - 81000001 00800001 80800001
+adds p eq - 81000001 00800001 80800001
+adds z eq - 81000001 00800001 80800001
+adds n eq - 01000002 80800001 00800003
+adds m eq - 01000002 80800001 00800003
+adds p eq - 01000002 80800001 00800003
+adds z eq - 01000002 80800001 00800003
+adds n eq - 81000002 00800001 80800003
+adds m eq - 81000002 00800001 80800003
+adds p eq - 81000002 00800001 80800003
+adds z eq - 81000002 00800001 80800003
+adds n eq - 01800002 81000003 01000001
+adds m eq - 01800002 81000003 01000001
+adds p eq - 01800002 81000003 01000001
+adds z eq - 01800002 81000003 01000001
+adds n eq - 81800002 01000003 81000001
+adds m eq - 81800002 01000003 81000001
+adds p eq - 81800002 01000003 81000001
+adds z eq - 81800002 01000003 81000001
+adds n eq x 7f000000 3f800000 7f000000
+adds z eq x 7f000000 3f800000 7f000000
+adds m eq x 7f000000 3f800000 7f000000
+adds p eq x 7f000000 3f800000 7f000001
+adds n eq x ff000000 bf800000 ff000000
+adds z eq x ff000000 bf800000 ff000000
+adds p eq x ff000000 bf800000 ff000000
+adds m eq x ff000000 bf800000 ff000001
+adds n eq x 7effffff 3f800000 7effffff
+adds z eq x 7effffff 3f800000 7effffff
+adds m eq x 7effffff 3f800000 7effffff
+adds p eq x 7effffff 3f800000 7f000000
+adds n eq x feffffff bf800000 feffffff
+adds z eq x feffffff bf800000 feffffff
+adds p eq x feffffff bf800000 feffffff
+adds m eq x feffffff bf800000 ff000000
+adds n eq x 7f7fffff 3f800000 7f7fffff
+adds z eq x 7f7fffff 3f800000 7f7fffff
+adds m eq x 7f7fffff 3f800000 7f7fffff
+adds p eq xo 7f7fffff 3f800000 7f800000
+adds n eq x ff7fffff bf800000 ff7fffff
+adds z eq x ff7fffff bf800000 ff7fffff
+adds p eq x ff7fffff bf800000 ff7fffff
+adds m eq xo ff7fffff bf800000 ff800000
+adds n eq x 7f7ffffe 3f800000 7f7ffffe
+adds z eq x 7f7ffffe 3f800000 7f7ffffe
+adds m eq x 7f7ffffe 3f800000 7f7ffffe
+adds p eq x 7f7ffffe 3f800000 7f7fffff
+adds n eq x ff7ffffe bf800000 ff7ffffe
+adds z eq x ff7ffffe bf800000 ff7ffffe
+adds p eq x ff7ffffe bf800000 ff7ffffe
+adds m eq x ff7ffffe bf800000 ff7fffff
+adds n eq x 00000001 7f000000 7f000000
+adds z eq x 00000001 7f000000 7f000000
+adds m eq x 00000001 7f000000 7f000000
+adds p eq x 00000001 7f000000 7f000001
+adds n eq x 80000001 ff000000 ff000000
+adds z eq x 80000001 ff000000 ff000000
+adds p eq x 80000001 ff000000 ff000000
+adds m eq x 80000001 ff000000 ff000001
+adds n eq x 00000001 7effffff 7effffff
+adds z eq x 00000001 7effffff 7effffff
+adds m eq x 00000001 7effffff 7effffff
+adds p eq x 00000001 7effffff 7f000000
+adds n eq x 80000001 feffffff feffffff
+adds z eq x 80000001 feffffff feffffff
+adds p eq x 80000001 feffffff feffffff
+adds m eq x 80000001 feffffff ff000000
+adds n eq x 00000001 7f7fffff 7f7fffff
+adds z eq x 00000001 7f7fffff 7f7fffff
+adds m eq x 00000001 7f7fffff 7f7fffff
+adds p eq xo 00000001 7f7fffff 7f800000
+adds n eq x 80000001 ff7fffff ff7fffff
+adds z eq x 80000001 ff7fffff ff7fffff
+adds p eq x 80000001 ff7fffff ff7fffff
+adds m eq xo 80000001 ff7fffff ff800000
+adds n eq x 00000001 7f7ffffe 7f7ffffe
+adds z eq x 00000001 7f7ffffe 7f7ffffe
+adds m eq x 00000001 7f7ffffe 7f7ffffe
+adds p eq x 00000001 7f7ffffe 7f7fffff
+adds n eq x 80000001 ff7ffffe ff7ffffe
+adds z eq x 80000001 ff7ffffe ff7ffffe
+adds p eq x 80000001 ff7ffffe ff7ffffe
+adds m eq x 80000001 ff7ffffe ff7fffff
+adds n eq x 00000001 3f800000 3f800000
+adds z eq x 00000001 3f800000 3f800000
+adds m eq x 00000001 3f800000 3f800000
+adds p eq x 00000001 3f800000 3f800001
+adds n eq x 80000001 bf800000 bf800000
+adds z eq x 80000001 bf800000 bf800000
+adds p eq x 80000001 bf800000 bf800000
+adds m eq x 80000001 bf800000 bf800001
+adds n eq x 00000001 3f7fffff 3f7fffff
+adds z eq x 00000001 3f7fffff 3f7fffff
+adds m eq x 00000001 3f7fffff 3f7fffff
+adds p eq x 00000001 3f7fffff 3f800000
+adds n eq x 80000001 bf7fffff bf7fffff
+adds z eq x 80000001 bf7fffff bf7fffff
+adds p eq x 80000001 bf7fffff bf7fffff
+adds m eq x 80000001 bf7fffff bf800000
+adds n eq x 00000001 3fffffff 3fffffff
+adds z eq x 00000001 3fffffff 3fffffff
+adds m eq x 00000001 3fffffff 3fffffff
+adds p eq x 00000001 3fffffff 40000000
+adds n eq x 80000001 bfffffff bfffffff
+adds z eq x 80000001 bfffffff bfffffff
+adds p eq x 80000001 bfffffff bfffffff
+adds m eq x 80000001 bfffffff c0000000
+adds n eq x 00000001 3ffffffe 3ffffffe
+adds z eq x 00000001 3ffffffe 3ffffffe
+adds m eq x 00000001 3ffffffe 3ffffffe
+adds p eq x 00000001 3ffffffe 3fffffff
+adds n eq x 80000001 bffffffe bffffffe
+adds z eq x 80000001 bffffffe bffffffe
+adds p eq x 80000001 bffffffe bffffffe
+adds m eq x 80000001 bffffffe bfffffff
+adds n eq x 7f000000 bf800000 7f000000
+adds p eq x 7f000000 bf800000 7f000000
+adds z eq x 7f000000 bf800000 7effffff
+adds m eq x 7f000000 bf800000 7effffff
+adds n eq x ff000000 3f800000 ff000000
+adds m eq x ff000000 3f800000 ff000000
+adds z eq x ff000000 3f800000 feffffff
+adds p eq x ff000000 3f800000 feffffff
+adds n eq x 7effffff bf800000 7effffff
+adds p eq x 7effffff bf800000 7effffff
+adds z eq x 7effffff bf800000 7efffffe
+adds m eq x 7effffff bf800000 7efffffe
+adds n eq x feffffff 3f800000 feffffff
+adds m eq x feffffff 3f800000 feffffff
+adds z eq x feffffff 3f800000 fefffffe
+adds p eq x feffffff 3f800000 fefffffe
+adds n eq x 7f7fffff bf800000 7f7fffff
+adds p eq x 7f7fffff bf800000 7f7fffff
+adds z eq x 7f7fffff bf800000 7f7ffffe
+adds m eq x 7f7fffff bf800000 7f7ffffe
+adds n eq x ff7fffff 3f800000 ff7fffff
+adds m eq x ff7fffff 3f800000 ff7fffff
+adds z eq x ff7fffff 3f800000 ff7ffffe
+adds p eq x ff7fffff 3f800000 ff7ffffe
+adds n eq x 7f7ffffe bf800000 7f7ffffe
+adds p eq x 7f7ffffe bf800000 7f7ffffe
+adds z eq x 7f7ffffe bf800000 7f7ffffd
+adds m eq x 7f7ffffe bf800000 7f7ffffd
+adds n eq x ff7ffffe 3f800000 ff7ffffe
+adds m eq x ff7ffffe 3f800000 ff7ffffe
+adds z eq x ff7ffffe 3f800000 ff7ffffd
+adds p eq x ff7ffffe 3f800000 ff7ffffd
+adds n eq x 7f7fffff 80000001 7f7fffff
+adds p eq x 7f7fffff 80000001 7f7fffff
+adds z eq x 7f7fffff 80000001 7f7ffffe
+adds m eq x 7f7fffff 80000001 7f7ffffe
+adds n eq x ff7fffff 00000001 ff7fffff
+adds m eq x ff7fffff 00000001 ff7fffff
+adds z eq x ff7fffff 00000001 ff7ffffe
+adds p eq x ff7fffff 00000001 ff7ffffe
+adds n eq x 80000003 7f000000 7f000000
+adds p eq x 80000003 7f000000 7f000000
+adds z eq x 80000003 7f000000 7effffff
+adds m eq x 80000003 7f000000 7effffff
+adds n eq x 00000003 ff000000 ff000000
+adds m eq x 00000003 ff000000 ff000000
+adds z eq x 00000003 ff000000 feffffff
+adds p eq x 00000003 ff000000 feffffff
+adds n eq x 3f7fffff 80000001 3f7fffff
+adds p eq x 3f7fffff 80000001 3f7fffff
+adds z eq x 3f7fffff 80000001 3f7ffffe
+adds m eq x 3f7fffff 80000001 3f7ffffe
+adds n eq x bfffffff 00000001 bfffffff
+adds m eq x bfffffff 00000001 bfffffff
+adds z eq x bfffffff 00000001 bffffffe
+adds p eq x bfffffff 00000001 bffffffe
+adds n eq x 80000003 40400000 40400000
+adds p eq x 80000003 40400000 40400000
+adds z eq x 80000003 40400000 403fffff
+adds m eq x 80000003 40400000 403fffff
+adds n eq x 00000003 c0a00000 c0a00000
+adds m eq x 00000003 c0a00000 c0a00000
+adds z eq x 00000003 c0a00000 c09fffff
+adds p eq x 00000003 c0a00000 c09fffff
+adds n eq x 3f800001 3f800000 40000000
+adds z eq x 3f800001 3f800000 40000000
+adds m eq x 3f800001 3f800000 40000000
+adds p eq x 3f800001 3f800000 40000001
+adds n eq x bf800001 bf800000 c0000000
+adds z eq x bf800001 bf800000 c0000000
+adds p eq x bf800001 bf800000 c0000000
+adds m eq x bf800001 bf800000 c0000001
+adds n eq x c0000000 c0000001 c0800000
+adds z eq x c0000000 c0000001 c0800000
+adds p eq x c0000000 c0000001 c0800000
+adds m eq x c0000000 c0000001 c0800001
+adds n eq x 40000000 40000001 40800000
+adds z eq x 40000000 40000001 40800000
+adds m eq x 40000000 40000001 40800000
+adds p eq x 40000000 40000001 40800001
+adds n eq x 3f800000 3f800003 40000002
+adds p eq x 3f800000 3f800003 40000002
+adds z eq x 3f800000 3f800003 40000001
+adds m eq x 3f800000 3f800003 40000001
+adds n eq x bf800000 bf800003 c0000002
+adds m eq x bf800000 bf800003 c0000002
+adds z eq x bf800000 bf800003 c0000001
+adds p eq x bf800000 bf800003 c0000001
+adds n eq x c0000001 c0000002 c0800002
+adds m eq x c0000001 c0000002 c0800002
+adds z eq x c0000001 c0000002 c0800001
+adds p eq x c0000001 c0000002 c0800001
+adds n eq x 40000001 40000002 40800002
+adds p eq x 40000001 40000002 40800002
+adds z eq x 40000001 40000002 40800001
+adds m eq x 40000001 40000002 40800001
+adds n eq xo 7f7ffffe 7f7fffff 7f800000
+adds p eq xo 7f7ffffe 7f7fffff 7f800000
+adds z eq xo 7f7ffffe 7f7fffff 7f7fffff
+adds m eq xo 7f7ffffe 7f7fffff 7f7fffff
+adds n eq xo ff7ffffe ff7fffff ff800000
+adds m eq xo ff7ffffe ff7fffff ff800000
+adds z eq xo ff7ffffe ff7fffff ff7fffff
+adds p eq xo ff7ffffe ff7fffff ff7fffff
+adds n eq xo 7effffff 7f000000 7f800000
+adds p eq xo 7effffff 7f000000 7f800000
+adds z eq x 7effffff 7f000000 7f7fffff
+adds m eq x 7effffff 7f000000 7f7fffff
+adds n eq xo feffffff ff000000 ff800000
+adds m eq xo feffffff ff000000 ff800000
+adds z eq x feffffff ff000000 ff7fffff
+adds p eq x feffffff ff000000 ff7fffff
+adds n eq xo 7f000001 7f000000 7f800000
+adds p eq xo 7f000001 7f000000 7f800000
+adds z eq xo 7f000001 7f000000 7f7fffff
+adds m eq xo 7f000001 7f000000 7f7fffff
+adds n eq xo ff000001 ff000000 ff800000
+adds m eq xo ff000001 ff000000 ff800000
+adds z eq xo ff000001 ff000000 ff7fffff
+adds p eq xo ff000001 ff000000 ff7fffff
+adds n eq x 7e800001 7e800000 7f000000
+adds z eq x 7e800001 7e800000 7f000000
+adds m eq x 7e800001 7e800000 7f000000
+adds p eq x 7e800001 7e800000 7f000001
+adds n eq x fe800001 fe800000 ff000000
+adds z eq x fe800001 fe800000 ff000000
+adds p eq x fe800001 fe800000 ff000000
+adds m eq x fe800001 fe800000 ff000001
+adds n eq x 7efffffe 7effffff 7f7ffffe
+adds z eq x 7efffffe 7effffff 7f7ffffe
+adds m eq x 7efffffe 7effffff 7f7ffffe
+adds p eq x 7efffffe 7effffff 7f7fffff
+adds n eq x fefffffe feffffff ff7ffffe
+adds z eq x fefffffe feffffff ff7ffffe
+adds p eq x fefffffe feffffff ff7ffffe
+adds m eq x fefffffe feffffff ff7fffff
+adds p eq x 40000000 34000000 40000001
+adds n eq x 40000000 34000000 40000000
+adds z eq x 40000000 34000000 40000000
+adds m eq x 40000000 34000000 40000000
+adds n eq x 40000001 34000000 40000002
+adds p eq x 40000001 34000000 40000002
+adds z eq x 40000001 34000000 40000001
+adds m eq x 40000001 34000000 40000001
+adds n eq x 407fffff 34000000 40800000
+adds p eq x 407fffff 34000000 40800000
+adds z eq x 407fffff 34000000 407fffff
+adds m eq x 407fffff 34000000 407fffff
+adds p eq x 407fffff 33ffffff 40800000
+adds z eq x 407fffff 33ffffff 407fffff
+adds n eq x 407fffff 33ffffff 407fffff
+adds m eq x 407fffff 33ffffff 407fffff
+adds n eq x c07fffff b4000000 c0800000
+adds m eq x c07fffff b4000000 c0800000
+adds z eq x c07fffff b4000000 c07fffff
+adds p eq x c07fffff b4000000 c07fffff
+adds m eq x c07fffff b3ffffff c0800000
+adds z eq x c07fffff b3ffffff c07fffff
+adds n eq x c07fffff b3ffffff c07fffff
+adds p eq x c07fffff b3ffffff c07fffff
+adds n uo - 7fff0000 00000000 7fff0000
+adds m uo - 7fff0000 00000000 7fff0000
+adds p uo - 7fff0000 00000000 7fff0000
+adds z uo - 7fff0000 00000000 7fff0000
+adds n uo - 7fff0000 80000000 7fff0000
+adds m uo - 7fff0000 80000000 7fff0000
+adds p uo - 7fff0000 80000000 7fff0000
+adds z uo - 7fff0000 80000000 7fff0000
+adds n uo - 00000000 7fff0000 7fff0000
+adds m uo - 00000000 7fff0000 7fff0000
+adds p uo - 00000000 7fff0000 7fff0000
+adds z uo - 00000000 7fff0000 7fff0000
+adds n uo - 80000000 7fff0000 7fff0000
+adds m uo - 80000000 7fff0000 7fff0000
+adds p uo - 80000000 7fff0000 7fff0000
+adds z uo - 80000000 7fff0000 7fff0000
+adds n uo - 7fff0000 3f800000 7fff0000
+adds m uo - 7fff0000 3f800000 7fff0000
+adds p uo - 7fff0000 3f800000 7fff0000
+adds z uo - 7fff0000 3f800000 7fff0000
+adds n uo - 7fff0000 bf800000 7fff0000
+adds m uo - 7fff0000 bf800000 7fff0000
+adds p uo - 7fff0000 bf800000 7fff0000
+adds z uo - 7fff0000 bf800000 7fff0000
+adds n uo - 3f800000 7fff0000 7fff0000
+adds m uo - 3f800000 7fff0000 7fff0000
+adds p uo - 3f800000 7fff0000 7fff0000
+adds z uo - 3f800000 7fff0000 7fff0000
+adds n uo - bf800000 7fff0000 7fff0000
+adds m uo - bf800000 7fff0000 7fff0000
+adds p uo - bf800000 7fff0000 7fff0000
+adds z uo - bf800000 7fff0000 7fff0000
+adds n uo - 007fffff 7fff0000 7fff0000
+adds m uo - 007fffff 7fff0000 7fff0000
+adds p uo - 007fffff 7fff0000 7fff0000
+adds z uo - 007fffff 7fff0000 7fff0000
+adds n uo - 807fffff 7fff0000 7fff0000
+adds m uo - 807fffff 7fff0000 7fff0000
+adds p uo - 807fffff 7fff0000 7fff0000
+adds z uo - 807fffff 7fff0000 7fff0000
+adds n uo - 7fff0000 007fffff 7fff0000
+adds m uo - 7fff0000 007fffff 7fff0000
+adds p uo - 7fff0000 007fffff 7fff0000
+adds z uo - 7fff0000 007fffff 7fff0000
+adds n uo - 7fff0000 807fffff 7fff0000
+adds m uo - 7fff0000 807fffff 7fff0000
+adds p uo - 7fff0000 807fffff 7fff0000
+adds z uo - 7fff0000 807fffff 7fff0000
+adds n uo - 7fff0000 00000001 7fff0000
+adds m uo - 7fff0000 00000001 7fff0000
+adds p uo - 7fff0000 00000001 7fff0000
+adds z uo - 7fff0000 00000001 7fff0000
+adds n uo - 7fff0000 80000001 7fff0000
+adds m uo - 7fff0000 80000001 7fff0000
+adds p uo - 7fff0000 80000001 7fff0000
+adds z uo - 7fff0000 80000001 7fff0000
+adds n uo - 00000001 7fff0000 7fff0000
+adds m uo - 00000001 7fff0000 7fff0000
+adds p uo - 00000001 7fff0000 7fff0000
+adds z uo - 00000001 7fff0000 7fff0000
+adds n uo - 80000001 7fff0000 7fff0000
+adds m uo - 80000001 7fff0000 7fff0000
+adds p uo - 80000001 7fff0000 7fff0000
+adds z uo - 80000001 7fff0000 7fff0000
+adds n uo - 7fff0000 7f7fffff 7fff0000
+adds m uo - 7fff0000 7f7fffff 7fff0000
+adds p uo - 7fff0000 7f7fffff 7fff0000
+adds z uo - 7fff0000 7f7fffff 7fff0000
+adds n uo - 7fff0000 ff7fffff 7fff0000
+adds m uo - 7fff0000 ff7fffff 7fff0000
+adds p uo - 7fff0000 ff7fffff 7fff0000
+adds z uo - 7fff0000 ff7fffff 7fff0000
+adds n uo - 7f7fffff 7fff0000 7fff0000
+adds m uo - 7f7fffff 7fff0000 7fff0000
+adds p uo - 7f7fffff 7fff0000 7fff0000
+adds z uo - 7f7fffff 7fff0000 7fff0000
+adds n uo - ff7fffff 7fff0000 7fff0000
+adds m uo - ff7fffff 7fff0000 7fff0000
+adds p uo - ff7fffff 7fff0000 7fff0000
+adds z uo - ff7fffff 7fff0000 7fff0000
+adds n uo - 7fff0000 7f800000 7fff0000
+adds m uo - 7fff0000 7f800000 7fff0000
+adds p uo - 7fff0000 7f800000 7fff0000
+adds z uo - 7fff0000 7f800000 7fff0000
+adds n uo - 7fff0000 ff800000 7fff0000
+adds m uo - 7fff0000 ff800000 7fff0000
+adds p uo - 7fff0000 ff800000 7fff0000
+adds z uo - 7fff0000 ff800000 7fff0000
+adds n uo - 7f800000 7fff0000 7fff0000
+adds m uo - 7f800000 7fff0000 7fff0000
+adds p uo - 7f800000 7fff0000 7fff0000
+adds z uo - 7f800000 7fff0000 7fff0000
+adds n uo - ff800000 7fff0000 7fff0000
+adds m uo - ff800000 7fff0000 7fff0000
+adds p uo - ff800000 7fff0000 7fff0000
+adds z uo - ff800000 7fff0000 7fff0000
+adds n uo - 7fff0000 7fff0000 7fff0000
+adds m uo - 7fff0000 7fff0000 7fff0000
+adds p uo - 7fff0000 7fff0000 7fff0000
+adds z uo - 7fff0000 7fff0000 7fff0000
+adds n uo v 7f810000 00000000 7fff0000
+adds m uo v 7f810000 00000000 7fff0000
+adds p uo v 7f810000 00000000 7fff0000
+adds z uo v 7f810000 00000000 7fff0000
+adds n uo v 7f810000 80000000 7fff0000
+adds m uo v 7f810000 80000000 7fff0000
+adds p uo v 7f810000 80000000 7fff0000
+adds z uo v 7f810000 80000000 7fff0000
+adds n uo v 00000000 7f810000 7fff0000
+adds m uo v 00000000 7f810000 7fff0000
+adds p uo v 00000000 7f810000 7fff0000
+adds z uo v 00000000 7f810000 7fff0000
+adds n uo v 80000000 7f810000 7fff0000
+adds m uo v 80000000 7f810000 7fff0000
+adds p uo v 80000000 7f810000 7fff0000
+adds z uo v 80000000 7f810000 7fff0000
+adds n uo v 7f810000 3f800000 7fff0000
+adds m uo v 7f810000 3f800000 7fff0000
+adds p uo v 7f810000 3f800000 7fff0000
+adds z uo v 7f810000 3f800000 7fff0000
+adds n uo v 7f810000 bf800000 7fff0000
+adds m uo v 7f810000 bf800000 7fff0000
+adds p uo v 7f810000 bf800000 7fff0000
+adds z uo v 7f810000 bf800000 7fff0000
+adds n uo v 3f800000 7f810000 7fff0000
+adds m uo v 3f800000 7f810000 7fff0000
+adds p uo v 3f800000 7f810000 7fff0000
+adds z uo v 3f800000 7f810000 7fff0000
+adds n uo v bf800000 7f810000 7fff0000
+adds m uo v bf800000 7f810000 7fff0000
+adds p uo v bf800000 7f810000 7fff0000
+adds z uo v bf800000 7f810000 7fff0000
+adds n uo v 007fffff 7f810000 7fff0000
+adds m uo v 007fffff 7f810000 7fff0000
+adds p uo v 007fffff 7f810000 7fff0000
+adds z uo v 007fffff 7f810000 7fff0000
+adds n uo v 807fffff 7f810000 7fff0000
+adds m uo v 807fffff 7f810000 7fff0000
+adds p uo v 807fffff 7f810000 7fff0000
+adds z uo v 807fffff 7f810000 7fff0000
+adds n uo v 7f810000 007fffff 7fff0000
+adds m uo v 7f810000 007fffff 7fff0000
+adds p uo v 7f810000 007fffff 7fff0000
+adds z uo v 7f810000 007fffff 7fff0000
+adds n uo v 7f810000 807fffff 7fff0000
+adds m uo v 7f810000 807fffff 7fff0000
+adds p uo v 7f810000 807fffff 7fff0000
+adds z uo v 7f810000 807fffff 7fff0000
+adds n uo v 7f810000 00000001 7fff0000
+adds m uo v 7f810000 00000001 7fff0000
+adds p uo v 7f810000 00000001 7fff0000
+adds z uo v 7f810000 00000001 7fff0000
+adds n uo v 7f810000 80000001 7fff0000
+adds m uo v 7f810000 80000001 7fff0000
+adds p uo v 7f810000 80000001 7fff0000
+adds z uo v 7f810000 80000001 7fff0000
+adds n uo v 00000001 7f810000 7fff0000
+adds m uo v 00000001 7f810000 7fff0000
+adds p uo v 00000001 7f810000 7fff0000
+adds z uo v 00000001 7f810000 7fff0000
+adds n uo v 80000001 7f810000 7fff0000
+adds m uo v 80000001 7f810000 7fff0000
+adds p uo v 80000001 7f810000 7fff0000
+adds z uo v 80000001 7f810000 7fff0000
+adds n uo v 7f810000 7f7fffff 7fff0000
+adds m uo v 7f810000 7f7fffff 7fff0000
+adds p uo v 7f810000 7f7fffff 7fff0000
+adds z uo v 7f810000 7f7fffff 7fff0000
+adds n uo v 7f810000 ff7fffff 7fff0000
+adds m uo v 7f810000 ff7fffff 7fff0000
+adds p uo v 7f810000 ff7fffff 7fff0000
+adds z uo v 7f810000 ff7fffff 7fff0000
+adds n uo v 7f7fffff 7f810000 7fff0000
+adds m uo v 7f7fffff 7f810000 7fff0000
+adds p uo v 7f7fffff 7f810000 7fff0000
+adds z uo v 7f7fffff 7f810000 7fff0000
+adds n uo v ff7fffff 7f810000 7fff0000
+adds m uo v ff7fffff 7f810000 7fff0000
+adds p uo v ff7fffff 7f810000 7fff0000
+adds z uo v ff7fffff 7f810000 7fff0000
+adds n uo v 7f810000 7f800000 7fff0000
+adds m uo v 7f810000 7f800000 7fff0000
+adds p uo v 7f810000 7f800000 7fff0000
+adds z uo v 7f810000 7f800000 7fff0000
+adds n uo v 7f810000 ff800000 7fff0000
+adds m uo v 7f810000 ff800000 7fff0000
+adds p uo v 7f810000 ff800000 7fff0000
+adds z uo v 7f810000 ff800000 7fff0000
+adds n uo v 7f800000 7f810000 7fff0000
+adds m uo v 7f800000 7f810000 7fff0000
+adds p uo v 7f800000 7f810000 7fff0000
+adds z uo v 7f800000 7f810000 7fff0000
+adds n uo v ff800000 7f810000 7fff0000
+adds m uo v ff800000 7f810000 7fff0000
+adds p uo v ff800000 7f810000 7fff0000
+adds z uo v ff800000 7f810000 7fff0000
+adds n uo v 7fff0000 7f810000 7fff0000
+adds m uo v 7fff0000 7f810000 7fff0000
+adds p uo v 7fff0000 7f810000 7fff0000
+adds z uo v 7fff0000 7f810000 7fff0000
+adds n uo v 7f810000 7fff0000 7fff0000
+adds m uo v 7f810000 7fff0000 7fff0000
+adds p uo v 7f810000 7fff0000 7fff0000
+adds z uo v 7f810000 7fff0000 7fff0000
+adds n uo v 7f810000 7f810000 7fff0000
+adds m uo v 7f810000 7f810000 7fff0000
+adds p uo v 7f810000 7f810000 7fff0000
+adds z uo v 7f810000 7f810000 7fff0000
+adds n eq - 4f804000 cf800000 4b000000
+adds m eq - 4f804000 cf800000 4b000000
+adds p eq - 4f804000 cf800000 4b000000
+adds z eq - 4f804000 cf800000 4b000000
+adds n eq x 3f7fffff 34004000 3f800001
+adds p eq x 3f7fffff 34004000 3f800001
+adds m eq x 3f7fffff 34004000 3f800000
+adds z eq x 3f7fffff 34004000 3f800000
+adds n eq x 59800000 bf3ff800 59800000
+adds p eq x 59800000 bf3ff800 59800000
+adds m eq x 59800000 bf3ff800 597fffff
+adds z eq x 59800000 bf3ff800 597fffff
+adds n eq - 00000000 ff000000 ff000000
+adds m eq - 00000000 ff000000 ff000000
+adds p eq - 00000000 ff000000 ff000000
+adds z eq - 00000000 ff000000 ff000000
+adds n eq - 00000000 00000003 00000003
+adds m eq - 00000000 00000003 00000003
+adds p eq - 00000000 00000003 00000003
+adds z eq - 00000000 00000003 00000003
+adds n eq - 00000000 80000003 80000003
+adds m eq - 00000000 80000003 80000003
+adds p eq - 00000000 80000003 80000003
+adds z eq - 00000000 80000003 80000003
+adds n eq - 00000000 00800000 00800000
+adds m eq - 00000000 00800000 00800000
+adds p eq - 00000000 00800000 00800000
+adds z eq - 00000000 00800000 00800000
+adds n eq x 3f800000 00000001 3f800000
+adds z eq x 3f800000 00000001 3f800000
+adds m eq x 3f800000 00000001 3f800000
+adds p eq x 3f800000 00000001 3f800001
+adds n eq x bf800000 80000001 bf800000
+adds z eq x bf800000 80000001 bf800000
+adds p eq x bf800000 80000001 bf800000
+adds m eq x bf800000 80000001 bf800001
+adds n eq x 40400000 80000003 40400000
+adds p eq x 40400000 80000003 40400000
+adds z eq x 40400000 80000003 403fffff
+adds m eq x 40400000 80000003 403fffff
+adds n eq x c0a00000 00000003 c0a00000
+adds m eq x c0a00000 00000003 c0a00000
+adds z eq x c0a00000 00000003 c09fffff
+adds p eq x c0a00000 00000003 c09fffff
+adds n eq x 3f800000 3f800001 40000000
+adds z eq x 3f800000 3f800001 40000000
+adds m eq x 3f800000 3f800001 40000000
+adds p eq x 3f800000 3f800001 40000001
+adds n eq x bf800000 bf800001 c0000000
+adds z eq x bf800000 bf800001 c0000000
+adds p eq x bf800000 bf800001 c0000000
+adds m eq x bf800000 bf800001 c0000001
+adds n eq - bf800000 3f800001 34000000
+adds m eq - bf800000 3f800001 34000000
+adds p eq - bf800000 3f800001 34000000
+adds z eq - bf800000 3f800001 34000000
+adds n eq - 3f800000 bf800001 b4000000
+adds m eq - 3f800000 bf800001 b4000000
+adds p eq - 3f800000 bf800001 b4000000
+adds z eq - 3f800000 bf800001 b4000000
+adds n eq x 3f800000 7f000000 7f000000
+adds z eq x 3f800000 7f000000 7f000000
+adds m eq x 3f800000 7f000000 7f000000
+adds p eq x 3f800000 7f000000 7f000001
+adds n eq x bf800000 ff000000 ff000000
+adds z eq x bf800000 ff000000 ff000000
+adds p eq x bf800000 ff000000 ff000000
+adds m eq x bf800000 ff000000 ff000001
+adds n eq x 3f800000 7effffff 7effffff
+adds z eq x 3f800000 7effffff 7effffff
+adds m eq x 3f800000 7effffff 7effffff
+adds p eq x 3f800000 7effffff 7f000000
+adds n eq x bf800000 feffffff feffffff
+adds z eq x bf800000 feffffff feffffff
+adds p eq x bf800000 feffffff feffffff
+adds m eq x bf800000 feffffff ff000000
+adds n eq x 3f800000 7f7fffff 7f7fffff
+adds z eq x 3f800000 7f7fffff 7f7fffff
+adds m eq x 3f800000 7f7fffff 7f7fffff
+adds p eq xo 3f800000 7f7fffff 7f800000
+adds n eq x bf800000 ff7fffff ff7fffff
+adds z eq x bf800000 ff7fffff ff7fffff
+adds p eq x bf800000 ff7fffff ff7fffff
+adds m eq xo bf800000 ff7fffff ff800000
+adds n eq x 3f800000 7f7ffffe 7f7ffffe
+adds z eq x 3f800000 7f7ffffe 7f7ffffe
+adds m eq x 3f800000 7f7ffffe 7f7ffffe
+adds p eq x 3f800000 7f7ffffe 7f7fffff
+adds n eq x bf800000 ff7ffffe ff7ffffe
+adds z eq x bf800000 ff7ffffe ff7ffffe
+adds p eq x bf800000 ff7ffffe ff7ffffe
+adds m eq x bf800000 ff7ffffe ff7fffff
+adds n eq x bf800000 7f000000 7f000000
+adds p eq x bf800000 7f000000 7f000000
+adds z eq x bf800000 7f000000 7effffff
+adds m eq x bf800000 7f000000 7effffff
+adds n eq x 3f800000 ff000000 ff000000
+adds m eq x 3f800000 ff000000 ff000000
+adds z eq x 3f800000 ff000000 feffffff
+adds p eq x 3f800000 ff000000 feffffff
+adds n eq x bf800000 7effffff 7effffff
+adds p eq x bf800000 7effffff 7effffff
+adds z eq x bf800000 7effffff 7efffffe
+adds m eq x bf800000 7effffff 7efffffe
+adds n eq x 3f800000 feffffff feffffff
+adds m eq x 3f800000 feffffff feffffff
+adds z eq x 3f800000 feffffff fefffffe
+adds p eq x 3f800000 feffffff fefffffe
+adds n eq x bf800000 7f7fffff 7f7fffff
+adds p eq x bf800000 7f7fffff 7f7fffff
+adds z eq x bf800000 7f7fffff 7f7ffffe
+adds m eq x bf800000 7f7fffff 7f7ffffe
+adds n eq x 3f800000 ff7fffff ff7fffff
+adds m eq x 3f800000 ff7fffff ff7fffff
+adds z eq x 3f800000 ff7fffff ff7ffffe
+adds p eq x 3f800000 ff7fffff ff7ffffe
+adds n eq x bf800000 7f7ffffe 7f7ffffe
+adds p eq x bf800000 7f7ffffe 7f7ffffe
+adds z eq x bf800000 7f7ffffe 7f7ffffd
+adds m eq x bf800000 7f7ffffe 7f7ffffd
+adds n eq x 3f800000 ff7ffffe ff7ffffe
+adds m eq x 3f800000 ff7ffffe ff7ffffe
+adds z eq x 3f800000 ff7ffffe ff7ffffd
+adds p eq x 3f800000 ff7ffffe ff7ffffd
+adds n eq - 45800000 45800000 46000000
+adds m eq - 45800000 45800000 46000000
+adds p eq - 45800000 45800000 46000000
+adds z eq - 45800000 45800000 46000000
+adds n eq - 45000000 45000000 45800000
+adds m eq - 45000000 45000000 45800000
+adds p eq - 45000000 45000000 45800000
+adds z eq - 45000000 45000000 45800000
+adds n eq - 45000000 45800000 45c00000
+adds m eq - 45000000 45800000 45c00000
+adds p eq - 45000000 45800000 45c00000
+adds z eq - 45000000 45800000 45c00000
+adds n eq - c5800000 c5800000 c6000000
+adds m eq - c5800000 c5800000 c6000000
+adds p eq - c5800000 c5800000 c6000000
+adds z eq - c5800000 c5800000 c6000000
+adds n eq - c5000000 c5000000 c5800000
+adds m eq - c5000000 c5000000 c5800000
+adds p eq - c5000000 c5000000 c5800000
+adds z eq - c5000000 c5000000 c5800000
+adds n eq - c5000000 c5800000 c5c00000
+adds m eq - c5000000 c5800000 c5c00000
+adds p eq - c5000000 c5800000 c5c00000
+adds z eq - c5000000 c5800000 c5c00000
+adds n eq - 00000000 5e800000 5e800000
+adds m eq - 00000000 5e800000 5e800000
+adds p eq - 00000000 5e800000 5e800000
+adds z eq - 00000000 5e800000 5e800000
+adds n eq - 46800000 00000000 46800000
+adds m eq - 46800000 00000000 46800000
+adds p eq - 46800000 00000000 46800000
+adds z eq - 46800000 00000000 46800000
+adds p eq - c6800000 46800000 00000000
+adds n eq - c6800000 46800000 00000000
+adds z eq - c6800000 46800000 00000000
+adds p eq - 46800000 c6800000 00000000
+adds n eq - 46800000 c6800000 00000000
+adds z eq - 46800000 c6800000 00000000
+adds m eq - c6800000 46800000 80000000
+adds m eq - 46800000 c6800000 80000000
+adds n eq - 45800000 c5000000 45000000
+adds m eq - 45800000 c5000000 45000000
+adds p eq - 45800000 c5000000 45000000
+adds z eq - 45800000 c5000000 45000000
+adds n eq - c5000000 45800000 45000000
+adds m eq - c5000000 45800000 45000000
+adds p eq - c5000000 45800000 45000000
+adds z eq - c5000000 45800000 45000000
+adds n eq - c5800000 45000000 c5000000
+adds m eq - c5800000 45000000 c5000000
+adds p eq - c5800000 45000000 c5000000
+adds z eq - c5800000 45000000 c5000000
+adds n eq - 45000000 c5800000 c5000000
+adds m eq - 45000000 c5800000 c5000000
+adds p eq - 45000000 c5800000 c5000000
+adds z eq - 45000000 c5800000 c5000000
+adds n eq - 46fffa00 40000000 46fffe00
+adds m eq - 46fffa00 40000000 46fffe00
+adds p eq - 46fffa00 40000000 46fffe00
+adds z eq - 46fffa00 40000000 46fffe00
+adds n eq - 46fffe00 3f800000 47000000
+adds m eq - 46fffe00 3f800000 47000000
+adds p eq - 46fffe00 3f800000 47000000
+adds z eq - 46fffe00 3f800000 47000000
+adds n eq - 46fff600 40000000 46fffa00
+adds m eq - 46fff600 40000000 46fffa00
+adds p eq - 46fff600 40000000 46fffa00
+adds z eq - 46fff600 40000000 46fffa00
+adds n eq - 40000000 46fffa00 46fffe00
+adds m eq - 40000000 46fffa00 46fffe00
+adds p eq - 40000000 46fffa00 46fffe00
+adds z eq - 40000000 46fffa00 46fffe00
+adds n eq - 3f800000 46fffe00 47000000
+adds m eq - 3f800000 46fffe00 47000000
+adds p eq - 3f800000 46fffe00 47000000
+adds z eq - 3f800000 46fffe00 47000000
+adds n eq - 40000000 46fff600 46fffa00
+adds m eq - 40000000 46fff600 46fffa00
+adds p eq - 40000000 46fff600 46fffa00
+adds z eq - 40000000 46fff600 46fffa00
+adds p eq - c6fffe00 46fffe00 00000000
+adds n eq - c6fffe00 46fffe00 00000000
+adds z eq - c6fffe00 46fffe00 00000000
+adds m eq - 46fffe00 c6fffe00 80000000
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/divd.input b/verrou/unitTest/checkUCB-vecto/inputData/divd.input
new file mode 100644
index 0000000000000000000000000000000000000000..34ac748ce6086bd4699af0b5c8dc2cced76d931f
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/divd.input
@@ -0,0 +1,1559 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
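+/*  */
+/* Format note (verrou unit tests; a best-effort description, not taken */
+/* from the Sun sources): each data line below appears to encode one */
+/* double-precision division test case, */
+/*     divd <round> <cmp> <flags> <op1 hi> <op1 lo> <op2 hi> <op2 lo> <result hi> <result lo> */
+/* with <round> n/z/p/m (nearest, toward zero, toward +inf, toward -inf), */
+/* <cmp> eq (compare equal) or uo (unordered, i.e. NaN), and <flags> drawn */
+/* from x (inexact), o (overflow), u (underflow), v (invalid) or - (none); */
+/* each double is written as two 32-bit hex words, high word first. */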
+
+divd z eq xu 000fffff fffffff8 3ff00000 00000008 000fffff fffffff0
+divd p eq xu 000fffff fffffff8 3ff00000 00000008 000fffff fffffff1
+divd n eq xu 20000000 02000000 5fe00000 02000001 000fffff ffffffff
+divd m eq xu 20000000 02000000 5fe00000 02000001 000fffff ffffffff
+divd z eq xu 20000000 02000000 5fe00000 02000001 000fffff ffffffff
+divd p eq xu 20000000 02000000 5fe00000 02000001 00100000 00000000
+divd n eq - 40e00000 00000000 40400000 00000000 40900000 00000000
+divd m eq - 40e00000 00000000 40400000 00000000 40900000 00000000
+divd p eq - 40e00000 00000000 40400000 00000000 40900000 00000000
+divd z eq - 40e00000 00000000 40400000 00000000 40900000 00000000
+divd n eq - 43e00000 00000000 41600000 00000000 42700000 00000000
+divd m eq - 43e00000 00000000 41600000 00000000 42700000 00000000
+divd p eq - 43e00000 00000000 41600000 00000000 42700000 00000000
+divd z eq - 43e00000 00000000 41600000 00000000 42700000 00000000
+divd n eq - 40dffe00 00000000 40240000 00000000 40a99800 00000000
+divd m eq - 40dffe00 00000000 40240000 00000000 40a99800 00000000
+divd p eq - 40dffe00 00000000 40240000 00000000 40a99800 00000000
+divd z eq - 40dffe00 00000000 40240000 00000000 40a99800 00000000
+divd n eq - c0e00000 00000000 40400000 00000000 c0900000 00000000
+divd m eq - c0e00000 00000000 40400000 00000000 c0900000 00000000
+divd p eq - c0e00000 00000000 40400000 00000000 c0900000 00000000
+divd z eq - c0e00000 00000000 40400000 00000000 c0900000 00000000
+divd n eq - 40e00000 00000000 c0400000 00000000 c0900000 00000000
+divd m eq - 40e00000 00000000 c0400000 00000000 c0900000 00000000
+divd p eq - 40e00000 00000000 c0400000 00000000 c0900000 00000000
+divd z eq - 40e00000 00000000 c0400000 00000000 c0900000 00000000
+divd n eq - 47700000 00000000 41300000 00000000 46300000 00000000
+divd m eq - 47700000 00000000 41300000 00000000 46300000 00000000
+divd p eq - 47700000 00000000 41300000 00000000 46300000 00000000
+divd z eq - 47700000 00000000 41300000 00000000 46300000 00000000
+divd n eq - c7700000 00000000 41300000 00000000 c6300000 00000000
+divd m eq - c7700000 00000000 41300000 00000000 c6300000 00000000
+divd p eq - c7700000 00000000 41300000 00000000 c6300000 00000000
+divd z eq - c7700000 00000000 41300000 00000000 c6300000 00000000
+divd n eq - 47700000 00000000 c1300000 00000000 c6300000 00000000
+divd m eq - 47700000 00000000 c1300000 00000000 c6300000 00000000
+divd p eq - 47700000 00000000 c1300000 00000000 c6300000 00000000
+divd z eq - 47700000 00000000 c1300000 00000000 c6300000 00000000
+divd n eq - 42e00000 00000000 40c00000 00000000 42100000 00000000
+divd m eq - 42e00000 00000000 40c00000 00000000 42100000 00000000
+divd p eq - 42e00000 00000000 40c00000 00000000 42100000 00000000
+divd z eq - 42e00000 00000000 40c00000 00000000 42100000 00000000
+divd n eq - 40c38800 00000000 40240000 00000000 408f4000 00000000
+divd m eq - 40c38800 00000000 40240000 00000000 408f4000 00000000
+divd p eq - 40c38800 00000000 40240000 00000000 408f4000 00000000
+divd z eq - 40c38800 00000000 40240000 00000000 408f4000 00000000
+divd n eq - 40c38800 00000000 40590000 00000000 40590000 00000000
+divd m eq - 40c38800 00000000 40590000 00000000 40590000 00000000
+divd p eq - 40c38800 00000000 40590000 00000000 40590000 00000000
+divd z eq - 40c38800 00000000 40590000 00000000 40590000 00000000
+divd n eq - 40c38800 00000000 408f4000 00000000 40240000 00000000
+divd m eq - 40c38800 00000000 408f4000 00000000 40240000 00000000
+divd p eq - 40c38800 00000000 408f4000 00000000 40240000 00000000
+divd z eq - 40c38800 00000000 408f4000 00000000 40240000 00000000
+divd n eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+divd m eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+divd p eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+divd z eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+divd n eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+divd m eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+divd p eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+divd z eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+divd n eq - 40220000 00000000 40080000 00000000 40080000 00000000
+divd m eq - 40220000 00000000 40080000 00000000 40080000 00000000
+divd p eq - 40220000 00000000 40080000 00000000 40080000 00000000
+divd z eq - 40220000 00000000 40080000 00000000 40080000 00000000
+divd n eq - 40140000 00000000 40140000 00000000 3ff00000 00000000
+divd m eq - 40140000 00000000 40140000 00000000 3ff00000 00000000
+divd p eq - 40140000 00000000 40140000 00000000 3ff00000 00000000
+divd z eq - 40140000 00000000 40140000 00000000 3ff00000 00000000
+divd n eq - 40200000 00000000 40000000 00000000 40100000 00000000
+divd m eq - 40200000 00000000 40000000 00000000 40100000 00000000
+divd p eq - 40200000 00000000 40000000 00000000 40100000 00000000
+divd z eq - 40200000 00000000 40000000 00000000 40100000 00000000
+divd n eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+divd m eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+divd p eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+divd z eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+divd n eq - c0000000 00000000 3ff00000 00000000 c0000000 00000000
+divd m eq - c0000000 00000000 3ff00000 00000000 c0000000 00000000
+divd p eq - c0000000 00000000 3ff00000 00000000 c0000000 00000000
+divd z eq - c0000000 00000000 3ff00000 00000000 c0000000 00000000
+divd n eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+divd m eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+divd p eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+divd z eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+divd n eq - c0200000 00000000 40000000 00000000 c0100000 00000000
+divd m eq - c0200000 00000000 40000000 00000000 c0100000 00000000
+divd p eq - c0200000 00000000 40000000 00000000 c0100000 00000000
+divd z eq - c0200000 00000000 40000000 00000000 c0100000 00000000
+divd n eq - 40080000 00000000 c0080000 00000000 bff00000 00000000
+divd m eq - 40080000 00000000 c0080000 00000000 bff00000 00000000
+divd p eq - 40080000 00000000 c0080000 00000000 bff00000 00000000
+divd z eq - 40080000 00000000 c0080000 00000000 bff00000 00000000
+divd n eq - c01c0000 00000000 401c0000 00000000 bff00000 00000000
+divd m eq - c01c0000 00000000 401c0000 00000000 bff00000 00000000
+divd p eq - c01c0000 00000000 401c0000 00000000 bff00000 00000000
+divd z eq - c01c0000 00000000 401c0000 00000000 bff00000 00000000
+divd n eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+divd m eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+divd p eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+divd z eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+divd n eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+divd m eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+divd p eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+divd z eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+divd n eq - c0180000 00000000 c0080000 00000000 40000000 00000000
+divd m eq - c0180000 00000000 c0080000 00000000 40000000 00000000
+divd p eq - c0180000 00000000 c0080000 00000000 40000000 00000000
+divd z eq - c0180000 00000000 c0080000 00000000 40000000 00000000
+divd n eq - c0220000 00000000 c0080000 00000000 40080000 00000000
+divd m eq - c0220000 00000000 c0080000 00000000 40080000 00000000
+divd p eq - c0220000 00000000 c0080000 00000000 40080000 00000000
+divd z eq - c0220000 00000000 c0080000 00000000 40080000 00000000
+divd n uo v 00000000 00000000 00000000 00000000 7fffe000 00000000
+divd m uo v 00000000 00000000 00000000 00000000 7fffe000 00000000
+divd p uo v 00000000 00000000 00000000 00000000 7fffe000 00000000
+divd z uo v 00000000 00000000 00000000 00000000 7fffe000 00000000
+divd n uo v 80000000 00000000 00000000 00000000 ffffe000 00000000
+divd m uo v 80000000 00000000 00000000 00000000 ffffe000 00000000
+divd p uo v 80000000 00000000 00000000 00000000 ffffe000 00000000
+divd z uo v 80000000 00000000 00000000 00000000 ffffe000 00000000
+divd n uo v 00000000 00000000 80000000 00000000 ffffe000 00000000
+divd m uo v 00000000 00000000 80000000 00000000 ffffe000 00000000
+divd p uo v 00000000 00000000 80000000 00000000 ffffe000 00000000
+divd z uo v 00000000 00000000 80000000 00000000 ffffe000 00000000
+divd n uo v 80000000 00000000 80000000 00000000 7fffe000 00000000
+divd m uo v 80000000 00000000 80000000 00000000 7fffe000 00000000
+divd p uo v 80000000 00000000 80000000 00000000 7fffe000 00000000
+divd z uo v 80000000 00000000 80000000 00000000 7fffe000 00000000
+divd n uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+divd m uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+divd p uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+divd z uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+divd n uo v fff00000 00000000 7ff00000 00000000 ffffe000 00000000
+divd m uo v fff00000 00000000 7ff00000 00000000 ffffe000 00000000
+divd p uo v fff00000 00000000 7ff00000 00000000 ffffe000 00000000
+divd z uo v fff00000 00000000 7ff00000 00000000 ffffe000 00000000
+divd n uo v 7ff00000 00000000 fff00000 00000000 ffffe000 00000000
+divd m uo v 7ff00000 00000000 fff00000 00000000 ffffe000 00000000
+divd p uo v 7ff00000 00000000 fff00000 00000000 ffffe000 00000000
+divd z uo v 7ff00000 00000000 fff00000 00000000 ffffe000 00000000
+divd n uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+divd m uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+divd p uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+divd z uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+divd n eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+divd m eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+divd p eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+divd z eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+divd n eq - 7ff00000 00000000 80000000 00000000 fff00000 00000000
+divd m eq - 7ff00000 00000000 80000000 00000000 fff00000 00000000
+divd p eq - 7ff00000 00000000 80000000 00000000 fff00000 00000000
+divd z eq - 7ff00000 00000000 80000000 00000000 fff00000 00000000
+divd n eq - fff00000 00000000 80000000 00000000 7ff00000 00000000
+divd m eq - fff00000 00000000 80000000 00000000 7ff00000 00000000
+divd p eq - fff00000 00000000 80000000 00000000 7ff00000 00000000
+divd z eq - fff00000 00000000 80000000 00000000 7ff00000 00000000
+divd n eq - 00000000 00000000 7ff00000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 7ff00000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 7ff00000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 7ff00000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 7ff00000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 7ff00000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 7ff00000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 7ff00000 00000000 80000000 00000000
+divd n eq - 00000000 00000000 fff00000 00000000 80000000 00000000
+divd m eq - 00000000 00000000 fff00000 00000000 80000000 00000000
+divd p eq - 00000000 00000000 fff00000 00000000 80000000 00000000
+divd z eq - 00000000 00000000 fff00000 00000000 80000000 00000000
+divd n eq - 80000000 00000000 fff00000 00000000 00000000 00000000
+divd m eq - 80000000 00000000 fff00000 00000000 00000000 00000000
+divd p eq - 80000000 00000000 fff00000 00000000 00000000 00000000
+divd z eq - 80000000 00000000 fff00000 00000000 00000000 00000000
+divd n eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+divd m eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+divd p eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+divd z eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+divd n eq - fff00000 00000000 40000000 00000000 fff00000 00000000
+divd m eq - fff00000 00000000 40000000 00000000 fff00000 00000000
+divd p eq - fff00000 00000000 40000000 00000000 fff00000 00000000
+divd z eq - fff00000 00000000 40000000 00000000 fff00000 00000000
+divd n eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+divd m eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+divd p eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+divd z eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+divd n eq - fff00000 00000000 c0100000 00000000 7ff00000 00000000
+divd m eq - fff00000 00000000 c0100000 00000000 7ff00000 00000000
+divd p eq - fff00000 00000000 c0100000 00000000 7ff00000 00000000
+divd z eq - fff00000 00000000 c0100000 00000000 7ff00000 00000000
+divd n eq - 7ff00000 00000000 40140000 00000000 7ff00000 00000000
+divd m eq - 7ff00000 00000000 40140000 00000000 7ff00000 00000000
+divd p eq - 7ff00000 00000000 40140000 00000000 7ff00000 00000000
+divd z eq - 7ff00000 00000000 40140000 00000000 7ff00000 00000000
+divd n eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+divd m eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+divd p eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+divd z eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+divd n eq - 7ff00000 00000000 c01c0000 00000000 fff00000 00000000
+divd m eq - 7ff00000 00000000 c01c0000 00000000 fff00000 00000000
+divd p eq - 7ff00000 00000000 c01c0000 00000000 fff00000 00000000
+divd z eq - 7ff00000 00000000 c01c0000 00000000 fff00000 00000000
+divd n eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+divd m eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+divd p eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+divd z eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+divd n eq - 3ff00000 00000000 7ff00000 00000000 00000000 00000000
+divd m eq - 3ff00000 00000000 7ff00000 00000000 00000000 00000000
+divd p eq - 3ff00000 00000000 7ff00000 00000000 00000000 00000000
+divd z eq - 3ff00000 00000000 7ff00000 00000000 00000000 00000000
+divd n eq - c0000000 00000000 7ff00000 00000000 80000000 00000000
+divd m eq - c0000000 00000000 7ff00000 00000000 80000000 00000000
+divd p eq - c0000000 00000000 7ff00000 00000000 80000000 00000000
+divd z eq - c0000000 00000000 7ff00000 00000000 80000000 00000000
+divd n eq - 40080000 00000000 fff00000 00000000 80000000 00000000
+divd m eq - 40080000 00000000 fff00000 00000000 80000000 00000000
+divd p eq - 40080000 00000000 fff00000 00000000 80000000 00000000
+divd z eq - 40080000 00000000 fff00000 00000000 80000000 00000000
+divd n eq - c0100000 00000000 fff00000 00000000 00000000 00000000
+divd m eq - c0100000 00000000 fff00000 00000000 00000000 00000000
+divd p eq - c0100000 00000000 fff00000 00000000 00000000 00000000
+divd z eq - c0100000 00000000 fff00000 00000000 00000000 00000000
+divd n eq - 40140000 00000000 7ff00000 00000000 00000000 00000000
+divd m eq - 40140000 00000000 7ff00000 00000000 00000000 00000000
+divd p eq - 40140000 00000000 7ff00000 00000000 00000000 00000000
+divd z eq - 40140000 00000000 7ff00000 00000000 00000000 00000000
+divd n eq - c0180000 00000000 7ff00000 00000000 80000000 00000000
+divd m eq - c0180000 00000000 7ff00000 00000000 80000000 00000000
+divd p eq - c0180000 00000000 7ff00000 00000000 80000000 00000000
+divd z eq - c0180000 00000000 7ff00000 00000000 80000000 00000000
+divd n eq - 401c0000 00000000 fff00000 00000000 80000000 00000000
+divd m eq - 401c0000 00000000 fff00000 00000000 80000000 00000000
+divd p eq - 401c0000 00000000 fff00000 00000000 80000000 00000000
+divd z eq - 401c0000 00000000 fff00000 00000000 80000000 00000000
+divd n eq - c0200000 00000000 fff00000 00000000 00000000 00000000
+divd m eq - c0200000 00000000 fff00000 00000000 00000000 00000000
+divd p eq - c0200000 00000000 fff00000 00000000 00000000 00000000
+divd z eq - c0200000 00000000 fff00000 00000000 00000000 00000000
+divd n eq - 7fe00000 00000000 7ff00000 00000000 00000000 00000000
+divd m eq - 7fe00000 00000000 7ff00000 00000000 00000000 00000000
+divd p eq - 7fe00000 00000000 7ff00000 00000000 00000000 00000000
+divd z eq - 7fe00000 00000000 7ff00000 00000000 00000000 00000000
+divd n eq - ffd00000 00000000 7ff00000 00000000 80000000 00000000
+divd m eq - ffd00000 00000000 7ff00000 00000000 80000000 00000000
+divd p eq - ffd00000 00000000 7ff00000 00000000 80000000 00000000
+divd z eq - ffd00000 00000000 7ff00000 00000000 80000000 00000000
+divd n eq - 7fe00000 00000000 fff00000 00000000 80000000 00000000
+divd m eq - 7fe00000 00000000 fff00000 00000000 80000000 00000000
+divd p eq - 7fe00000 00000000 fff00000 00000000 80000000 00000000
+divd z eq - 7fe00000 00000000 fff00000 00000000 80000000 00000000
+divd n eq - ffd00000 00000000 fff00000 00000000 00000000 00000000
+divd m eq - ffd00000 00000000 fff00000 00000000 00000000 00000000
+divd p eq - ffd00000 00000000 fff00000 00000000 00000000 00000000
+divd z eq - ffd00000 00000000 fff00000 00000000 00000000 00000000
+divd n eq - 7fdfffff ffffffff 7ff00000 00000000 00000000 00000000
+divd m eq - 7fdfffff ffffffff 7ff00000 00000000 00000000 00000000
+divd p eq - 7fdfffff ffffffff 7ff00000 00000000 00000000 00000000
+divd z eq - 7fdfffff ffffffff 7ff00000 00000000 00000000 00000000
+divd n eq - ffcfffff ffffffff 7ff00000 00000000 80000000 00000000
+divd m eq - ffcfffff ffffffff 7ff00000 00000000 80000000 00000000
+divd p eq - ffcfffff ffffffff 7ff00000 00000000 80000000 00000000
+divd z eq - ffcfffff ffffffff 7ff00000 00000000 80000000 00000000
+divd n eq - 7fefffff ffffffff fff00000 00000000 80000000 00000000
+divd m eq - 7fefffff ffffffff fff00000 00000000 80000000 00000000
+divd p eq - 7fefffff ffffffff fff00000 00000000 80000000 00000000
+divd z eq - 7fefffff ffffffff fff00000 00000000 80000000 00000000
+divd n eq - ffefffff ffffffff fff00000 00000000 00000000 00000000
+divd m eq - ffefffff ffffffff fff00000 00000000 00000000 00000000
+divd p eq - ffefffff ffffffff fff00000 00000000 00000000 00000000
+divd z eq - ffefffff ffffffff fff00000 00000000 00000000 00000000
+divd n eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+divd m eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+divd p eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+divd z eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+divd n eq - fff00000 00000000 7fd00000 00000000 fff00000 00000000
+divd m eq - fff00000 00000000 7fd00000 00000000 fff00000 00000000
+divd p eq - fff00000 00000000 7fd00000 00000000 fff00000 00000000
+divd z eq - fff00000 00000000 7fd00000 00000000 fff00000 00000000
+divd n eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+divd m eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+divd p eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+divd z eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+divd n eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+divd m eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+divd p eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+divd z eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+divd n eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+divd m eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+divd p eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+divd z eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+divd n eq - 7ff00000 00000000 ffcfffff ffffffff fff00000 00000000
+divd m eq - 7ff00000 00000000 ffcfffff ffffffff fff00000 00000000
+divd p eq - 7ff00000 00000000 ffcfffff ffffffff fff00000 00000000
+divd z eq - 7ff00000 00000000 ffcfffff ffffffff fff00000 00000000
+divd n eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+divd m eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+divd p eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+divd z eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+divd n eq - fff00000 00000000 ffefffff ffffffff 7ff00000 00000000
+divd m eq - fff00000 00000000 ffefffff ffffffff 7ff00000 00000000
+divd p eq - fff00000 00000000 ffefffff ffffffff 7ff00000 00000000
+divd z eq - fff00000 00000000 ffefffff ffffffff 7ff00000 00000000
+divd n eq - 7ff00000 00000000 00100000 00000000 7ff00000 00000000
+divd m eq - 7ff00000 00000000 00100000 00000000 7ff00000 00000000
+divd p eq - 7ff00000 00000000 00100000 00000000 7ff00000 00000000
+divd z eq - 7ff00000 00000000 00100000 00000000 7ff00000 00000000
+divd n eq - fff00000 00000000 00200000 00000000 fff00000 00000000
+divd m eq - fff00000 00000000 00200000 00000000 fff00000 00000000
+divd p eq - fff00000 00000000 00200000 00000000 fff00000 00000000
+divd z eq - fff00000 00000000 00200000 00000000 fff00000 00000000
+divd n eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+divd m eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+divd p eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+divd z eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+divd n eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+divd m eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+divd p eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+divd z eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+divd n eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+divd m eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+divd p eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+divd z eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+divd n eq - fff00000 00000000 00100000 00000001 fff00000 00000000
+divd m eq - fff00000 00000000 00100000 00000001 fff00000 00000000
+divd p eq - fff00000 00000000 00100000 00000001 fff00000 00000000
+divd z eq - fff00000 00000000 00100000 00000001 fff00000 00000000
+divd n eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+divd m eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+divd p eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+divd z eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+divd n eq - fff00000 00000000 801fffff ffffffff 7ff00000 00000000
+divd m eq - fff00000 00000000 801fffff ffffffff 7ff00000 00000000
+divd p eq - fff00000 00000000 801fffff ffffffff 7ff00000 00000000
+divd z eq - fff00000 00000000 801fffff ffffffff 7ff00000 00000000
+divd n eq - 00100000 00000000 7ff00000 00000000 00000000 00000000
+divd m eq - 00100000 00000000 7ff00000 00000000 00000000 00000000
+divd p eq - 00100000 00000000 7ff00000 00000000 00000000 00000000
+divd z eq - 00100000 00000000 7ff00000 00000000 00000000 00000000
+divd n eq - 80200000 00000000 7ff00000 00000000 80000000 00000000
+divd m eq - 80200000 00000000 7ff00000 00000000 80000000 00000000
+divd p eq - 80200000 00000000 7ff00000 00000000 80000000 00000000
+divd z eq - 80200000 00000000 7ff00000 00000000 80000000 00000000
+divd n eq - 00200000 00000000 fff00000 00000000 80000000 00000000
+divd m eq - 00200000 00000000 fff00000 00000000 80000000 00000000
+divd p eq - 00200000 00000000 fff00000 00000000 80000000 00000000
+divd z eq - 00200000 00000000 fff00000 00000000 80000000 00000000
+divd n eq - 80100000 00000000 fff00000 00000000 00000000 00000000
+divd m eq - 80100000 00000000 fff00000 00000000 00000000 00000000
+divd p eq - 80100000 00000000 fff00000 00000000 00000000 00000000
+divd z eq - 80100000 00000000 fff00000 00000000 00000000 00000000
+divd n eq - 001fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd m eq - 001fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd p eq - 001fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd z eq - 001fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd n eq - 80100000 00000001 7ff00000 00000000 80000000 00000000
+divd m eq - 80100000 00000001 7ff00000 00000000 80000000 00000000
+divd p eq - 80100000 00000001 7ff00000 00000000 80000000 00000000
+divd z eq - 80100000 00000001 7ff00000 00000000 80000000 00000000
+divd n eq - 00100000 00000001 fff00000 00000000 80000000 00000000
+divd m eq - 00100000 00000001 fff00000 00000000 80000000 00000000
+divd p eq - 00100000 00000001 fff00000 00000000 80000000 00000000
+divd z eq - 00100000 00000001 fff00000 00000000 80000000 00000000
+divd n eq - 801fffff ffffffff fff00000 00000000 00000000 00000000
+divd m eq - 801fffff ffffffff fff00000 00000000 00000000 00000000
+divd p eq - 801fffff ffffffff fff00000 00000000 00000000 00000000
+divd z eq - 801fffff ffffffff fff00000 00000000 00000000 00000000
+divd n eq - 7ff00000 00000000 00000000 00000001 7ff00000 00000000
+divd m eq - 7ff00000 00000000 00000000 00000001 7ff00000 00000000
+divd p eq - 7ff00000 00000000 00000000 00000001 7ff00000 00000000
+divd z eq - 7ff00000 00000000 00000000 00000001 7ff00000 00000000
+divd n eq - fff00000 00000000 00000000 00000003 fff00000 00000000
+divd m eq - fff00000 00000000 00000000 00000003 fff00000 00000000
+divd p eq - fff00000 00000000 00000000 00000003 fff00000 00000000
+divd z eq - fff00000 00000000 00000000 00000003 fff00000 00000000
+divd n eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+divd m eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+divd p eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+divd z eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+divd n eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+divd m eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+divd p eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+divd z eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+divd n eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd m eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd p eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd z eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd n eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+divd m eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+divd p eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+divd z eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+divd n eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+divd m eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+divd p eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+divd z eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+divd n eq - fff00000 00000000 800fffff ffffffff 7ff00000 00000000
+divd m eq - fff00000 00000000 800fffff ffffffff 7ff00000 00000000
+divd p eq - fff00000 00000000 800fffff ffffffff 7ff00000 00000000
+divd z eq - fff00000 00000000 800fffff ffffffff 7ff00000 00000000
+divd n eq - 00000000 00000001 7ff00000 00000000 00000000 00000000
+divd m eq - 00000000 00000001 7ff00000 00000000 00000000 00000000
+divd p eq - 00000000 00000001 7ff00000 00000000 00000000 00000000
+divd z eq - 00000000 00000001 7ff00000 00000000 00000000 00000000
+divd n eq - 80000000 00000003 7ff00000 00000000 80000000 00000000
+divd m eq - 80000000 00000003 7ff00000 00000000 80000000 00000000
+divd p eq - 80000000 00000003 7ff00000 00000000 80000000 00000000
+divd z eq - 80000000 00000003 7ff00000 00000000 80000000 00000000
+divd n eq - 00000000 00000002 fff00000 00000000 80000000 00000000
+divd m eq - 00000000 00000002 fff00000 00000000 80000000 00000000
+divd p eq - 00000000 00000002 fff00000 00000000 80000000 00000000
+divd z eq - 00000000 00000002 fff00000 00000000 80000000 00000000
+divd n eq - 80000000 00000004 fff00000 00000000 00000000 00000000
+divd m eq - 80000000 00000004 fff00000 00000000 00000000 00000000
+divd p eq - 80000000 00000004 fff00000 00000000 00000000 00000000
+divd z eq - 80000000 00000004 fff00000 00000000 00000000 00000000
+divd n eq - 000fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd m eq - 000fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd p eq - 000fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd z eq - 000fffff ffffffff 7ff00000 00000000 00000000 00000000
+divd n eq - 800fffff ffffffff 7ff00000 00000000 80000000 00000000
+divd m eq - 800fffff ffffffff 7ff00000 00000000 80000000 00000000
+divd p eq - 800fffff ffffffff 7ff00000 00000000 80000000 00000000
+divd z eq - 800fffff ffffffff 7ff00000 00000000 80000000 00000000
+divd n eq - 000fffff ffffffff fff00000 00000000 80000000 00000000
+divd m eq - 000fffff ffffffff fff00000 00000000 80000000 00000000
+divd p eq - 000fffff ffffffff fff00000 00000000 80000000 00000000
+divd z eq - 000fffff ffffffff fff00000 00000000 80000000 00000000
+divd n eq - 800fffff ffffffff fff00000 00000000 00000000 00000000
+divd m eq - 800fffff ffffffff fff00000 00000000 00000000 00000000
+divd p eq - 800fffff ffffffff fff00000 00000000 00000000 00000000
+divd z eq - 800fffff ffffffff fff00000 00000000 00000000 00000000
+divd n eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 40000000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 40000000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 40000000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 40000000 00000000 80000000 00000000
+divd n eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+divd m eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+divd p eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+divd z eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+divd n eq - 80000000 00000000 c0100000 00000000 00000000 00000000
+divd m eq - 80000000 00000000 c0100000 00000000 00000000 00000000
+divd p eq - 80000000 00000000 c0100000 00000000 00000000 00000000
+divd z eq - 80000000 00000000 c0100000 00000000 00000000 00000000
+divd n eq - 00000000 00000000 40140000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 40140000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 40140000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 40140000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 40180000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 40180000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 40180000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 40180000 00000000 80000000 00000000
+divd n eq - 00000000 00000000 c01c0000 00000000 80000000 00000000
+divd m eq - 00000000 00000000 c01c0000 00000000 80000000 00000000
+divd p eq - 00000000 00000000 c01c0000 00000000 80000000 00000000
+divd z eq - 00000000 00000000 c01c0000 00000000 80000000 00000000
+divd n eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+divd m eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+divd p eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+divd z eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+divd n eq d 3ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 3ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 3ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 3ff00000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq d c0000000 00000000 00000000 00000000 fff00000 00000000
+divd m eq d c0000000 00000000 00000000 00000000 fff00000 00000000
+divd p eq d c0000000 00000000 00000000 00000000 fff00000 00000000
+divd z eq d c0000000 00000000 00000000 00000000 fff00000 00000000
+divd n eq d 40080000 00000000 80000000 00000000 fff00000 00000000
+divd m eq d 40080000 00000000 80000000 00000000 fff00000 00000000
+divd p eq d 40080000 00000000 80000000 00000000 fff00000 00000000
+divd z eq d 40080000 00000000 80000000 00000000 fff00000 00000000
+divd n eq d c0100000 00000000 80000000 00000000 7ff00000 00000000
+divd m eq d c0100000 00000000 80000000 00000000 7ff00000 00000000
+divd p eq d c0100000 00000000 80000000 00000000 7ff00000 00000000
+divd z eq d c0100000 00000000 80000000 00000000 7ff00000 00000000
+divd n eq d 40140000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 40140000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 40140000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 40140000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq d c0180000 00000000 00000000 00000000 fff00000 00000000
+divd m eq d c0180000 00000000 00000000 00000000 fff00000 00000000
+divd p eq d c0180000 00000000 00000000 00000000 fff00000 00000000
+divd z eq d c0180000 00000000 00000000 00000000 fff00000 00000000
+divd n eq d 401c0000 00000000 80000000 00000000 fff00000 00000000
+divd m eq d 401c0000 00000000 80000000 00000000 fff00000 00000000
+divd p eq d 401c0000 00000000 80000000 00000000 fff00000 00000000
+divd z eq d 401c0000 00000000 80000000 00000000 fff00000 00000000
+divd n eq d c0200000 00000000 80000000 00000000 7ff00000 00000000
+divd m eq d c0200000 00000000 80000000 00000000 7ff00000 00000000
+divd p eq d c0200000 00000000 80000000 00000000 7ff00000 00000000
+divd z eq d c0200000 00000000 80000000 00000000 7ff00000 00000000
+divd n eq - 00000000 00000000 7fe00000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 7fe00000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 7fe00000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 7fe00000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 7fd00000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 7fd00000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 7fd00000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 7fd00000 00000000 80000000 00000000
+divd n eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+divd m eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+divd p eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+divd z eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+divd n eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+divd m eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+divd p eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+divd z eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+divd n eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+divd m eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+divd p eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+divd z eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+divd n eq - 80000000 00000000 7fcfffff ffffffff 80000000 00000000
+divd m eq - 80000000 00000000 7fcfffff ffffffff 80000000 00000000
+divd p eq - 80000000 00000000 7fcfffff ffffffff 80000000 00000000
+divd z eq - 80000000 00000000 7fcfffff ffffffff 80000000 00000000
+divd n eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+divd m eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+divd p eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+divd z eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+divd n eq - 80000000 00000000 ffdfffff ffffffff 00000000 00000000
+divd m eq - 80000000 00000000 ffdfffff ffffffff 00000000 00000000
+divd p eq - 80000000 00000000 ffdfffff ffffffff 00000000 00000000
+divd z eq - 80000000 00000000 ffdfffff ffffffff 00000000 00000000
+divd n eq d 7fe00000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 7fe00000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 7fe00000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 7fe00000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq d ffd00000 00000000 00000000 00000000 fff00000 00000000
+divd m eq d ffd00000 00000000 00000000 00000000 fff00000 00000000
+divd p eq d ffd00000 00000000 00000000 00000000 fff00000 00000000
+divd z eq d ffd00000 00000000 00000000 00000000 fff00000 00000000
+divd n eq d 7fe00000 00000000 80000000 00000000 fff00000 00000000
+divd m eq d 7fe00000 00000000 80000000 00000000 fff00000 00000000
+divd p eq d 7fe00000 00000000 80000000 00000000 fff00000 00000000
+divd z eq d 7fe00000 00000000 80000000 00000000 fff00000 00000000
+divd n eq d ffd00000 00000000 80000000 00000000 7ff00000 00000000
+divd m eq d ffd00000 00000000 80000000 00000000 7ff00000 00000000
+divd p eq d ffd00000 00000000 80000000 00000000 7ff00000 00000000
+divd z eq d ffd00000 00000000 80000000 00000000 7ff00000 00000000
+divd n eq d 7fdfffff ffffffff 00000000 00000000 7ff00000 00000000
+divd m eq d 7fdfffff ffffffff 00000000 00000000 7ff00000 00000000
+divd p eq d 7fdfffff ffffffff 00000000 00000000 7ff00000 00000000
+divd z eq d 7fdfffff ffffffff 00000000 00000000 7ff00000 00000000
+divd n eq d ffcfffff ffffffff 00000000 00000000 fff00000 00000000
+divd m eq d ffcfffff ffffffff 00000000 00000000 fff00000 00000000
+divd p eq d ffcfffff ffffffff 00000000 00000000 fff00000 00000000
+divd z eq d ffcfffff ffffffff 00000000 00000000 fff00000 00000000
+divd n eq d 7fcfffff ffffffff 80000000 00000000 fff00000 00000000
+divd m eq d 7fcfffff ffffffff 80000000 00000000 fff00000 00000000
+divd p eq d 7fcfffff ffffffff 80000000 00000000 fff00000 00000000
+divd z eq d 7fcfffff ffffffff 80000000 00000000 fff00000 00000000
+divd n eq d ffdfffff ffffffff 80000000 00000000 7ff00000 00000000
+divd m eq d ffdfffff ffffffff 80000000 00000000 7ff00000 00000000
+divd p eq d ffdfffff ffffffff 80000000 00000000 7ff00000 00000000
+divd z eq d ffdfffff ffffffff 80000000 00000000 7ff00000 00000000
+divd n eq - 00000000 00000000 00100000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 00100000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 00100000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 00100000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 00200000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 00200000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 00200000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 00200000 00000000 80000000 00000000
+divd n eq - 00000000 00000000 80200000 00000000 80000000 00000000
+divd m eq - 00000000 00000000 80200000 00000000 80000000 00000000
+divd p eq - 00000000 00000000 80200000 00000000 80000000 00000000
+divd z eq - 00000000 00000000 80200000 00000000 80000000 00000000
+divd n eq - 80000000 00000000 80100000 00000000 00000000 00000000
+divd m eq - 80000000 00000000 80100000 00000000 00000000 00000000
+divd p eq - 80000000 00000000 80100000 00000000 00000000 00000000
+divd z eq - 80000000 00000000 80100000 00000000 00000000 00000000
+divd n eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+divd m eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+divd p eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+divd z eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+divd n eq - 80000000 00000000 00100000 00000001 80000000 00000000
+divd m eq - 80000000 00000000 00100000 00000001 80000000 00000000
+divd p eq - 80000000 00000000 00100000 00000001 80000000 00000000
+divd z eq - 80000000 00000000 00100000 00000001 80000000 00000000
+divd n eq - 00000000 00000000 80100000 00000001 80000000 00000000
+divd m eq - 00000000 00000000 80100000 00000001 80000000 00000000
+divd p eq - 00000000 00000000 80100000 00000001 80000000 00000000
+divd z eq - 00000000 00000000 80100000 00000001 80000000 00000000
+divd n eq - 80000000 00000000 801fffff ffffffff 00000000 00000000
+divd m eq - 80000000 00000000 801fffff ffffffff 00000000 00000000
+divd p eq - 80000000 00000000 801fffff ffffffff 00000000 00000000
+divd z eq - 80000000 00000000 801fffff ffffffff 00000000 00000000
+divd n eq d 00100000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 00100000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 00100000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 00100000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq d 80200000 00000000 00000000 00000000 fff00000 00000000
+divd m eq d 80200000 00000000 00000000 00000000 fff00000 00000000
+divd p eq d 80200000 00000000 00000000 00000000 fff00000 00000000
+divd z eq d 80200000 00000000 00000000 00000000 fff00000 00000000
+divd n eq d 00200000 00000000 80000000 00000000 fff00000 00000000
+divd m eq d 00200000 00000000 80000000 00000000 fff00000 00000000
+divd p eq d 00200000 00000000 80000000 00000000 fff00000 00000000
+divd z eq d 00200000 00000000 80000000 00000000 fff00000 00000000
+divd n eq d 80100000 00000000 80000000 00000000 7ff00000 00000000
+divd m eq d 80100000 00000000 80000000 00000000 7ff00000 00000000
+divd p eq d 80100000 00000000 80000000 00000000 7ff00000 00000000
+divd z eq d 80100000 00000000 80000000 00000000 7ff00000 00000000
+divd n eq d 001fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd m eq d 001fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd p eq d 001fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd z eq d 001fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd n eq d 80100000 00000001 00000000 00000000 fff00000 00000000
+divd m eq d 80100000 00000001 00000000 00000000 fff00000 00000000
+divd p eq d 80100000 00000001 00000000 00000000 fff00000 00000000
+divd z eq d 80100000 00000001 00000000 00000000 fff00000 00000000
+divd n eq d 00100000 00000001 80000000 00000000 fff00000 00000000
+divd m eq d 00100000 00000001 80000000 00000000 fff00000 00000000
+divd p eq d 00100000 00000001 80000000 00000000 fff00000 00000000
+divd z eq d 00100000 00000001 80000000 00000000 fff00000 00000000
+divd n eq d 801fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd m eq d 801fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd p eq d 801fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd z eq d 801fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd n eq - 00000000 00000000 00000000 00000001 00000000 00000000
+divd m eq - 00000000 00000000 00000000 00000001 00000000 00000000
+divd p eq - 00000000 00000000 00000000 00000001 00000000 00000000
+divd z eq - 00000000 00000000 00000000 00000001 00000000 00000000
+divd n eq - 80000000 00000000 00000000 00000003 80000000 00000000
+divd m eq - 80000000 00000000 00000000 00000003 80000000 00000000
+divd p eq - 80000000 00000000 00000000 00000003 80000000 00000000
+divd z eq - 80000000 00000000 00000000 00000003 80000000 00000000
+divd n eq - 00000000 00000000 80000000 00000002 80000000 00000000
+divd m eq - 00000000 00000000 80000000 00000002 80000000 00000000
+divd p eq - 00000000 00000000 80000000 00000002 80000000 00000000
+divd z eq - 00000000 00000000 80000000 00000002 80000000 00000000
+divd n eq - 80000000 00000000 80000000 00000004 00000000 00000000
+divd m eq - 80000000 00000000 80000000 00000004 00000000 00000000
+divd p eq - 80000000 00000000 80000000 00000004 00000000 00000000
+divd z eq - 80000000 00000000 80000000 00000004 00000000 00000000
+divd n eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+divd m eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+divd p eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+divd z eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+divd n eq - 80000000 00000000 000fffff ffffffff 80000000 00000000
+divd m eq - 80000000 00000000 000fffff ffffffff 80000000 00000000
+divd p eq - 80000000 00000000 000fffff ffffffff 80000000 00000000
+divd z eq - 80000000 00000000 000fffff ffffffff 80000000 00000000
+divd n eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+divd m eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+divd p eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+divd z eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+divd n eq - 80000000 00000000 800fffff ffffffff 00000000 00000000
+divd m eq - 80000000 00000000 800fffff ffffffff 00000000 00000000
+divd p eq - 80000000 00000000 800fffff ffffffff 00000000 00000000
+divd z eq - 80000000 00000000 800fffff ffffffff 00000000 00000000
+divd n eq d 00000000 00000001 00000000 00000000 7ff00000 00000000
+divd m eq d 00000000 00000001 00000000 00000000 7ff00000 00000000
+divd p eq d 00000000 00000001 00000000 00000000 7ff00000 00000000
+divd z eq d 00000000 00000001 00000000 00000000 7ff00000 00000000
+divd n eq d 80000000 00000003 00000000 00000000 fff00000 00000000
+divd m eq d 80000000 00000003 00000000 00000000 fff00000 00000000
+divd p eq d 80000000 00000003 00000000 00000000 fff00000 00000000
+divd z eq d 80000000 00000003 00000000 00000000 fff00000 00000000
+divd n eq d 00000000 00000002 80000000 00000000 fff00000 00000000
+divd m eq d 00000000 00000002 80000000 00000000 fff00000 00000000
+divd p eq d 00000000 00000002 80000000 00000000 fff00000 00000000
+divd z eq d 00000000 00000002 80000000 00000000 fff00000 00000000
+divd n eq d 80000000 00000004 80000000 00000000 7ff00000 00000000
+divd m eq d 80000000 00000004 80000000 00000000 7ff00000 00000000
+divd p eq d 80000000 00000004 80000000 00000000 7ff00000 00000000
+divd z eq d 80000000 00000004 80000000 00000000 7ff00000 00000000
+divd n eq d 000fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd m eq d 000fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd p eq d 000fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd z eq d 000fffff ffffffff 00000000 00000000 7ff00000 00000000
+divd n eq d 800fffff ffffffff 00000000 00000000 fff00000 00000000
+divd m eq d 800fffff ffffffff 00000000 00000000 fff00000 00000000
+divd p eq d 800fffff ffffffff 00000000 00000000 fff00000 00000000
+divd z eq d 800fffff ffffffff 00000000 00000000 fff00000 00000000
+divd n eq d 000fffff ffffffff 80000000 00000000 fff00000 00000000
+divd m eq d 000fffff ffffffff 80000000 00000000 fff00000 00000000
+divd p eq d 000fffff ffffffff 80000000 00000000 fff00000 00000000
+divd z eq d 000fffff ffffffff 80000000 00000000 fff00000 00000000
+divd n eq d 800fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd m eq d 800fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd p eq d 800fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd z eq d 800fffff ffffffff 80000000 00000000 7ff00000 00000000
+divd n eq - 7fe00000 00000000 40000000 00000000 7fd00000 00000000
+divd m eq - 7fe00000 00000000 40000000 00000000 7fd00000 00000000
+divd p eq - 7fe00000 00000000 40000000 00000000 7fd00000 00000000
+divd z eq - 7fe00000 00000000 40000000 00000000 7fd00000 00000000
+divd n eq - 7fe00000 00000000 c0000000 00000000 ffd00000 00000000
+divd m eq - 7fe00000 00000000 c0000000 00000000 ffd00000 00000000
+divd p eq - 7fe00000 00000000 c0000000 00000000 ffd00000 00000000
+divd z eq - 7fe00000 00000000 c0000000 00000000 ffd00000 00000000
+divd n eq - ffdfffff ffffffff 40000000 00000000 ffcfffff ffffffff
+divd m eq - ffdfffff ffffffff 40000000 00000000 ffcfffff ffffffff
+divd p eq - ffdfffff ffffffff 40000000 00000000 ffcfffff ffffffff
+divd z eq - ffdfffff ffffffff 40000000 00000000 ffcfffff ffffffff
+divd n eq - 7fdfffff fffffffd c0000000 00000000 ffcfffff fffffffd
+divd m eq - 7fdfffff fffffffd c0000000 00000000 ffcfffff fffffffd
+divd p eq - 7fdfffff fffffffd c0000000 00000000 ffcfffff fffffffd
+divd z eq - 7fdfffff fffffffd c0000000 00000000 ffcfffff fffffffd
+divd n eq - 7fefffff ffffffff 7fdfffff ffffffff 40000000 00000000
+divd m eq - 7fefffff ffffffff 7fdfffff ffffffff 40000000 00000000
+divd p eq - 7fefffff ffffffff 7fdfffff ffffffff 40000000 00000000
+divd z eq - 7fefffff ffffffff 7fdfffff ffffffff 40000000 00000000
+divd n eq - 7fefffff ffffffff c0000000 00000000 ffdfffff ffffffff
+divd m eq - 7fefffff ffffffff c0000000 00000000 ffdfffff ffffffff
+divd p eq - 7fefffff ffffffff c0000000 00000000 ffdfffff ffffffff
+divd z eq - 7fefffff ffffffff c0000000 00000000 ffdfffff ffffffff
+divd n eq - ffe00000 00000001 7fd00000 00000001 c0000000 00000000
+divd m eq - ffe00000 00000001 7fd00000 00000001 c0000000 00000000
+divd p eq - ffe00000 00000001 7fd00000 00000001 c0000000 00000000
+divd z eq - ffe00000 00000001 7fd00000 00000001 c0000000 00000000
+divd n eq - 7fe00000 00000003 ffd00000 00000003 c0000000 00000000
+divd m eq - 7fe00000 00000003 ffd00000 00000003 c0000000 00000000
+divd p eq - 7fe00000 00000003 ffd00000 00000003 c0000000 00000000
+divd z eq - 7fe00000 00000003 ffd00000 00000003 c0000000 00000000
+divd n eq - 7fefffff ffffffff 7fcfffff ffffffff 40100000 00000000
+divd m eq - 7fefffff ffffffff 7fcfffff ffffffff 40100000 00000000
+divd p eq - 7fefffff ffffffff 7fcfffff ffffffff 40100000 00000000
+divd z eq - 7fefffff ffffffff 7fcfffff ffffffff 40100000 00000000
+divd n eq - ffefffff ffffffff 7fcfffff ffffffff c0100000 00000000
+divd m eq - ffefffff ffffffff 7fcfffff ffffffff c0100000 00000000
+divd p eq - ffefffff ffffffff 7fcfffff ffffffff c0100000 00000000
+divd z eq - ffefffff ffffffff 7fcfffff ffffffff c0100000 00000000
+divd n eq - 7fefffff ffffffff ffcfffff ffffffff c0100000 00000000
+divd m eq - 7fefffff ffffffff ffcfffff ffffffff c0100000 00000000
+divd p eq - 7fefffff ffffffff ffcfffff ffffffff c0100000 00000000
+divd z eq - 7fefffff ffffffff ffcfffff ffffffff c0100000 00000000
+divd n eq - ffefffff ffffffff ffcfffff ffffffff 40100000 00000000
+divd m eq - ffefffff ffffffff ffcfffff ffffffff 40100000 00000000
+divd p eq - ffefffff ffffffff ffcfffff ffffffff 40100000 00000000
+divd z eq - ffefffff ffffffff ffcfffff ffffffff 40100000 00000000
+divd n eq - 7fefffff fffffffd 40100000 00000000 7fcfffff fffffffd
+divd m eq - 7fefffff fffffffd 40100000 00000000 7fcfffff fffffffd
+divd p eq - 7fefffff fffffffd 40100000 00000000 7fcfffff fffffffd
+divd z eq - 7fefffff fffffffd 40100000 00000000 7fcfffff fffffffd
+divd n eq - 7fefffff fffffffd c0100000 00000000 ffcfffff fffffffd
+divd m eq - 7fefffff fffffffd c0100000 00000000 ffcfffff fffffffd
+divd p eq - 7fefffff fffffffd c0100000 00000000 ffcfffff fffffffd
+divd z eq - 7fefffff fffffffd c0100000 00000000 ffcfffff fffffffd
+divd n eq - ffefffff fffffffd 40100000 00000000 ffcfffff fffffffd
+divd m eq - ffefffff fffffffd 40100000 00000000 ffcfffff fffffffd
+divd p eq - ffefffff fffffffd 40100000 00000000 ffcfffff fffffffd
+divd z eq - ffefffff fffffffd 40100000 00000000 ffcfffff fffffffd
+divd n eq - ffefffff fffffffd c0100000 00000000 7fcfffff fffffffd
+divd m eq - ffefffff fffffffd c0100000 00000000 7fcfffff fffffffd
+divd p eq - ffefffff fffffffd c0100000 00000000 7fcfffff fffffffd
+divd z eq - ffefffff fffffffd c0100000 00000000 7fcfffff fffffffd
+divd n eq - 00200000 00000000 00100000 00000000 40000000 00000000
+divd m eq - 00200000 00000000 00100000 00000000 40000000 00000000
+divd p eq - 00200000 00000000 00100000 00000000 40000000 00000000
+divd z eq - 00200000 00000000 00100000 00000000 40000000 00000000
+divd n eq - 00200000 00000000 c0000000 00000000 80100000 00000000
+divd m eq - 00200000 00000000 c0000000 00000000 80100000 00000000
+divd p eq - 00200000 00000000 c0000000 00000000 80100000 00000000
+divd z eq - 00200000 00000000 c0000000 00000000 80100000 00000000
+divd n eq - 80200000 00000001 00100000 00000001 c0000000 00000000
+divd m eq - 80200000 00000001 00100000 00000001 c0000000 00000000
+divd p eq - 80200000 00000001 00100000 00000001 c0000000 00000000
+divd z eq - 80200000 00000001 00100000 00000001 c0000000 00000000
+divd n eq - 00200000 00000003 c0000000 00000000 80100000 00000003
+divd m eq - 00200000 00000003 c0000000 00000000 80100000 00000003
+divd p eq - 00200000 00000003 c0000000 00000000 80100000 00000003
+divd z eq - 00200000 00000003 c0000000 00000000 80100000 00000003
+divd n eq - 00200000 00000001 00100000 00000001 40000000 00000000
+divd m eq - 00200000 00000001 00100000 00000001 40000000 00000000
+divd p eq - 00200000 00000001 00100000 00000001 40000000 00000000
+divd z eq - 00200000 00000001 00100000 00000001 40000000 00000000
+divd n eq - 00200000 00000001 c0000000 00000000 80100000 00000001
+divd m eq - 00200000 00000001 c0000000 00000000 80100000 00000001
+divd p eq - 00200000 00000001 c0000000 00000000 80100000 00000001
+divd z eq - 00200000 00000001 c0000000 00000000 80100000 00000001
+divd n eq - 80200000 00000005 00100000 00000005 c0000000 00000000
+divd m eq - 80200000 00000005 00100000 00000005 c0000000 00000000
+divd p eq - 80200000 00000005 00100000 00000005 c0000000 00000000
+divd z eq - 80200000 00000005 00100000 00000005 c0000000 00000000
+divd n eq - 00200000 00000003 80100000 00000003 c0000000 00000000
+divd m eq - 00200000 00000003 80100000 00000003 c0000000 00000000
+divd p eq - 00200000 00000003 80100000 00000003 c0000000 00000000
+divd z eq - 00200000 00000003 80100000 00000003 c0000000 00000000
+divd n eq - 000fffff ffffffff 3fe00000 00000000 001fffff fffffffe
+divd m eq - 000fffff ffffffff 3fe00000 00000000 001fffff fffffffe
+divd p eq - 000fffff ffffffff 3fe00000 00000000 001fffff fffffffe
+divd z eq - 000fffff ffffffff 3fe00000 00000000 001fffff fffffffe
+divd n eq - 000fffff ffffffff 3f600000 00000000 009fffff fffffffe
+divd m eq - 000fffff ffffffff 3f600000 00000000 009fffff fffffffe
+divd p eq - 000fffff ffffffff 3f600000 00000000 009fffff fffffffe
+divd z eq - 000fffff ffffffff 3f600000 00000000 009fffff fffffffe
+divd n eq xo 7fe00000 00000000 3fe00000 00000000 7ff00000 00000000
+divd p eq xo 7fe00000 00000000 3fe00000 00000000 7ff00000 00000000
+divd z eq xo 7fe00000 00000000 3fe00000 00000000 7fefffff ffffffff
+divd m eq xo 7fe00000 00000000 3fe00000 00000000 7fefffff ffffffff
+divd n eq xo ffe00000 00000000 bfe00000 00000000 7ff00000 00000000
+divd p eq xo ffe00000 00000000 bfe00000 00000000 7ff00000 00000000
+divd z eq xo ffe00000 00000000 bfe00000 00000000 7fefffff ffffffff
+divd m eq xo ffe00000 00000000 bfe00000 00000000 7fefffff ffffffff
+divd n eq xo 7fe00000 00000000 bfe00000 00000000 fff00000 00000000
+divd m eq xo 7fe00000 00000000 bfe00000 00000000 fff00000 00000000
+divd n eq xo ffe00000 00000000 3fe00000 00000000 fff00000 00000000
+divd m eq xo ffe00000 00000000 3fe00000 00000000 fff00000 00000000
+divd z eq xo 7fe00000 00000000 bfe00000 00000000 ffefffff ffffffff
+divd p eq xo 7fe00000 00000000 bfe00000 00000000 ffefffff ffffffff
+divd z eq xo ffe00000 00000000 3fe00000 00000000 ffefffff ffffffff
+divd p eq xo ffe00000 00000000 3fe00000 00000000 ffefffff ffffffff
+divd n eq xo 7f600000 00000000 00a00000 00000000 7ff00000 00000000
+divd p eq xo 7f600000 00000000 00a00000 00000000 7ff00000 00000000
+divd z eq xo 7f600000 00000000 00a00000 00000000 7fefffff ffffffff
+divd m eq xo 7f600000 00000000 00a00000 00000000 7fefffff ffffffff
+divd n eq xo 7fefffff ffffffff 00000000 00000001 7ff00000 00000000
+divd p eq xo 7fefffff ffffffff 00000000 00000001 7ff00000 00000000
+divd z eq xo 7fefffff ffffffff 00000000 00000001 7fefffff ffffffff
+divd m eq xo 7fefffff ffffffff 00000000 00000001 7fefffff ffffffff
+divd n eq xo 7fe00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd p eq xo 7fe00000 00000000 000fffff ffffffff 7ff00000 00000000
+divd z eq xo 7fe00000 00000000 000fffff ffffffff 7fefffff ffffffff
+divd m eq xo 7fe00000 00000000 000fffff ffffffff 7fefffff ffffffff
+divd n eq xo 7fefffff ffffffff 3fefffff ffffffff 7ff00000 00000000
+divd p eq xo 7fefffff ffffffff 3fefffff ffffffff 7ff00000 00000000
+divd z eq xo 7fefffff ffffffff 3fefffff ffffffff 7fefffff ffffffff
+divd m eq xo 7fefffff ffffffff 3fefffff ffffffff 7fefffff ffffffff
+divd n eq xu 00100000 00000000 3ff00000 00000001 000fffff ffffffff
+divd z eq xu 00100000 00000000 3ff00000 00000001 000fffff ffffffff
+divd m eq xu 00100000 00000000 3ff00000 00000001 000fffff ffffffff
+divd n eq xu 80100000 00000000 3ff00000 00000001 800fffff ffffffff
+divd z eq xu 80100000 00000000 3ff00000 00000001 800fffff ffffffff
+divd p eq xu 80100000 00000000 3ff00000 00000001 800fffff ffffffff
+divd p eq xu 000fffff fffffffe 3fefffff fffffffe 000fffff ffffffff
+divd n eq xu 000fffff fffffffe 3fefffff fffffffe 000fffff ffffffff
+divd p eq xu 000fffff fffffff7 3fefffff fffffffe 000fffff fffffff8
+divd n eq xu 000fffff fffffff7 3fefffff fffffffe 000fffff fffffff8
+divd m eq xu 800fffff fffffff8 3fefffff fffffffe 800fffff fffffff9
+divd n eq xu 800fffff fffffff8 3fefffff fffffffe 800fffff fffffff9
+divd m eq xu 00100000 00000001 3ff00000 00000002 000fffff ffffffff
+divd n eq xu 00100000 00000001 3ff00000 00000002 000fffff ffffffff
+divd z eq xu 00100000 00000001 3ff00000 00000002 000fffff ffffffff
+divd m eq xu 000fffff ffffffff 3ff00000 00000002 000fffff fffffffd
+divd n eq xu 000fffff ffffffff 3ff00000 00000002 000fffff fffffffd
+divd z eq xu 000fffff ffffffff 3ff00000 00000002 000fffff fffffffd
+divd m eq xu 00100000 00000002 3ff00000 00000006 000fffff fffffffc
+divd n eq xu 00100000 00000002 3ff00000 00000006 000fffff fffffffc
+divd z eq xu 00100000 00000002 3ff00000 00000006 000fffff fffffffc
+divd z eq xu 000fffff ffffffff 3ff00000 00000001 000fffff fffffffe
+divd m eq xu 000fffff ffffffff 3ff00000 00000001 000fffff fffffffe
+divd n eq xu 00000000 00000001 7fefffff ffffffff 00000000 00000000
+divd m eq xu 00000000 00000001 7fefffff ffffffff 00000000 00000000
+divd z eq xu 00000000 00000001 7fefffff ffffffff 00000000 00000000
+divd p eq xu 00000000 00000001 7fefffff ffffffff 00000000 00000001
+divd n eq xu 80000000 00000001 ffefffff ffffffff 00000000 00000000
+divd m eq xu 80000000 00000001 ffefffff ffffffff 00000000 00000000
+divd z eq xu 80000000 00000001 ffefffff ffffffff 00000000 00000000
+divd p eq xu 80000000 00000001 ffefffff ffffffff 00000000 00000001
+divd n eq xu 00000000 00000001 ffefffff ffffffff 80000000 00000000
+divd z eq xu 00000000 00000001 ffefffff ffffffff 80000000 00000000
+divd p eq xu 00000000 00000001 ffefffff ffffffff 80000000 00000000
+divd m eq xu 00000000 00000001 ffefffff ffffffff 80000000 00000001
+divd n eq xu 80000000 00000001 7fefffff ffffffff 80000000 00000000
+divd z eq xu 80000000 00000001 7fefffff ffffffff 80000000 00000000
+divd p eq xu 80000000 00000001 7fefffff ffffffff 80000000 00000000
+divd m eq xu 80000000 00000001 7fefffff ffffffff 80000000 00000001
+divd p eq xu 00000000 00000001 40000000 00000000 00000000 00000001
+divd n eq xu 00000000 00000001 40000000 00000000 00000000 00000000
+divd z eq xu 00000000 00000001 40000000 00000000 00000000 00000000
+divd m eq xu 00000000 00000001 40000000 00000000 00000000 00000000
+divd p eq xu 80000000 00000001 c0000000 00000000 00000000 00000001
+divd n eq xu 80000000 00000001 c0000000 00000000 00000000 00000000
+divd z eq xu 80000000 00000001 c0000000 00000000 00000000 00000000
+divd m eq xu 80000000 00000001 c0000000 00000000 00000000 00000000
+divd m eq xu 00000000 00000001 c0000000 00000000 80000000 00000001
+divd n eq xu 00000000 00000001 c0000000 00000000 80000000 00000000
+divd z eq xu 00000000 00000001 c0000000 00000000 80000000 00000000
+divd p eq xu 00000000 00000001 c0000000 00000000 80000000 00000000
+divd m eq xu 80000000 00000001 40000000 00000000 80000000 00000001
+divd n eq xu 80000000 00000001 40000000 00000000 80000000 00000000
+divd z eq xu 80000000 00000001 40000000 00000000 80000000 00000000
+divd p eq xu 80000000 00000001 40000000 00000000 80000000 00000000
+divd z eq xu 001fffff ffffffff 40000000 00000000 000fffff ffffffff
+divd m eq xu 001fffff ffffffff 40000000 00000000 000fffff ffffffff
+divd z eq xu 001fffff ffffffff c0000000 00000000 800fffff ffffffff
+divd p eq xu 001fffff ffffffff c0000000 00000000 800fffff ffffffff
+divd p eq xu 001fffff ffffffff 40000000 00000000 00100000 00000000
+divd n eq xu 001fffff ffffffff 40000000 00000000 00100000 00000000
+divd p eq xu 00100000 00000000 3ff00000 00000001 00100000 00000000
+divd m eq xu 80100000 00000000 3ff00000 00000001 80100000 00000000
+divd p eq xu 00100000 00000001 3ff00000 00000002 00100000 00000000
+divd p eq xu 000fffff ffffffff 3ff00000 00000002 000fffff fffffffe
+divd n eq - 001fffff fffffffe 40000000 00000000 000fffff ffffffff
+divd m eq - 001fffff fffffffe 40000000 00000000 000fffff ffffffff
+divd p eq - 001fffff fffffffe 40000000 00000000 000fffff ffffffff
+divd z eq - 001fffff fffffffe 40000000 00000000 000fffff ffffffff
+divd n eq - 000fffff ffffffff 3ff00000 00000000 000fffff ffffffff
+divd m eq - 000fffff ffffffff 3ff00000 00000000 000fffff ffffffff
+divd p eq - 000fffff ffffffff 3ff00000 00000000 000fffff ffffffff
+divd z eq - 000fffff ffffffff 3ff00000 00000000 000fffff ffffffff
+divd n eq - 00000000 00000001 3fe00000 00000000 00000000 00000002
+divd m eq - 00000000 00000001 3fe00000 00000000 00000000 00000002
+divd p eq - 00000000 00000001 3fe00000 00000000 00000000 00000002
+divd z eq - 00000000 00000001 3fe00000 00000000 00000000 00000002
+divd n eq - 00000000 00000001 3fc00000 00000000 00000000 00000008
+divd m eq - 00000000 00000001 3fc00000 00000000 00000000 00000008
+divd p eq - 00000000 00000001 3fc00000 00000000 00000000 00000008
+divd z eq - 00000000 00000001 3fc00000 00000000 00000000 00000008
+divd n eq - 00000000 00000009 40220000 00000000 00000000 00000001
+divd m eq - 00000000 00000009 40220000 00000000 00000000 00000001
+divd p eq - 00000000 00000009 40220000 00000000 00000000 00000001
+divd z eq - 00000000 00000009 40220000 00000000 00000000 00000001
+divd n eq - 00000000 00000009 c0220000 00000000 80000000 00000001
+divd m eq - 00000000 00000009 c0220000 00000000 80000000 00000001
+divd p eq - 00000000 00000009 c0220000 00000000 80000000 00000001
+divd z eq - 00000000 00000009 c0220000 00000000 80000000 00000001
+divd n eq - 000fffff ffffffff bff00000 00000000 800fffff ffffffff
+divd m eq - 000fffff ffffffff bff00000 00000000 800fffff ffffffff
+divd p eq - 000fffff ffffffff bff00000 00000000 800fffff ffffffff
+divd z eq - 000fffff ffffffff bff00000 00000000 800fffff ffffffff
+divd n eq - 80000000 00000001 3fe00000 00000000 80000000 00000002
+divd m eq - 80000000 00000001 3fe00000 00000000 80000000 00000002
+divd p eq - 80000000 00000001 3fe00000 00000000 80000000 00000002
+divd z eq - 80000000 00000001 3fe00000 00000000 80000000 00000002
+divd n eq x 3ff00000 00000000 3ff00000 00000001 3fefffff fffffffe
+divd z eq x 3ff00000 00000000 3ff00000 00000001 3fefffff fffffffe
+divd m eq x 3ff00000 00000000 3ff00000 00000001 3fefffff fffffffe
+divd p eq x 3ff00000 00000000 3ff00000 00000001 3fefffff ffffffff
+divd n eq x 3ff00000 00000000 3ff00000 00000002 3fefffff fffffffc
+divd z eq x 3ff00000 00000000 3ff00000 00000002 3fefffff fffffffc
+divd m eq x 3ff00000 00000000 3ff00000 00000002 3fefffff fffffffc
+divd p eq x 3ff00000 00000000 3ff00000 00000002 3fefffff fffffffd
+divd n eq x 3ff00000 00000000 3ff00000 00000003 3fefffff fffffffa
+divd z eq x 3ff00000 00000000 3ff00000 00000003 3fefffff fffffffa
+divd m eq x 3ff00000 00000000 3ff00000 00000003 3fefffff fffffffa
+divd p eq x 3ff00000 00000000 3ff00000 00000003 3fefffff fffffffb
+divd n eq x 3ff00000 00000000 3ff00000 00000004 3fefffff fffffff8
+divd z eq x 3ff00000 00000000 3ff00000 00000004 3fefffff fffffff8
+divd m eq x 3ff00000 00000000 3ff00000 00000004 3fefffff fffffff8
+divd p eq x 3ff00000 00000000 3ff00000 00000004 3fefffff fffffff9
+divd n eq x 3ff00000 00000000 3fefffff ffffffff 3ff00000 00000001
+divd z eq x 3ff00000 00000000 3fefffff ffffffff 3ff00000 00000000
+divd m eq x 3ff00000 00000000 3fefffff ffffffff 3ff00000 00000000
+divd p eq x 3ff00000 00000000 3fefffff ffffffff 3ff00000 00000001
+divd n eq x 3ff00000 00000000 3fefffff fffffffe 3ff00000 00000001
+divd z eq x 3ff00000 00000000 3fefffff fffffffe 3ff00000 00000001
+divd m eq x 3ff00000 00000000 3fefffff fffffffe 3ff00000 00000001
+divd p eq x 3ff00000 00000000 3fefffff fffffffe 3ff00000 00000002
+divd n eq x 3ff00000 00000000 3fefffff fffffffd 3ff00000 00000002
+divd z eq x 3ff00000 00000000 3fefffff fffffffd 3ff00000 00000001
+divd m eq x 3ff00000 00000000 3fefffff fffffffd 3ff00000 00000001
+divd p eq x 3ff00000 00000000 3fefffff fffffffd 3ff00000 00000002
+divd n eq x 3ff00000 00000000 3fefffff fffffffc 3ff00000 00000002
+divd z eq x 3ff00000 00000000 3fefffff fffffffc 3ff00000 00000002
+divd m eq x 3ff00000 00000000 3fefffff fffffffc 3ff00000 00000002
+divd p eq x 3ff00000 00000000 3fefffff fffffffc 3ff00000 00000003
+divd n eq x 3ff00000 00000000 3fefffff fffffffb 3ff00000 00000003
+divd z eq x 3ff00000 00000000 3fefffff fffffffb 3ff00000 00000002
+divd m eq x 3ff00000 00000000 3fefffff fffffffb 3ff00000 00000002
+divd p eq x 3ff00000 00000000 3fefffff fffffffb 3ff00000 00000003
+divd n eq x 3ff00000 00000000 3fefffff fffffff8 3ff00000 00000004
+divd z eq x 3ff00000 00000000 3fefffff fffffff8 3ff00000 00000004
+divd m eq x 3ff00000 00000000 3fefffff fffffff8 3ff00000 00000004
+divd p eq x 3ff00000 00000000 3fefffff fffffff8 3ff00000 00000005
+divd n eq x 3ff00000 00000000 3fefffff fffffff7 3ff00000 00000005
+divd z eq x 3ff00000 00000000 3fefffff fffffff7 3ff00000 00000004
+divd m eq x 3ff00000 00000000 3fefffff fffffff7 3ff00000 00000004
+divd p eq x 3ff00000 00000000 3fefffff fffffff7 3ff00000 00000005
+divd n eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000001
+divd z eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000000
+divd m eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000000
+divd p eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000001
+divd n eq x 3ff00000 00000003 3ff00000 00000001 3ff00000 00000002
+divd z eq x 3ff00000 00000003 3ff00000 00000001 3ff00000 00000001
+divd m eq x 3ff00000 00000003 3ff00000 00000001 3ff00000 00000001
+divd p eq x 3ff00000 00000003 3ff00000 00000001 3ff00000 00000002
+divd n eq x 3ff00000 00000004 3ff00000 00000001 3ff00000 00000003
+divd z eq x 3ff00000 00000004 3ff00000 00000001 3ff00000 00000002
+divd m eq x 3ff00000 00000004 3ff00000 00000001 3ff00000 00000002
+divd p eq x 3ff00000 00000004 3ff00000 00000001 3ff00000 00000003
+divd n eq x 3ff00000 00000007 3ff00000 00000002 3ff00000 00000005
+divd z eq x 3ff00000 00000007 3ff00000 00000002 3ff00000 00000004
+divd m eq x 3ff00000 00000007 3ff00000 00000002 3ff00000 00000004
+divd p eq x 3ff00000 00000007 3ff00000 00000002 3ff00000 00000005
+divd n eq x 3ff00000 00000009 3ff00000 00000008 3ff00000 00000001
+divd z eq x 3ff00000 00000009 3ff00000 00000008 3ff00000 00000000
+divd m eq x 3ff00000 00000009 3ff00000 00000008 3ff00000 00000000
+divd p eq x 3ff00000 00000009 3ff00000 00000008 3ff00000 00000001
+divd n eq x 3ff00000 00000001 3ff00000 00000002 3fefffff fffffffe
+divd z eq x 3ff00000 00000001 3ff00000 00000002 3fefffff fffffffe
+divd m eq x 3ff00000 00000001 3ff00000 00000002 3fefffff fffffffe
+divd p eq x 3ff00000 00000001 3ff00000 00000002 3fefffff ffffffff
+divd n eq x 3ff00000 00000001 3ff00000 00000003 3fefffff fffffffc
+divd z eq x 3ff00000 00000001 3ff00000 00000003 3fefffff fffffffc
+divd m eq x 3ff00000 00000001 3ff00000 00000003 3fefffff fffffffc
+divd p eq x 3ff00000 00000001 3ff00000 00000003 3fefffff fffffffd
+divd n eq x 3ff00000 00000002 3ff00000 00000003 3fefffff fffffffe
+divd z eq x 3ff00000 00000002 3ff00000 00000003 3fefffff fffffffe
+divd m eq x 3ff00000 00000002 3ff00000 00000003 3fefffff fffffffe
+divd p eq x 3ff00000 00000002 3ff00000 00000003 3fefffff ffffffff
+divd n eq x 3ff00000 00000004 3ff00000 00000007 3fefffff fffffffa
+divd z eq x 3ff00000 00000004 3ff00000 00000007 3fefffff fffffffa
+divd m eq x 3ff00000 00000004 3ff00000 00000007 3fefffff fffffffa
+divd p eq x 3ff00000 00000004 3ff00000 00000007 3fefffff fffffffb
+divd n eq x 3ff00000 00000006 3ff00000 00000008 3fefffff fffffffc
+divd z eq x 3ff00000 00000006 3ff00000 00000008 3fefffff fffffffc
+divd m eq x 3ff00000 00000006 3ff00000 00000008 3fefffff fffffffc
+divd p eq x 3ff00000 00000006 3ff00000 00000008 3fefffff fffffffd
+divd n eq x 3fefffff fffffffe 3fefffff ffffffff 3fefffff ffffffff
+divd z eq x 3fefffff fffffffe 3fefffff ffffffff 3fefffff fffffffe
+divd m eq x 3fefffff fffffffe 3fefffff ffffffff 3fefffff fffffffe
+divd p eq x 3fefffff fffffffe 3fefffff ffffffff 3fefffff ffffffff
+divd n eq x 3fefffff fffffffd 3fefffff ffffffff 3fefffff fffffffe
+divd z eq x 3fefffff fffffffd 3fefffff ffffffff 3fefffff fffffffd
+divd m eq x 3fefffff fffffffd 3fefffff ffffffff 3fefffff fffffffd
+divd p eq x 3fefffff fffffffd 3fefffff ffffffff 3fefffff fffffffe
+divd n eq x 3fefffff fffffffd 3fefffff fffffffe 3fefffff ffffffff
+divd z eq x 3fefffff fffffffd 3fefffff fffffffe 3fefffff fffffffe
+divd m eq x 3fefffff fffffffd 3fefffff fffffffe 3fefffff fffffffe
+divd p eq x 3fefffff fffffffd 3fefffff fffffffe 3fefffff ffffffff
+divd n eq x 3fefffff fffffffc 3fefffff ffffffff 3fefffff fffffffd
+divd z eq x 3fefffff fffffffc 3fefffff ffffffff 3fefffff fffffffc
+divd m eq x 3fefffff fffffffc 3fefffff ffffffff 3fefffff fffffffc
+divd p eq x 3fefffff fffffffc 3fefffff ffffffff 3fefffff fffffffd
+divd n eq x 3fefffff fffffffc 3fefffff fffffffe 3fefffff fffffffe
+divd z eq x 3fefffff fffffffc 3fefffff fffffffe 3fefffff fffffffd
+divd m eq x 3fefffff fffffffc 3fefffff fffffffe 3fefffff fffffffd
+divd p eq x 3fefffff fffffffc 3fefffff fffffffe 3fefffff fffffffe
+divd n eq x 3fefffff fffffffc 3fefffff fffffffd 3fefffff ffffffff
+divd z eq x 3fefffff fffffffc 3fefffff fffffffd 3fefffff fffffffe
+divd m eq x 3fefffff fffffffc 3fefffff fffffffd 3fefffff fffffffe
+divd p eq x 3fefffff fffffffc 3fefffff fffffffd 3fefffff ffffffff
+divd n eq x 3fefffff fffffff8 3fefffff fffffffd 3fefffff fffffffb
+divd z eq x 3fefffff fffffff8 3fefffff fffffffd 3fefffff fffffffa
+divd m eq x 3fefffff fffffff8 3fefffff fffffffd 3fefffff fffffffa
+divd p eq x 3fefffff fffffff8 3fefffff fffffffd 3fefffff fffffffb
+divd n eq x 3fefffff fffffff7 3fefffff fffffffe 3fefffff fffffff9
+divd z eq x 3fefffff fffffff7 3fefffff fffffffe 3fefffff fffffff8
+divd m eq x 3fefffff fffffff7 3fefffff fffffffe 3fefffff fffffff8
+divd p eq x 3fefffff fffffff7 3fefffff fffffffe 3fefffff fffffff9
+divd n eq x 3fefffff fffffff8 3fefffff fffffffc 3fefffff fffffffc
+divd z eq x 3fefffff fffffff8 3fefffff fffffffc 3fefffff fffffffb
+divd m eq x 3fefffff fffffff8 3fefffff fffffffc 3fefffff fffffffb
+divd p eq x 3fefffff fffffff8 3fefffff fffffffc 3fefffff fffffffc
+divd n eq x 3fefffff fffffff7 3fefffff fffffffb 3fefffff fffffffc
+divd z eq x 3fefffff fffffff7 3fefffff fffffffb 3fefffff fffffffb
+divd m eq x 3fefffff fffffff7 3fefffff fffffffb 3fefffff fffffffb
+divd p eq x 3fefffff fffffff7 3fefffff fffffffb 3fefffff fffffffc
+divd n eq x 3fefffff ffffffff 3fefffff fffffffe 3ff00000 00000001
+divd z eq x 3fefffff ffffffff 3fefffff fffffffe 3ff00000 00000000
+divd m eq x 3fefffff ffffffff 3fefffff fffffffe 3ff00000 00000000
+divd p eq x 3fefffff ffffffff 3fefffff fffffffe 3ff00000 00000001
+divd n eq x 3fefffff ffffffff 3fefffff fffffffd 3ff00000 00000001
+divd z eq x 3fefffff ffffffff 3fefffff fffffffd 3ff00000 00000001
+divd m eq x 3fefffff ffffffff 3fefffff fffffffd 3ff00000 00000001
+divd p eq x 3fefffff ffffffff 3fefffff fffffffd 3ff00000 00000002
+divd n eq x 3fefffff fffffffe 3fefffff fffffffd 3ff00000 00000001
+divd z eq x 3fefffff fffffffe 3fefffff fffffffd 3ff00000 00000000
+divd m eq x 3fefffff fffffffe 3fefffff fffffffd 3ff00000 00000000
+divd p eq x 3fefffff fffffffe 3fefffff fffffffd 3ff00000 00000001
+divd n eq x 3fefffff ffffffff 3fefffff fffffffc 3ff00000 00000002
+divd z eq x 3fefffff ffffffff 3fefffff fffffffc 3ff00000 00000001
+divd m eq x 3fefffff ffffffff 3fefffff fffffffc 3ff00000 00000001
+divd p eq x 3fefffff ffffffff 3fefffff fffffffc 3ff00000 00000002
+divd n eq x 3fefffff fffffffe 3fefffff fffffffc 3ff00000 00000001
+divd z eq x 3fefffff fffffffe 3fefffff fffffffc 3ff00000 00000001
+divd m eq x 3fefffff fffffffe 3fefffff fffffffc 3ff00000 00000001
+divd p eq x 3fefffff fffffffe 3fefffff fffffffc 3ff00000 00000002
+divd n eq x 3fefffff fffffffd 3fefffff fffffffc 3ff00000 00000001
+divd z eq x 3fefffff fffffffd 3fefffff fffffffc 3ff00000 00000000
+divd m eq x 3fefffff fffffffd 3fefffff fffffffc 3ff00000 00000000
+divd p eq x 3fefffff fffffffd 3fefffff fffffffc 3ff00000 00000001
+divd n eq x 3fefffff ffffffff 3fefffff fffffff9 3ff00000 00000003
+divd z eq x 3fefffff ffffffff 3fefffff fffffff9 3ff00000 00000003
+divd m eq x 3fefffff ffffffff 3fefffff fffffff9 3ff00000 00000003
+divd p eq x 3fefffff ffffffff 3fefffff fffffff9 3ff00000 00000004
+divd n eq x 3fefffff fffffffe 3fefffff fffffff9 3ff00000 00000003
+divd z eq x 3fefffff fffffffe 3fefffff fffffff9 3ff00000 00000002
+divd m eq x 3fefffff fffffffe 3fefffff fffffff9 3ff00000 00000002
+divd p eq x 3fefffff fffffffe 3fefffff fffffff9 3ff00000 00000003
+divd n eq x 3fefffff fffffffd 3fefffff fffffff9 3ff00000 00000002
+divd z eq x 3fefffff fffffffd 3fefffff fffffff9 3ff00000 00000002
+divd m eq x 3fefffff fffffffd 3fefffff fffffff9 3ff00000 00000002
+divd p eq x 3fefffff fffffffd 3fefffff fffffff9 3ff00000 00000003
+divd n eq x 3fefffff fffffffc 3fefffff fffffff9 3ff00000 00000002
+divd z eq x 3fefffff fffffffc 3fefffff fffffff9 3ff00000 00000001
+divd m eq x 3fefffff fffffffc 3fefffff fffffff9 3ff00000 00000001
+divd p eq x 3fefffff fffffffc 3fefffff fffffff9 3ff00000 00000002
+divd n eq x 3fefffff fffffffb 3fefffff fffffff9 3ff00000 00000001
+divd z eq x 3fefffff fffffffb 3fefffff fffffff9 3ff00000 00000001
+divd m eq x 3fefffff fffffffb 3fefffff fffffff9 3ff00000 00000001
+divd p eq x 3fefffff fffffffb 3fefffff fffffff9 3ff00000 00000002
+divd n eq x 3fefffff fffffffa 3fefffff fffffff9 3ff00000 00000001
+divd z eq x 3fefffff fffffffa 3fefffff fffffff9 3ff00000 00000000
+divd m eq x 3fefffff fffffffa 3fefffff fffffff9 3ff00000 00000000
+divd p eq x 3fefffff fffffffa 3fefffff fffffff9 3ff00000 00000001
+divd n eq x 3ff00000 00000001 3fefffff ffffffff 3ff00000 00000002
+divd z eq x 3ff00000 00000001 3fefffff ffffffff 3ff00000 00000001
+divd m eq x 3ff00000 00000001 3fefffff ffffffff 3ff00000 00000001
+divd p eq x 3ff00000 00000001 3fefffff ffffffff 3ff00000 00000002
+divd n eq x 3ff00000 00000001 3fefffff fffffffe 3ff00000 00000002
+divd z eq x 3ff00000 00000001 3fefffff fffffffe 3ff00000 00000002
+divd m eq x 3ff00000 00000001 3fefffff fffffffe 3ff00000 00000002
+divd p eq x 3ff00000 00000001 3fefffff fffffffe 3ff00000 00000003
+divd n eq x 3ff00000 00000002 3fefffff ffffffff 3ff00000 00000003
+divd z eq x 3ff00000 00000002 3fefffff ffffffff 3ff00000 00000002
+divd m eq x 3ff00000 00000002 3fefffff ffffffff 3ff00000 00000002
+divd p eq x 3ff00000 00000002 3fefffff ffffffff 3ff00000 00000003
+divd n eq x 3ff00000 00000001 3fefffff fffffffd 3ff00000 00000003
+divd z eq x 3ff00000 00000001 3fefffff fffffffd 3ff00000 00000002
+divd m eq x 3ff00000 00000001 3fefffff fffffffd 3ff00000 00000002
+divd p eq x 3ff00000 00000001 3fefffff fffffffd 3ff00000 00000003
+divd n eq x 3ff00000 00000003 3fefffff ffffffff 3ff00000 00000004
+divd z eq x 3ff00000 00000003 3fefffff ffffffff 3ff00000 00000003
+divd m eq x 3ff00000 00000003 3fefffff ffffffff 3ff00000 00000003
+divd p eq x 3ff00000 00000003 3fefffff ffffffff 3ff00000 00000004
+divd n eq x 3ff00000 00000002 3fefffff fffffffe 3ff00000 00000003
+divd z eq x 3ff00000 00000002 3fefffff fffffffe 3ff00000 00000003
+divd m eq x 3ff00000 00000002 3fefffff fffffffe 3ff00000 00000003
+divd p eq x 3ff00000 00000002 3fefffff fffffffe 3ff00000 00000004
+divd n eq x 3ff00000 00000003 3fefffff fffffffe 3ff00000 00000004
+divd z eq x 3ff00000 00000003 3fefffff fffffffe 3ff00000 00000004
+divd m eq x 3ff00000 00000003 3fefffff fffffffe 3ff00000 00000004
+divd p eq x 3ff00000 00000003 3fefffff fffffffe 3ff00000 00000005
+divd n eq x 3ff00000 00000002 3fefffff fffffffd 3ff00000 00000004
+divd z eq x 3ff00000 00000002 3fefffff fffffffd 3ff00000 00000003
+divd m eq x 3ff00000 00000002 3fefffff fffffffd 3ff00000 00000003
+divd p eq x 3ff00000 00000002 3fefffff fffffffd 3ff00000 00000004
+divd n eq x 3ff00000 00000003 3fefffff fffffffd 3ff00000 00000005
+divd z eq x 3ff00000 00000003 3fefffff fffffffd 3ff00000 00000004
+divd m eq x 3ff00000 00000003 3fefffff fffffffd 3ff00000 00000004
+divd p eq x 3ff00000 00000003 3fefffff fffffffd 3ff00000 00000005
+divd n eq x 3ff00000 00000001 3fefffff fffffffb 3ff00000 00000004
+divd z eq x 3ff00000 00000001 3fefffff fffffffb 3ff00000 00000003
+divd m eq x 3ff00000 00000001 3fefffff fffffffb 3ff00000 00000003
+divd p eq x 3ff00000 00000001 3fefffff fffffffb 3ff00000 00000004
+divd n eq x 3ff00000 00000005 3fefffff ffffffff 3ff00000 00000006
+divd z eq x 3ff00000 00000005 3fefffff ffffffff 3ff00000 00000005
+divd m eq x 3ff00000 00000005 3fefffff ffffffff 3ff00000 00000005
+divd p eq x 3ff00000 00000005 3fefffff ffffffff 3ff00000 00000006
+divd n eq x 3ff00000 00000002 3fefffff fffffffc 3ff00000 00000004
+divd z eq x 3ff00000 00000002 3fefffff fffffffc 3ff00000 00000004
+divd m eq x 3ff00000 00000002 3fefffff fffffffc 3ff00000 00000004
+divd p eq x 3ff00000 00000002 3fefffff fffffffc 3ff00000 00000005
+divd n eq x 3ff00000 00000004 3fefffff fffffffe 3ff00000 00000005
+divd z eq x 3ff00000 00000004 3fefffff fffffffe 3ff00000 00000005
+divd m eq x 3ff00000 00000004 3fefffff fffffffe 3ff00000 00000005
+divd p eq x 3ff00000 00000004 3fefffff fffffffe 3ff00000 00000006
+divd n eq x 3fefffff ffffffff 3ff00000 00000001 3fefffff fffffffd
+divd z eq x 3fefffff ffffffff 3ff00000 00000001 3fefffff fffffffd
+divd m eq x 3fefffff ffffffff 3ff00000 00000001 3fefffff fffffffd
+divd p eq x 3fefffff ffffffff 3ff00000 00000001 3fefffff fffffffe
+divd n eq x 3fefffff fffffffe 3ff00000 00000001 3fefffff fffffffc
+divd z eq x 3fefffff fffffffe 3ff00000 00000001 3fefffff fffffffc
+divd m eq x 3fefffff fffffffe 3ff00000 00000001 3fefffff fffffffc
+divd p eq x 3fefffff fffffffe 3ff00000 00000001 3fefffff fffffffd
+divd n eq x 3fefffff ffffffff 3ff00000 00000002 3fefffff fffffffb
+divd z eq x 3fefffff ffffffff 3ff00000 00000002 3fefffff fffffffb
+divd m eq x 3fefffff ffffffff 3ff00000 00000002 3fefffff fffffffb
+divd p eq x 3fefffff ffffffff 3ff00000 00000002 3fefffff fffffffc
+divd n eq x 3fefffff fffffffd 3ff00000 00000001 3fefffff fffffffb
+divd z eq x 3fefffff fffffffd 3ff00000 00000001 3fefffff fffffffb
+divd m eq x 3fefffff fffffffd 3ff00000 00000001 3fefffff fffffffb
+divd p eq x 3fefffff fffffffd 3ff00000 00000001 3fefffff fffffffc
+divd n eq x 3fefffff ffffffff 3ff00000 00000003 3fefffff fffffff9
+divd z eq x 3fefffff ffffffff 3ff00000 00000003 3fefffff fffffff9
+divd m eq x 3fefffff ffffffff 3ff00000 00000003 3fefffff fffffff9
+divd p eq x 3fefffff ffffffff 3ff00000 00000003 3fefffff fffffffa
+divd n eq x 3fefffff fffffffe 3ff00000 00000002 3fefffff fffffffa
+divd z eq x 3fefffff fffffffe 3ff00000 00000002 3fefffff fffffffa
+divd m eq x 3fefffff fffffffe 3ff00000 00000002 3fefffff fffffffa
+divd p eq x 3fefffff fffffffe 3ff00000 00000002 3fefffff fffffffb
+divd n eq x 3fefffff fffffffc 3ff00000 00000001 3fefffff fffffffa
+divd z eq x 3fefffff fffffffc 3ff00000 00000001 3fefffff fffffffa
+divd m eq x 3fefffff fffffffc 3ff00000 00000001 3fefffff fffffffa
+divd p eq x 3fefffff fffffffc 3ff00000 00000001 3fefffff fffffffb
+divd n eq x 3fefffff ffffffff 3ff00000 00000004 3fefffff fffffff7
+divd z eq x 3fefffff ffffffff 3ff00000 00000004 3fefffff fffffff7
+divd m eq x 3fefffff ffffffff 3ff00000 00000004 3fefffff fffffff7
+divd p eq x 3fefffff ffffffff 3ff00000 00000004 3fefffff fffffff8
+divd n eq x 3fefffff fffffffd 3ff00000 00000002 3fefffff fffffff9
+divd z eq x 3fefffff fffffffd 3ff00000 00000002 3fefffff fffffff9
+divd m eq x 3fefffff fffffffd 3ff00000 00000002 3fefffff fffffff9
+divd p eq x 3fefffff fffffffd 3ff00000 00000002 3fefffff fffffffa
+divd n eq x 3fefffff fffffffe 3ff00000 00000003 3fefffff fffffff8
+divd z eq x 3fefffff fffffffe 3ff00000 00000003 3fefffff fffffff8
+divd m eq x 3fefffff fffffffe 3ff00000 00000003 3fefffff fffffff8
+divd p eq x 3fefffff fffffffe 3ff00000 00000003 3fefffff fffffff9
+divd n uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+divd n uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+divd n uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd m uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd p uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd z uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd n uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd m uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd p uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd z uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+divd m uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+divd p uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+divd z uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+divd n uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+divd m uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+divd p uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+divd z uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+divd n uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+divd m uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+divd p uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+divd z uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+divd n uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+divd m uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+divd p uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+divd z uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+divd n uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd m uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd p uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd z uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd n uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd m uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd p uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd z uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd m uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd p uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd z uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd n uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd m uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd p uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd z uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd n uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd m uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd p uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd z uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd n uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd m uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd p uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd z uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+divd n uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+divd n uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+divd n uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd m uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd p uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd z uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd n uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd m uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd p uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd z uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+divd m uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+divd p uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+divd z uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+divd n uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+divd m uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+divd p uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+divd z uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+divd n uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+divd m uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+divd p uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+divd z uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+divd n uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+divd m uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+divd p uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+divd z uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+divd n uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd m uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd p uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd z uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd n uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd m uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd p uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd z uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd m uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd p uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd z uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+divd n uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd m uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd p uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd z uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+divd n uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd m uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd p uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd z uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd n uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd m uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd p uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd z uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+divd n uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+divd n uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+divd m uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+divd p uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+divd z uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+divd n eq x 3ff80000 00000001 3ff00000 00000001 3ff80000 00000000
+divd p eq x 3ff80000 00000001 3ff00000 00000001 3ff80000 00000000
+divd z eq x 3ff80000 00000001 3ff00000 00000001 3ff7ffff ffffffff
+divd m eq x 3ff80000 00000001 3ff00000 00000001 3ff7ffff ffffffff
+divd n eq x 3ff7ffff ffffffff 3fefffff fffffffe 3ff80000 00000001
+divd p eq x 3ff7ffff ffffffff 3fefffff fffffffe 3ff80000 00000001
+divd z eq x 3ff7ffff ffffffff 3fefffff fffffffe 3ff80000 00000000
+divd m eq x 3ff7ffff ffffffff 3fefffff fffffffe 3ff80000 00000000
+divd n eq - 3ff80000 00000000 3ff00000 00000000 3ff80000 00000000
+divd m eq - 3ff80000 00000000 3ff00000 00000000 3ff80000 00000000
+divd p eq - 3ff80000 00000000 3ff00000 00000000 3ff80000 00000000
+divd z eq - 3ff80000 00000000 3ff00000 00000000 3ff80000 00000000
+divd n eq - 42000000 00000000 40900000 00000000 41600000 00000000
+divd m eq - 42000000 00000000 40900000 00000000 41600000 00000000
+divd p eq - 42000000 00000000 40900000 00000000 41600000 00000000
+divd z eq - 42000000 00000000 40900000 00000000 41600000 00000000
+divd n eq - 41e00000 00000000 40a00000 00000000 41300000 00000000
+divd m eq - 41e00000 00000000 40a00000 00000000 41300000 00000000
+divd p eq - 41e00000 00000000 40a00000 00000000 41300000 00000000
+divd z eq - 41e00000 00000000 40a00000 00000000 41300000 00000000
+divd n eq - 43d00000 00000000 40000000 00000000 43c00000 00000000
+divd m eq - 43d00000 00000000 40000000 00000000 43c00000 00000000
+divd p eq - 43d00000 00000000 40000000 00000000 43c00000 00000000
+divd z eq - 43d00000 00000000 40000000 00000000 43c00000 00000000
+divd n eq - 40100000 00000000 40900000 00000000 3f700000 00000000
+divd m eq - 40100000 00000000 40900000 00000000 3f700000 00000000
+divd p eq - 40100000 00000000 40900000 00000000 3f700000 00000000
+divd z eq - 40100000 00000000 40900000 00000000 3f700000 00000000
+divd n eq - 40000000 00000000 40a00000 00000000 3f500000 00000000
+divd m eq - 40000000 00000000 40a00000 00000000 3f500000 00000000
+divd p eq - 40000000 00000000 40a00000 00000000 3f500000 00000000
+divd z eq - 40000000 00000000 40a00000 00000000 3f500000 00000000
+divd n eq - c1e00000 00000000 c0b00000 00000000 41200000 00000000
+divd m eq - c1e00000 00000000 c0b00000 00000000 41200000 00000000
+divd p eq - c1e00000 00000000 c0b00000 00000000 41200000 00000000
+divd z eq - c1e00000 00000000 c0b00000 00000000 41200000 00000000
+divd n eq - c1e00000 00000000 40a00000 00000000 c1300000 00000000
+divd m eq - c1e00000 00000000 40a00000 00000000 c1300000 00000000
+divd p eq - c1e00000 00000000 40a00000 00000000 c1300000 00000000
+divd z eq - c1e00000 00000000 40a00000 00000000 c1300000 00000000
+divd n eq - c1e00000 00000000 c0a00000 00000000 41300000 00000000
+divd m eq - c1e00000 00000000 c0a00000 00000000 41300000 00000000
+divd p eq - c1e00000 00000000 c0a00000 00000000 41300000 00000000
+divd z eq - c1e00000 00000000 c0a00000 00000000 41300000 00000000
+divd n eq - 00000000 00000000 40b00000 00000000 00000000 00000000
+divd m eq - 00000000 00000000 40b00000 00000000 00000000 00000000
+divd p eq - 00000000 00000000 40b00000 00000000 00000000 00000000
+divd z eq - 00000000 00000000 40b00000 00000000 00000000 00000000
+divd n eq - 80000000 00000000 40b00000 00000000 80000000 00000000
+divd m eq - 80000000 00000000 40b00000 00000000 80000000 00000000
+divd p eq - 80000000 00000000 40b00000 00000000 80000000 00000000
+divd z eq - 80000000 00000000 40b00000 00000000 80000000 00000000
+divd n eq d 43d00000 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 43d00000 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 43d00000 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 43d00000 00000000 00000000 00000000 7ff00000 00000000
+divd n eq - c1800000 00000000 40d00000 00000000 c0a00000 00000000
+divd m eq - c1800000 00000000 40d00000 00000000 c0a00000 00000000
+divd p eq - c1800000 00000000 40d00000 00000000 c0a00000 00000000
+divd z eq - c1800000 00000000 40d00000 00000000 c0a00000 00000000
+divd n eq - 42400000 00000000 c0c00000 00000000 c1700000 00000000
+divd m eq - 42400000 00000000 c0c00000 00000000 c1700000 00000000
+divd p eq - 42400000 00000000 c0c00000 00000000 c1700000 00000000
+divd z eq - 42400000 00000000 c0c00000 00000000 c1700000 00000000
+divd n eq - c0100000 00000000 40a00000 00000000 bf600000 00000000
+divd m eq - c0100000 00000000 40a00000 00000000 bf600000 00000000
+divd p eq - c0100000 00000000 40a00000 00000000 bf600000 00000000
+divd z eq - c0100000 00000000 40a00000 00000000 bf600000 00000000
+divd n eq - 43d00000 00000000 c0000000 00000000 c3c00000 00000000
+divd m eq - 43d00000 00000000 c0000000 00000000 c3c00000 00000000
+divd p eq - 43d00000 00000000 c0000000 00000000 c3c00000 00000000
+divd z eq - 43d00000 00000000 c0000000 00000000 c3c00000 00000000
+divd n eq - 43d00000 00000000 c0a00000 00000000 c3200000 00000000
+divd m eq - 43d00000 00000000 c0a00000 00000000 c3200000 00000000
+divd p eq - 43d00000 00000000 c0a00000 00000000 c3200000 00000000
+divd z eq - 43d00000 00000000 c0a00000 00000000 c3200000 00000000
+divd n eq - c3c00000 00000000 c0000000 00000000 43b00000 00000000
+divd m eq - c3c00000 00000000 c0000000 00000000 43b00000 00000000
+divd p eq - c3c00000 00000000 c0000000 00000000 43b00000 00000000
+divd z eq - c3c00000 00000000 c0000000 00000000 43b00000 00000000
+divd n eq - c0000000 00000000 c0a00000 00000000 3f500000 00000000
+divd m eq - c0000000 00000000 c0a00000 00000000 3f500000 00000000
+divd p eq - c0000000 00000000 c0a00000 00000000 3f500000 00000000
+divd z eq - c0000000 00000000 c0a00000 00000000 3f500000 00000000
+divd n eq - c0000000 00000000 40a00000 00000000 bf500000 00000000
+divd m eq - c0000000 00000000 40a00000 00000000 bf500000 00000000
+divd p eq - c0000000 00000000 40a00000 00000000 bf500000 00000000
+divd z eq - c0000000 00000000 40a00000 00000000 bf500000 00000000
+divd n eq - 40dfff80 00000000 40000000 00000000 40cfff80 00000000
+divd m eq - 40dfff80 00000000 40000000 00000000 40cfff80 00000000
+divd p eq - 40dfff80 00000000 40000000 00000000 40cfff80 00000000
+divd z eq - 40dfff80 00000000 40000000 00000000 40cfff80 00000000
+divd n eq - 40000000 00000000 40d00000 00000000 3f200000 00000000
+divd m eq - 40000000 00000000 40d00000 00000000 3f200000 00000000
+divd p eq - 40000000 00000000 40d00000 00000000 3f200000 00000000
+divd z eq - 40000000 00000000 40d00000 00000000 3f200000 00000000
+divd n eq - 40dfffc0 00000000 40dfffc0 00000000 3ff00000 00000000
+divd m eq - 40dfffc0 00000000 40dfffc0 00000000 3ff00000 00000000
+divd p eq - 40dfffc0 00000000 40dfffc0 00000000 3ff00000 00000000
+divd z eq - 40dfffc0 00000000 40dfffc0 00000000 3ff00000 00000000
+divd n eq - 40dfffc0 00000000 c0dfffc0 00000000 bff00000 00000000
+divd m eq - 40dfffc0 00000000 c0dfffc0 00000000 bff00000 00000000
+divd p eq - 40dfffc0 00000000 c0dfffc0 00000000 bff00000 00000000
+divd z eq - 40dfffc0 00000000 c0dfffc0 00000000 bff00000 00000000
+divd n eq - 40dfff40 00000000 40dfff40 00000000 3ff00000 00000000
+divd m eq - 40dfff40 00000000 40dfff40 00000000 3ff00000 00000000
+divd p eq - 40dfff40 00000000 40dfff40 00000000 3ff00000 00000000
+divd z eq - 40dfff40 00000000 40dfff40 00000000 3ff00000 00000000
+divd n eq - 40dfffc0 00000000 3ff00000 00000000 40dfffc0 00000000
+divd m eq - 40dfffc0 00000000 3ff00000 00000000 40dfffc0 00000000
+divd p eq - 40dfffc0 00000000 3ff00000 00000000 40dfffc0 00000000
+divd z eq - 40dfffc0 00000000 3ff00000 00000000 40dfffc0 00000000
+divd n eq - 00000000 00000000 40dfffc0 00000000 00000000 00000000
+divd m eq - 00000000 00000000 40dfffc0 00000000 00000000 00000000
+divd p eq - 00000000 00000000 40dfffc0 00000000 00000000 00000000
+divd z eq - 00000000 00000000 40dfffc0 00000000 00000000 00000000
+divd n eq d 40dfffc0 00000000 00000000 00000000 7ff00000 00000000
+divd m eq d 40dfffc0 00000000 00000000 00000000 7ff00000 00000000
+divd p eq d 40dfffc0 00000000 00000000 00000000 7ff00000 00000000
+divd z eq d 40dfffc0 00000000 00000000 00000000 7ff00000 00000000
+divd n eq d c0dfffc0 00000000 00000000 00000000 fff00000 00000000
+divd m eq d c0dfffc0 00000000 00000000 00000000 fff00000 00000000
+divd p eq d c0dfffc0 00000000 00000000 00000000 fff00000 00000000
+divd z eq d c0dfffc0 00000000 00000000 00000000 fff00000 00000000
+divd n eq d 3ff00000 00000000 80000000 00000000 fff00000 00000000
+divd n eq x 3ff55555 55555557 3ff55555 55555555 3ff00000 00000002
+divd n eq x 3ff55555 55555558 3ff55555 55555556 3ff00000 00000001
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/divs.input b/verrou/unitTest/checkUCB-vecto/inputData/divs.input
new file mode 100644
index 0000000000000000000000000000000000000000..5286bb9e5fbede222b050d5ab5e13a5a5b7d899f
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/divs.input
@@ -0,0 +1,1552 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+divs z eq xu 197e03f7 587e03f8 007fffff
+divs p eq xu 197e03f7 587e03f8 00800000
+divs n eq - 47000000 42000000 44800000
+divs m eq - 47000000 42000000 44800000
+divs p eq - 47000000 42000000 44800000
+divs z eq - 47000000 42000000 44800000
+divs n eq - 5f000000 4b000000 53800000
+divs m eq - 5f000000 4b000000 53800000
+divs p eq - 5f000000 4b000000 53800000
+divs z eq - 5f000000 4b000000 53800000
+divs n eq - 46fff000 41200000 454cc000
+divs m eq - 46fff000 41200000 454cc000
+divs p eq - 46fff000 41200000 454cc000
+divs z eq - 46fff000 41200000 454cc000
+divs n eq - c7000000 42000000 c4800000
+divs m eq - c7000000 42000000 c4800000
+divs p eq - c7000000 42000000 c4800000
+divs z eq - c7000000 42000000 c4800000
+divs n eq - 47000000 c2000000 c4800000
+divs m eq - 47000000 c2000000 c4800000
+divs p eq - 47000000 c2000000 c4800000
+divs z eq - 47000000 c2000000 c4800000
+divs n eq - 7b800000 49800000 71800000
+divs m eq - 7b800000 49800000 71800000
+divs p eq - 7b800000 49800000 71800000
+divs z eq - 7b800000 49800000 71800000
+divs n eq - fb800000 49800000 f1800000
+divs m eq - fb800000 49800000 f1800000
+divs p eq - fb800000 49800000 f1800000
+divs z eq - fb800000 49800000 f1800000
+divs n eq - 7b800000 c9800000 f1800000
+divs m eq - 7b800000 c9800000 f1800000
+divs p eq - 7b800000 c9800000 f1800000
+divs z eq - 7b800000 c9800000 f1800000
+divs n eq - 57000000 46000000 50800000
+divs m eq - 57000000 46000000 50800000
+divs p eq - 57000000 46000000 50800000
+divs z eq - 57000000 46000000 50800000
+divs n eq - 461c4000 41200000 447a0000
+divs m eq - 461c4000 41200000 447a0000
+divs p eq - 461c4000 41200000 447a0000
+divs z eq - 461c4000 41200000 447a0000
+divs n eq - 461c4000 42c80000 42c80000
+divs m eq - 461c4000 42c80000 42c80000
+divs p eq - 461c4000 42c80000 42c80000
+divs z eq - 461c4000 42c80000 42c80000
+divs n eq - 461c4000 447a0000 41200000
+divs m eq - 461c4000 447a0000 41200000
+divs p eq - 461c4000 447a0000 41200000
+divs z eq - 461c4000 447a0000 41200000
+divs n eq - 3f800000 3f800000 3f800000
+divs m eq - 3f800000 3f800000 3f800000
+divs p eq - 3f800000 3f800000 3f800000
+divs z eq - 3f800000 3f800000 3f800000
+divs n eq - 40000000 3f800000 40000000
+divs m eq - 40000000 3f800000 40000000
+divs p eq - 40000000 3f800000 40000000
+divs z eq - 40000000 3f800000 40000000
+divs n eq - 41100000 40400000 40400000
+divs m eq - 41100000 40400000 40400000
+divs p eq - 41100000 40400000 40400000
+divs z eq - 41100000 40400000 40400000
+divs n eq - 40a00000 40a00000 3f800000
+divs m eq - 40a00000 40a00000 3f800000
+divs p eq - 40a00000 40a00000 3f800000
+divs z eq - 40a00000 40a00000 3f800000
+divs n eq - 41000000 40000000 40800000
+divs m eq - 41000000 40000000 40800000
+divs p eq - 41000000 40000000 40800000
+divs z eq - 41000000 40000000 40800000
+divs n eq - bf800000 3f800000 bf800000
+divs m eq - bf800000 3f800000 bf800000
+divs p eq - bf800000 3f800000 bf800000
+divs z eq - bf800000 3f800000 bf800000
+divs n eq - c0000000 3f800000 c0000000
+divs m eq - c0000000 3f800000 c0000000
+divs p eq - c0000000 3f800000 c0000000
+divs z eq - c0000000 3f800000 c0000000
+divs n eq - 40000000 bf800000 c0000000
+divs m eq - 40000000 bf800000 c0000000
+divs p eq - 40000000 bf800000 c0000000
+divs z eq - 40000000 bf800000 c0000000
+divs n eq - c1000000 40000000 c0800000
+divs m eq - c1000000 40000000 c0800000
+divs p eq - c1000000 40000000 c0800000
+divs z eq - c1000000 40000000 c0800000
+divs n eq - 40400000 c0400000 bf800000
+divs m eq - 40400000 c0400000 bf800000
+divs p eq - 40400000 c0400000 bf800000
+divs z eq - 40400000 c0400000 bf800000
+divs n eq - c0e00000 40e00000 bf800000
+divs m eq - c0e00000 40e00000 bf800000
+divs p eq - c0e00000 40e00000 bf800000
+divs z eq - c0e00000 40e00000 bf800000
+divs n eq - bf800000 bf800000 3f800000
+divs m eq - bf800000 bf800000 3f800000
+divs p eq - bf800000 bf800000 3f800000
+divs z eq - bf800000 bf800000 3f800000
+divs n eq - c0000000 bf800000 40000000
+divs m eq - c0000000 bf800000 40000000
+divs p eq - c0000000 bf800000 40000000
+divs z eq - c0000000 bf800000 40000000
+divs n eq - c0c00000 c0400000 40000000
+divs m eq - c0c00000 c0400000 40000000
+divs p eq - c0c00000 c0400000 40000000
+divs z eq - c0c00000 c0400000 40000000
+divs n eq - c1100000 c0400000 40400000
+divs m eq - c1100000 c0400000 40400000
+divs p eq - c1100000 c0400000 40400000
+divs z eq - c1100000 c0400000 40400000
+divs n uo v 00000000 00000000 7fff0000
+divs m uo v 00000000 00000000 7fff0000
+divs p uo v 00000000 00000000 7fff0000
+divs z uo v 00000000 00000000 7fff0000
+divs n uo v 80000000 00000000 ffff0000
+divs m uo v 80000000 00000000 ffff0000
+divs p uo v 80000000 00000000 ffff0000
+divs z uo v 80000000 00000000 ffff0000
+divs n uo v 00000000 80000000 ffff0000
+divs m uo v 00000000 80000000 ffff0000
+divs p uo v 00000000 80000000 ffff0000
+divs z uo v 00000000 80000000 ffff0000
+divs n uo v 80000000 80000000 7fff0000
+divs m uo v 80000000 80000000 7fff0000
+divs p uo v 80000000 80000000 7fff0000
+divs z uo v 80000000 80000000 7fff0000
+divs n uo v 7f800000 7f800000 7fff0000
+divs m uo v 7f800000 7f800000 7fff0000
+divs p uo v 7f800000 7f800000 7fff0000
+divs z uo v 7f800000 7f800000 7fff0000
+divs n uo v ff800000 7f800000 ffff0000
+divs m uo v ff800000 7f800000 ffff0000
+divs p uo v ff800000 7f800000 ffff0000
+divs z uo v ff800000 7f800000 ffff0000
+divs n uo v 7f800000 ff800000 ffff0000
+divs m uo v 7f800000 ff800000 ffff0000
+divs p uo v 7f800000 ff800000 ffff0000
+divs z uo v 7f800000 ff800000 ffff0000
+divs n uo v ff800000 ff800000 7fff0000
+divs m uo v ff800000 ff800000 7fff0000
+divs p uo v ff800000 ff800000 7fff0000
+divs z uo v ff800000 ff800000 7fff0000
+divs n eq - 7f800000 00000000 7f800000
+divs m eq - 7f800000 00000000 7f800000
+divs p eq - 7f800000 00000000 7f800000
+divs z eq - 7f800000 00000000 7f800000
+divs n eq - ff800000 00000000 ff800000
+divs m eq - ff800000 00000000 ff800000
+divs p eq - ff800000 00000000 ff800000
+divs z eq - ff800000 00000000 ff800000
+divs n eq - 7f800000 80000000 ff800000
+divs m eq - 7f800000 80000000 ff800000
+divs p eq - 7f800000 80000000 ff800000
+divs z eq - 7f800000 80000000 ff800000
+divs n eq - ff800000 80000000 7f800000
+divs m eq - ff800000 80000000 7f800000
+divs p eq - ff800000 80000000 7f800000
+divs z eq - ff800000 80000000 7f800000
+divs n eq - 00000000 7f800000 00000000
+divs m eq - 00000000 7f800000 00000000
+divs p eq - 00000000 7f800000 00000000
+divs z eq - 00000000 7f800000 00000000
+divs n eq - 80000000 7f800000 80000000
+divs m eq - 80000000 7f800000 80000000
+divs p eq - 80000000 7f800000 80000000
+divs z eq - 80000000 7f800000 80000000
+divs n eq - 00000000 ff800000 80000000
+divs m eq - 00000000 ff800000 80000000
+divs p eq - 00000000 ff800000 80000000
+divs z eq - 00000000 ff800000 80000000
+divs n eq - 80000000 ff800000 00000000
+divs m eq - 80000000 ff800000 00000000
+divs p eq - 80000000 ff800000 00000000
+divs z eq - 80000000 ff800000 00000000
+divs n eq - 7f800000 3f800000 7f800000
+divs m eq - 7f800000 3f800000 7f800000
+divs p eq - 7f800000 3f800000 7f800000
+divs z eq - 7f800000 3f800000 7f800000
+divs n eq - ff800000 40000000 ff800000
+divs m eq - ff800000 40000000 ff800000
+divs p eq - ff800000 40000000 ff800000
+divs z eq - ff800000 40000000 ff800000
+divs n eq - 7f800000 c0400000 ff800000
+divs m eq - 7f800000 c0400000 ff800000
+divs p eq - 7f800000 c0400000 ff800000
+divs z eq - 7f800000 c0400000 ff800000
+divs n eq - ff800000 c0800000 7f800000
+divs m eq - ff800000 c0800000 7f800000
+divs p eq - ff800000 c0800000 7f800000
+divs z eq - ff800000 c0800000 7f800000
+divs n eq - 7f800000 40a00000 7f800000
+divs m eq - 7f800000 40a00000 7f800000
+divs p eq - 7f800000 40a00000 7f800000
+divs z eq - 7f800000 40a00000 7f800000
+divs n eq - ff800000 40c00000 ff800000
+divs m eq - ff800000 40c00000 ff800000
+divs p eq - ff800000 40c00000 ff800000
+divs z eq - ff800000 40c00000 ff800000
+divs n eq - 7f800000 c0e00000 ff800000
+divs m eq - 7f800000 c0e00000 ff800000
+divs p eq - 7f800000 c0e00000 ff800000
+divs z eq - 7f800000 c0e00000 ff800000
+divs n eq - ff800000 c1000000 7f800000
+divs m eq - ff800000 c1000000 7f800000
+divs p eq - ff800000 c1000000 7f800000
+divs z eq - ff800000 c1000000 7f800000
+divs n eq - 3f800000 7f800000 00000000
+divs m eq - 3f800000 7f800000 00000000
+divs p eq - 3f800000 7f800000 00000000
+divs z eq - 3f800000 7f800000 00000000
+divs n eq - c0000000 7f800000 80000000
+divs m eq - c0000000 7f800000 80000000
+divs p eq - c0000000 7f800000 80000000
+divs z eq - c0000000 7f800000 80000000
+divs n eq - 40400000 ff800000 80000000
+divs m eq - 40400000 ff800000 80000000
+divs p eq - 40400000 ff800000 80000000
+divs z eq - 40400000 ff800000 80000000
+divs n eq - c0800000 ff800000 00000000
+divs m eq - c0800000 ff800000 00000000
+divs p eq - c0800000 ff800000 00000000
+divs z eq - c0800000 ff800000 00000000
+divs n eq - 40a00000 7f800000 00000000
+divs m eq - 40a00000 7f800000 00000000
+divs p eq - 40a00000 7f800000 00000000
+divs z eq - 40a00000 7f800000 00000000
+divs n eq - c0c00000 7f800000 80000000
+divs m eq - c0c00000 7f800000 80000000
+divs p eq - c0c00000 7f800000 80000000
+divs z eq - c0c00000 7f800000 80000000
+divs n eq - 40e00000 ff800000 80000000
+divs m eq - 40e00000 ff800000 80000000
+divs p eq - 40e00000 ff800000 80000000
+divs z eq - 40e00000 ff800000 80000000
+divs n eq - c1000000 ff800000 00000000
+divs m eq - c1000000 ff800000 00000000
+divs p eq - c1000000 ff800000 00000000
+divs z eq - c1000000 ff800000 00000000
+divs n eq - 7f000000 7f800000 00000000
+divs m eq - 7f000000 7f800000 00000000
+divs p eq - 7f000000 7f800000 00000000
+divs z eq - 7f000000 7f800000 00000000
+divs n eq - fe800000 7f800000 80000000
+divs m eq - fe800000 7f800000 80000000
+divs p eq - fe800000 7f800000 80000000
+divs z eq - fe800000 7f800000 80000000
+divs n eq - 7f000000 ff800000 80000000
+divs m eq - 7f000000 ff800000 80000000
+divs p eq - 7f000000 ff800000 80000000
+divs z eq - 7f000000 ff800000 80000000
+divs n eq - fe800000 ff800000 00000000
+divs m eq - fe800000 ff800000 00000000
+divs p eq - fe800000 ff800000 00000000
+divs z eq - fe800000 ff800000 00000000
+divs n eq - 7effffff 7f800000 00000000
+divs m eq - 7effffff 7f800000 00000000
+divs p eq - 7effffff 7f800000 00000000
+divs z eq - 7effffff 7f800000 00000000
+divs n eq - fe7fffff 7f800000 80000000
+divs m eq - fe7fffff 7f800000 80000000
+divs p eq - fe7fffff 7f800000 80000000
+divs z eq - fe7fffff 7f800000 80000000
+divs n eq - 7f7fffff ff800000 80000000
+divs m eq - 7f7fffff ff800000 80000000
+divs p eq - 7f7fffff ff800000 80000000
+divs z eq - 7f7fffff ff800000 80000000
+divs n eq - ff7fffff ff800000 00000000
+divs m eq - ff7fffff ff800000 00000000
+divs p eq - ff7fffff ff800000 00000000
+divs z eq - ff7fffff ff800000 00000000
+divs n eq - 7f800000 7f000000 7f800000
+divs m eq - 7f800000 7f000000 7f800000
+divs p eq - 7f800000 7f000000 7f800000
+divs z eq - 7f800000 7f000000 7f800000
+divs n eq - ff800000 7e800000 ff800000
+divs m eq - ff800000 7e800000 ff800000
+divs p eq - ff800000 7e800000 ff800000
+divs z eq - ff800000 7e800000 ff800000
+divs n eq - 7f800000 ff000000 ff800000
+divs m eq - 7f800000 ff000000 ff800000
+divs p eq - 7f800000 ff000000 ff800000
+divs z eq - 7f800000 ff000000 ff800000
+divs n eq - ff800000 fe800000 7f800000
+divs m eq - ff800000 fe800000 7f800000
+divs p eq - ff800000 fe800000 7f800000
+divs z eq - ff800000 fe800000 7f800000
+divs n eq - 7f800000 7effffff 7f800000
+divs m eq - 7f800000 7effffff 7f800000
+divs p eq - 7f800000 7effffff 7f800000
+divs z eq - 7f800000 7effffff 7f800000
+divs n eq - 7f800000 fe7fffff ff800000
+divs m eq - 7f800000 fe7fffff ff800000
+divs p eq - 7f800000 fe7fffff ff800000
+divs z eq - 7f800000 fe7fffff ff800000
+divs n eq - 7f800000 ff7fffff ff800000
+divs m eq - 7f800000 ff7fffff ff800000
+divs p eq - 7f800000 ff7fffff ff800000
+divs z eq - 7f800000 ff7fffff ff800000
+divs n eq - ff800000 ff7fffff 7f800000
+divs m eq - ff800000 ff7fffff 7f800000
+divs p eq - ff800000 ff7fffff 7f800000
+divs z eq - ff800000 ff7fffff 7f800000
+divs n eq - 7f800000 00800000 7f800000
+divs m eq - 7f800000 00800000 7f800000
+divs p eq - 7f800000 00800000 7f800000
+divs z eq - 7f800000 00800000 7f800000
+divs n eq - ff800000 01000000 ff800000
+divs m eq - ff800000 01000000 ff800000
+divs p eq - ff800000 01000000 ff800000
+divs z eq - ff800000 01000000 ff800000
+divs n eq - 7f800000 81000000 ff800000
+divs m eq - 7f800000 81000000 ff800000
+divs p eq - 7f800000 81000000 ff800000
+divs z eq - 7f800000 81000000 ff800000
+divs n eq - ff800000 80800000 7f800000
+divs m eq - ff800000 80800000 7f800000
+divs p eq - ff800000 80800000 7f800000
+divs z eq - ff800000 80800000 7f800000
+divs n eq - 7f800000 00ffffff 7f800000
+divs m eq - 7f800000 00ffffff 7f800000
+divs p eq - 7f800000 00ffffff 7f800000
+divs z eq - 7f800000 00ffffff 7f800000
+divs n eq - ff800000 00800001 ff800000
+divs m eq - ff800000 00800001 ff800000
+divs p eq - ff800000 00800001 ff800000
+divs z eq - ff800000 00800001 ff800000
+divs n eq - 7f800000 80800001 ff800000
+divs m eq - 7f800000 80800001 ff800000
+divs p eq - 7f800000 80800001 ff800000
+divs z eq - 7f800000 80800001 ff800000
+divs n eq - ff800000 80ffffff 7f800000
+divs m eq - ff800000 80ffffff 7f800000
+divs p eq - ff800000 80ffffff 7f800000
+divs z eq - ff800000 80ffffff 7f800000
+divs n eq - 00800000 7f800000 00000000
+divs m eq - 00800000 7f800000 00000000
+divs p eq - 00800000 7f800000 00000000
+divs z eq - 00800000 7f800000 00000000
+divs n eq - 81000000 7f800000 80000000
+divs m eq - 81000000 7f800000 80000000
+divs p eq - 81000000 7f800000 80000000
+divs z eq - 81000000 7f800000 80000000
+divs n eq - 01000000 ff800000 80000000
+divs m eq - 01000000 ff800000 80000000
+divs p eq - 01000000 ff800000 80000000
+divs z eq - 01000000 ff800000 80000000
+divs n eq - 80800000 ff800000 00000000
+divs m eq - 80800000 ff800000 00000000
+divs p eq - 80800000 ff800000 00000000
+divs z eq - 80800000 ff800000 00000000
+divs n eq - 00ffffff 7f800000 00000000
+divs m eq - 00ffffff 7f800000 00000000
+divs p eq - 00ffffff 7f800000 00000000
+divs z eq - 00ffffff 7f800000 00000000
+divs n eq - 80800001 7f800000 80000000
+divs m eq - 80800001 7f800000 80000000
+divs p eq - 80800001 7f800000 80000000
+divs z eq - 80800001 7f800000 80000000
+divs n eq - 00800001 ff800000 80000000
+divs m eq - 00800001 ff800000 80000000
+divs p eq - 00800001 ff800000 80000000
+divs z eq - 00800001 ff800000 80000000
+divs n eq - 80ffffff ff800000 00000000
+divs m eq - 80ffffff ff800000 00000000
+divs p eq - 80ffffff ff800000 00000000
+divs z eq - 80ffffff ff800000 00000000
+divs n eq - 7f800000 00000001 7f800000
+divs m eq - 7f800000 00000001 7f800000
+divs p eq - 7f800000 00000001 7f800000
+divs z eq - 7f800000 00000001 7f800000
+divs n eq - ff800000 00000003 ff800000
+divs m eq - ff800000 00000003 ff800000
+divs p eq - ff800000 00000003 ff800000
+divs z eq - ff800000 00000003 ff800000
+divs n eq - 7f800000 80000002 ff800000
+divs m eq - 7f800000 80000002 ff800000
+divs p eq - 7f800000 80000002 ff800000
+divs z eq - 7f800000 80000002 ff800000
+divs n eq - ff800000 80000004 7f800000
+divs m eq - ff800000 80000004 7f800000
+divs p eq - ff800000 80000004 7f800000
+divs z eq - ff800000 80000004 7f800000
+divs n eq - 7f800000 007fffff 7f800000
+divs m eq - 7f800000 007fffff 7f800000
+divs p eq - 7f800000 007fffff 7f800000
+divs z eq - 7f800000 007fffff 7f800000
+divs n eq - ff800000 007fffff ff800000
+divs m eq - ff800000 007fffff ff800000
+divs p eq - ff800000 007fffff ff800000
+divs z eq - ff800000 007fffff ff800000
+divs n eq - 7f800000 807fffff ff800000
+divs m eq - 7f800000 807fffff ff800000
+divs p eq - 7f800000 807fffff ff800000
+divs z eq - 7f800000 807fffff ff800000
+divs n eq - ff800000 807fffff 7f800000
+divs m eq - ff800000 807fffff 7f800000
+divs p eq - ff800000 807fffff 7f800000
+divs z eq - ff800000 807fffff 7f800000
+divs n eq - 00000001 7f800000 00000000
+divs m eq - 00000001 7f800000 00000000
+divs p eq - 00000001 7f800000 00000000
+divs z eq - 00000001 7f800000 00000000
+divs n eq - 80000003 7f800000 80000000
+divs m eq - 80000003 7f800000 80000000
+divs p eq - 80000003 7f800000 80000000
+divs z eq - 80000003 7f800000 80000000
+divs n eq - 00000002 ff800000 80000000
+divs m eq - 00000002 ff800000 80000000
+divs p eq - 00000002 ff800000 80000000
+divs z eq - 00000002 ff800000 80000000
+divs n eq - 80000004 ff800000 00000000
+divs m eq - 80000004 ff800000 00000000
+divs p eq - 80000004 ff800000 00000000
+divs z eq - 80000004 ff800000 00000000
+divs n eq - 007fffff 7f800000 00000000
+divs m eq - 007fffff 7f800000 00000000
+divs p eq - 007fffff 7f800000 00000000
+divs z eq - 007fffff 7f800000 00000000
+divs n eq - 807fffff 7f800000 80000000
+divs m eq - 807fffff 7f800000 80000000
+divs p eq - 807fffff 7f800000 80000000
+divs z eq - 807fffff 7f800000 80000000
+divs n eq - 007fffff ff800000 80000000
+divs m eq - 007fffff ff800000 80000000
+divs p eq - 007fffff ff800000 80000000
+divs z eq - 007fffff ff800000 80000000
+divs n eq - 807fffff ff800000 00000000
+divs m eq - 807fffff ff800000 00000000
+divs p eq - 807fffff ff800000 00000000
+divs z eq - 807fffff ff800000 00000000
+divs n eq - 00000000 3f800000 00000000
+divs m eq - 00000000 3f800000 00000000
+divs p eq - 00000000 3f800000 00000000
+divs z eq - 00000000 3f800000 00000000
+divs n eq - 80000000 40000000 80000000
+divs m eq - 80000000 40000000 80000000
+divs p eq - 80000000 40000000 80000000
+divs z eq - 80000000 40000000 80000000
+divs n eq - 00000000 c0400000 80000000
+divs m eq - 00000000 c0400000 80000000
+divs p eq - 00000000 c0400000 80000000
+divs z eq - 00000000 c0400000 80000000
+divs n eq - 80000000 c0800000 00000000
+divs m eq - 80000000 c0800000 00000000
+divs p eq - 80000000 c0800000 00000000
+divs z eq - 80000000 c0800000 00000000
+divs n eq - 00000000 40a00000 00000000
+divs m eq - 00000000 40a00000 00000000
+divs p eq - 00000000 40a00000 00000000
+divs z eq - 00000000 40a00000 00000000
+divs n eq - 80000000 40c00000 80000000
+divs m eq - 80000000 40c00000 80000000
+divs p eq - 80000000 40c00000 80000000
+divs z eq - 80000000 40c00000 80000000
+divs n eq - 00000000 c0e00000 80000000
+divs m eq - 00000000 c0e00000 80000000
+divs p eq - 00000000 c0e00000 80000000
+divs z eq - 00000000 c0e00000 80000000
+divs n eq - 80000000 c1000000 00000000
+divs m eq - 80000000 c1000000 00000000
+divs p eq - 80000000 c1000000 00000000
+divs z eq - 80000000 c1000000 00000000
+divs n eq d 3f800000 00000000 7f800000
+divs m eq d 3f800000 00000000 7f800000
+divs p eq d 3f800000 00000000 7f800000
+divs z eq d 3f800000 00000000 7f800000
+divs n eq d c0000000 00000000 ff800000
+divs m eq d c0000000 00000000 ff800000
+divs p eq d c0000000 00000000 ff800000
+divs z eq d c0000000 00000000 ff800000
+divs n eq d 40400000 80000000 ff800000
+divs m eq d 40400000 80000000 ff800000
+divs p eq d 40400000 80000000 ff800000
+divs z eq d 40400000 80000000 ff800000
+divs n eq d c0800000 80000000 7f800000
+divs m eq d c0800000 80000000 7f800000
+divs p eq d c0800000 80000000 7f800000
+divs z eq d c0800000 80000000 7f800000
+divs n eq d 40a00000 00000000 7f800000
+divs m eq d 40a00000 00000000 7f800000
+divs p eq d 40a00000 00000000 7f800000
+divs z eq d 40a00000 00000000 7f800000
+divs n eq d c0c00000 00000000 ff800000
+divs m eq d c0c00000 00000000 ff800000
+divs p eq d c0c00000 00000000 ff800000
+divs z eq d c0c00000 00000000 ff800000
+divs n eq d 40e00000 80000000 ff800000
+divs m eq d 40e00000 80000000 ff800000
+divs p eq d 40e00000 80000000 ff800000
+divs z eq d 40e00000 80000000 ff800000
+divs n eq d c1000000 80000000 7f800000
+divs m eq d c1000000 80000000 7f800000
+divs p eq d c1000000 80000000 7f800000
+divs z eq d c1000000 80000000 7f800000
+divs n eq - 00000000 7f000000 00000000
+divs m eq - 00000000 7f000000 00000000
+divs p eq - 00000000 7f000000 00000000
+divs z eq - 00000000 7f000000 00000000
+divs n eq - 80000000 7e800000 80000000
+divs m eq - 80000000 7e800000 80000000
+divs p eq - 80000000 7e800000 80000000
+divs z eq - 80000000 7e800000 80000000
+divs n eq - 00000000 ff000000 80000000
+divs m eq - 00000000 ff000000 80000000
+divs p eq - 00000000 ff000000 80000000
+divs z eq - 00000000 ff000000 80000000
+divs n eq - 80000000 fe800000 00000000
+divs m eq - 80000000 fe800000 00000000
+divs p eq - 80000000 fe800000 00000000
+divs z eq - 80000000 fe800000 00000000
+divs n eq - 00000000 7effffff 00000000
+divs m eq - 00000000 7effffff 00000000
+divs p eq - 00000000 7effffff 00000000
+divs z eq - 00000000 7effffff 00000000
+divs n eq - 80000000 7e7fffff 80000000
+divs m eq - 80000000 7e7fffff 80000000
+divs p eq - 80000000 7e7fffff 80000000
+divs z eq - 80000000 7e7fffff 80000000
+divs n eq - 00000000 fe7fffff 80000000
+divs m eq - 00000000 fe7fffff 80000000
+divs p eq - 00000000 fe7fffff 80000000
+divs z eq - 00000000 fe7fffff 80000000
+divs n eq - 80000000 feffffff 00000000
+divs m eq - 80000000 feffffff 00000000
+divs p eq - 80000000 feffffff 00000000
+divs z eq - 80000000 feffffff 00000000
+divs n eq d 7f000000 00000000 7f800000
+divs m eq d 7f000000 00000000 7f800000
+divs p eq d 7f000000 00000000 7f800000
+divs z eq d 7f000000 00000000 7f800000
+divs n eq d fe800000 00000000 ff800000
+divs m eq d fe800000 00000000 ff800000
+divs p eq d fe800000 00000000 ff800000
+divs z eq d fe800000 00000000 ff800000
+divs n eq d 7f000000 80000000 ff800000
+divs m eq d 7f000000 80000000 ff800000
+divs p eq d 7f000000 80000000 ff800000
+divs z eq d 7f000000 80000000 ff800000
+divs n eq d fe800000 80000000 7f800000
+divs m eq d fe800000 80000000 7f800000
+divs p eq d fe800000 80000000 7f800000
+divs z eq d fe800000 80000000 7f800000
+divs n eq d 7effffff 00000000 7f800000
+divs m eq d 7effffff 00000000 7f800000
+divs p eq d 7effffff 00000000 7f800000
+divs z eq d 7effffff 00000000 7f800000
+divs n eq d fe7fffff 00000000 ff800000
+divs m eq d fe7fffff 00000000 ff800000
+divs p eq d fe7fffff 00000000 ff800000
+divs z eq d fe7fffff 00000000 ff800000
+divs n eq d 7e7fffff 80000000 ff800000
+divs m eq d 7e7fffff 80000000 ff800000
+divs p eq d 7e7fffff 80000000 ff800000
+divs z eq d 7e7fffff 80000000 ff800000
+divs n eq d feffffff 80000000 7f800000
+divs m eq d feffffff 80000000 7f800000
+divs p eq d feffffff 80000000 7f800000
+divs z eq d feffffff 80000000 7f800000
+divs n eq - 00000000 00800000 00000000
+divs m eq - 00000000 00800000 00000000
+divs p eq - 00000000 00800000 00000000
+divs z eq - 00000000 00800000 00000000
+divs n eq - 80000000 01000000 80000000
+divs m eq - 80000000 01000000 80000000
+divs p eq - 80000000 01000000 80000000
+divs z eq - 80000000 01000000 80000000
+divs n eq - 00000000 81000000 80000000
+divs m eq - 00000000 81000000 80000000
+divs p eq - 00000000 81000000 80000000
+divs z eq - 00000000 81000000 80000000
+divs n eq - 80000000 80800000 00000000
+divs m eq - 80000000 80800000 00000000
+divs p eq - 80000000 80800000 00000000
+divs z eq - 80000000 80800000 00000000
+divs n eq - 00000000 00ffffff 00000000
+divs m eq - 00000000 00ffffff 00000000
+divs p eq - 00000000 00ffffff 00000000
+divs z eq - 00000000 00ffffff 00000000
+divs n eq - 80000000 00800001 80000000
+divs m eq - 80000000 00800001 80000000
+divs p eq - 80000000 00800001 80000000
+divs z eq - 80000000 00800001 80000000
+divs n eq - 00000000 80800001 80000000
+divs m eq - 00000000 80800001 80000000
+divs p eq - 00000000 80800001 80000000
+divs z eq - 00000000 80800001 80000000
+divs n eq - 80000000 80ffffff 00000000
+divs m eq - 80000000 80ffffff 00000000
+divs p eq - 80000000 80ffffff 00000000
+divs z eq - 80000000 80ffffff 00000000
+divs n eq d 00800000 00000000 7f800000
+divs m eq d 00800000 00000000 7f800000
+divs p eq d 00800000 00000000 7f800000
+divs z eq d 00800000 00000000 7f800000
+divs n eq d 81000000 00000000 ff800000
+divs m eq d 81000000 00000000 ff800000
+divs p eq d 81000000 00000000 ff800000
+divs z eq d 81000000 00000000 ff800000
+divs n eq d 01000000 80000000 ff800000
+divs m eq d 01000000 80000000 ff800000
+divs p eq d 01000000 80000000 ff800000
+divs z eq d 01000000 80000000 ff800000
+divs n eq d 80800000 80000000 7f800000
+divs m eq d 80800000 80000000 7f800000
+divs p eq d 80800000 80000000 7f800000
+divs z eq d 80800000 80000000 7f800000
+divs n eq d 00ffffff 00000000 7f800000
+divs m eq d 00ffffff 00000000 7f800000
+divs p eq d 00ffffff 00000000 7f800000
+divs z eq d 00ffffff 00000000 7f800000
+divs n eq d 80800001 00000000 ff800000
+divs m eq d 80800001 00000000 ff800000
+divs p eq d 80800001 00000000 ff800000
+divs z eq d 80800001 00000000 ff800000
+divs n eq d 00800001 80000000 ff800000
+divs m eq d 00800001 80000000 ff800000
+divs p eq d 00800001 80000000 ff800000
+divs z eq d 00800001 80000000 ff800000
+divs n eq d 80ffffff 80000000 7f800000
+divs m eq d 80ffffff 80000000 7f800000
+divs p eq d 80ffffff 80000000 7f800000
+divs z eq d 80ffffff 80000000 7f800000
+divs n eq - 00000000 00000001 00000000
+divs m eq - 00000000 00000001 00000000
+divs p eq - 00000000 00000001 00000000
+divs z eq - 00000000 00000001 00000000
+divs n eq - 80000000 00000003 80000000
+divs m eq - 80000000 00000003 80000000
+divs p eq - 80000000 00000003 80000000
+divs z eq - 80000000 00000003 80000000
+divs n eq - 00000000 80000002 80000000
+divs m eq - 00000000 80000002 80000000
+divs p eq - 00000000 80000002 80000000
+divs z eq - 00000000 80000002 80000000
+divs n eq - 80000000 80000004 00000000
+divs m eq - 80000000 80000004 00000000
+divs p eq - 80000000 80000004 00000000
+divs z eq - 80000000 80000004 00000000
+divs n eq - 00000000 007fffff 00000000
+divs m eq - 00000000 007fffff 00000000
+divs p eq - 00000000 007fffff 00000000
+divs z eq - 00000000 007fffff 00000000
+divs n eq - 80000000 007fffff 80000000
+divs m eq - 80000000 007fffff 80000000
+divs p eq - 80000000 007fffff 80000000
+divs z eq - 80000000 007fffff 80000000
+divs n eq - 00000000 807fffff 80000000
+divs m eq - 00000000 807fffff 80000000
+divs p eq - 00000000 807fffff 80000000
+divs z eq - 00000000 807fffff 80000000
+divs n eq - 80000000 807fffff 00000000
+divs m eq - 80000000 807fffff 00000000
+divs p eq - 80000000 807fffff 00000000
+divs z eq - 80000000 807fffff 00000000
+divs n eq d 00000001 00000000 7f800000
+divs m eq d 00000001 00000000 7f800000
+divs p eq d 00000001 00000000 7f800000
+divs z eq d 00000001 00000000 7f800000
+divs n eq d 80000003 00000000 ff800000
+divs m eq d 80000003 00000000 ff800000
+divs p eq d 80000003 00000000 ff800000
+divs z eq d 80000003 00000000 ff800000
+divs n eq d 00000002 80000000 ff800000
+divs m eq d 00000002 80000000 ff800000
+divs p eq d 00000002 80000000 ff800000
+divs z eq d 00000002 80000000 ff800000
+divs n eq d 80000004 80000000 7f800000
+divs m eq d 80000004 80000000 7f800000
+divs p eq d 80000004 80000000 7f800000
+divs z eq d 80000004 80000000 7f800000
+divs n eq d 007fffff 00000000 7f800000
+divs m eq d 007fffff 00000000 7f800000
+divs p eq d 007fffff 00000000 7f800000
+divs z eq d 007fffff 00000000 7f800000
+divs n eq d 807fffff 00000000 ff800000
+divs m eq d 807fffff 00000000 ff800000
+divs p eq d 807fffff 00000000 ff800000
+divs z eq d 807fffff 00000000 ff800000
+divs n eq d 007fffff 80000000 ff800000
+divs m eq d 007fffff 80000000 ff800000
+divs p eq d 007fffff 80000000 ff800000
+divs z eq d 007fffff 80000000 ff800000
+divs n eq d 807fffff 80000000 7f800000
+divs m eq d 807fffff 80000000 7f800000
+divs p eq d 807fffff 80000000 7f800000
+divs z eq d 807fffff 80000000 7f800000
+divs n eq - 7f000000 40000000 7e800000
+divs m eq - 7f000000 40000000 7e800000
+divs p eq - 7f000000 40000000 7e800000
+divs z eq - 7f000000 40000000 7e800000
+divs n eq - 7f000000 c0000000 fe800000
+divs m eq - 7f000000 c0000000 fe800000
+divs p eq - 7f000000 c0000000 fe800000
+divs z eq - 7f000000 c0000000 fe800000
+divs n eq - feffffff 40000000 fe7fffff
+divs m eq - feffffff 40000000 fe7fffff
+divs p eq - feffffff 40000000 fe7fffff
+divs z eq - feffffff 40000000 fe7fffff
+divs n eq - 7efffffd c0000000 fe7ffffd
+divs m eq - 7efffffd c0000000 fe7ffffd
+divs p eq - 7efffffd c0000000 fe7ffffd
+divs z eq - 7efffffd c0000000 fe7ffffd
+divs n eq - 7f7fffff 7effffff 40000000
+divs m eq - 7f7fffff 7effffff 40000000
+divs p eq - 7f7fffff 7effffff 40000000
+divs z eq - 7f7fffff 7effffff 40000000
+divs n eq - 7f7fffff c0000000 feffffff
+divs m eq - 7f7fffff c0000000 feffffff
+divs p eq - 7f7fffff c0000000 feffffff
+divs z eq - 7f7fffff c0000000 feffffff
+divs n eq - ff000001 7e800001 c0000000
+divs m eq - ff000001 7e800001 c0000000
+divs p eq - ff000001 7e800001 c0000000
+divs z eq - ff000001 7e800001 c0000000
+divs n eq - 7f000003 fe800003 c0000000
+divs m eq - 7f000003 fe800003 c0000000
+divs p eq - 7f000003 fe800003 c0000000
+divs z eq - 7f000003 fe800003 c0000000
+divs n eq - 7f7fffff 7e7fffff 40800000
+divs m eq - 7f7fffff 7e7fffff 40800000
+divs p eq - 7f7fffff 7e7fffff 40800000
+divs z eq - 7f7fffff 7e7fffff 40800000
+divs n eq - ff7fffff 7e7fffff c0800000
+divs m eq - ff7fffff 7e7fffff c0800000
+divs p eq - ff7fffff 7e7fffff c0800000
+divs z eq - ff7fffff 7e7fffff c0800000
+divs n eq - 7f7fffff fe7fffff c0800000
+divs m eq - 7f7fffff fe7fffff c0800000
+divs p eq - 7f7fffff fe7fffff c0800000
+divs z eq - 7f7fffff fe7fffff c0800000
+divs n eq - ff7fffff fe7fffff 40800000
+divs m eq - ff7fffff fe7fffff 40800000
+divs p eq - ff7fffff fe7fffff 40800000
+divs z eq - ff7fffff fe7fffff 40800000
+divs n eq - 7f7ffffd 40800000 7e7ffffd
+divs m eq - 7f7ffffd 40800000 7e7ffffd
+divs p eq - 7f7ffffd 40800000 7e7ffffd
+divs z eq - 7f7ffffd 40800000 7e7ffffd
+divs n eq - 7f7ffffd c0800000 fe7ffffd
+divs m eq - 7f7ffffd c0800000 fe7ffffd
+divs p eq - 7f7ffffd c0800000 fe7ffffd
+divs z eq - 7f7ffffd c0800000 fe7ffffd
+divs n eq - ff7ffffd 40800000 fe7ffffd
+divs m eq - ff7ffffd 40800000 fe7ffffd
+divs p eq - ff7ffffd 40800000 fe7ffffd
+divs z eq - ff7ffffd 40800000 fe7ffffd
+divs n eq - ff7ffffd c0800000 7e7ffffd
+divs m eq - ff7ffffd c0800000 7e7ffffd
+divs p eq - ff7ffffd c0800000 7e7ffffd
+divs z eq - ff7ffffd c0800000 7e7ffffd
+divs n eq - 01000000 00800000 40000000
+divs m eq - 01000000 00800000 40000000
+divs p eq - 01000000 00800000 40000000
+divs z eq - 01000000 00800000 40000000
+divs n eq - 01000000 c0000000 80800000
+divs m eq - 01000000 c0000000 80800000
+divs p eq - 01000000 c0000000 80800000
+divs z eq - 01000000 c0000000 80800000
+divs n eq - 81000001 00800001 c0000000
+divs m eq - 81000001 00800001 c0000000
+divs p eq - 81000001 00800001 c0000000
+divs z eq - 81000001 00800001 c0000000
+divs n eq - 01000003 c0000000 80800003
+divs m eq - 01000003 c0000000 80800003
+divs p eq - 01000003 c0000000 80800003
+divs z eq - 01000003 c0000000 80800003
+divs n eq - 01000001 00800001 40000000
+divs m eq - 01000001 00800001 40000000
+divs p eq - 01000001 00800001 40000000
+divs z eq - 01000001 00800001 40000000
+divs n eq - 01000001 c0000000 80800001
+divs m eq - 01000001 c0000000 80800001
+divs p eq - 01000001 c0000000 80800001
+divs z eq - 01000001 c0000000 80800001
+divs n eq - 81000005 00800005 c0000000
+divs m eq - 81000005 00800005 c0000000
+divs p eq - 81000005 00800005 c0000000
+divs z eq - 81000005 00800005 c0000000
+divs n eq - 01000003 80800003 c0000000
+divs m eq - 01000003 80800003 c0000000
+divs p eq - 01000003 80800003 c0000000
+divs z eq - 01000003 80800003 c0000000
+divs n eq - 007fffff 3f000000 00fffffe
+divs m eq - 007fffff 3f000000 00fffffe
+divs p eq - 007fffff 3f000000 00fffffe
+divs z eq - 007fffff 3f000000 00fffffe
+divs n eq - 007fffff 3b000000 04fffffe
+divs m eq - 007fffff 3b000000 04fffffe
+divs p eq - 007fffff 3b000000 04fffffe
+divs z eq - 007fffff 3b000000 04fffffe
+divs n eq xo 7f000000 3f000000 7f800000
+divs p eq xo 7f000000 3f000000 7f800000
+divs z eq xo 7f000000 3f000000 7f7fffff
+divs m eq xo 7f000000 3f000000 7f7fffff
+divs n eq xo ff000000 bf000000 7f800000
+divs p eq xo ff000000 bf000000 7f800000
+divs z eq xo ff000000 bf000000 7f7fffff
+divs m eq xo ff000000 bf000000 7f7fffff
+divs n eq xo 7f000000 bf000000 ff800000
+divs m eq xo 7f000000 bf000000 ff800000
+divs n eq xo ff000000 3f000000 ff800000
+divs m eq xo ff000000 3f000000 ff800000
+divs z eq xo 7f000000 bf000000 ff7fffff
+divs p eq xo 7f000000 bf000000 ff7fffff
+divs z eq xo ff000000 3f000000 ff7fffff
+divs p eq xo ff000000 3f000000 ff7fffff
+divs n eq xo 7b000000 05000000 7f800000
+divs p eq xo 7b000000 05000000 7f800000
+divs z eq xo 7b000000 05000000 7f7fffff
+divs m eq xo 7b000000 05000000 7f7fffff
+divs n eq xo 7f7fffff 00000001 7f800000
+divs p eq xo 7f7fffff 00000001 7f800000
+divs z eq xo 7f7fffff 00000001 7f7fffff
+divs m eq xo 7f7fffff 00000001 7f7fffff
+divs n eq xo 7f000000 007fffff 7f800000
+divs p eq xo 7f000000 007fffff 7f800000
+divs z eq xo 7f000000 007fffff 7f7fffff
+divs m eq xo 7f000000 007fffff 7f7fffff
+divs n eq xo 7f7fffff 3f7fffff 7f800000
+divs p eq xo 7f7fffff 3f7fffff 7f800000
+divs z eq xo 7f7fffff 3f7fffff 7f7fffff
+divs m eq xo 7f7fffff 3f7fffff 7f7fffff
+divs n eq xu 00800000 3f800001 007fffff
+divs z eq xu 00800000 3f800001 007fffff
+divs m eq xu 00800000 3f800001 007fffff
+divs n eq xu 80800000 3f800001 807fffff
+divs z eq xu 80800000 3f800001 807fffff
+divs p eq xu 80800000 3f800001 807fffff
+divs p eq xu 007ffffe 3f7ffffe 007fffff
+divs n eq xu 007ffffe 3f7ffffe 007fffff
+divs p eq xu 007ffff7 3f7ffffe 007ffff8
+divs n eq xu 007ffff7 3f7ffffe 007ffff8
+divs m eq xu 807ffff8 3f7ffffe 807ffff9
+divs n eq xu 807ffff8 3f7ffffe 807ffff9
+divs m eq xu 00800001 3f800002 007fffff
+divs n eq xu 00800001 3f800002 007fffff
+divs z eq xu 00800001 3f800002 007fffff
+divs m eq xu 007fffff 3f800002 007ffffd
+divs n eq xu 007fffff 3f800002 007ffffd
+divs z eq xu 007fffff 3f800002 007ffffd
+divs m eq xu 00800002 3f800006 007ffffc
+divs n eq xu 00800002 3f800006 007ffffc
+divs z eq xu 00800002 3f800006 007ffffc
+divs z eq xu 007fffff 3f800001 007ffffe
+divs m eq xu 007fffff 3f800001 007ffffe
+divs n eq xu 00000001 7f7fffff 00000000
+divs m eq xu 00000001 7f7fffff 00000000
+divs z eq xu 00000001 7f7fffff 00000000
+divs p eq xu 00000001 7f7fffff 00000001
+divs n eq xu 80000001 ff7fffff 00000000
+divs m eq xu 80000001 ff7fffff 00000000
+divs z eq xu 80000001 ff7fffff 00000000
+divs p eq xu 80000001 ff7fffff 00000001
+divs n eq xu 00000001 ff7fffff 80000000
+divs z eq xu 00000001 ff7fffff 80000000
+divs p eq xu 00000001 ff7fffff 80000000
+divs m eq xu 00000001 ff7fffff 80000001
+divs n eq xu 80000001 7f7fffff 80000000
+divs z eq xu 80000001 7f7fffff 80000000
+divs p eq xu 80000001 7f7fffff 80000000
+divs m eq xu 80000001 7f7fffff 80000001
+divs p eq xu 00000001 40000000 00000001
+divs n eq xu 00000001 40000000 00000000
+divs z eq xu 00000001 40000000 00000000
+divs m eq xu 00000001 40000000 00000000
+divs p eq xu 80000001 c0000000 00000001
+divs n eq xu 80000001 c0000000 00000000
+divs z eq xu 80000001 c0000000 00000000
+divs m eq xu 80000001 c0000000 00000000
+divs m eq xu 00000001 c0000000 80000001
+divs n eq xu 00000001 c0000000 80000000
+divs z eq xu 00000001 c0000000 80000000
+divs p eq xu 00000001 c0000000 80000000
+divs m eq xu 80000001 40000000 80000001
+divs n eq xu 80000001 40000000 80000000
+divs z eq xu 80000001 40000000 80000000
+divs p eq xu 80000001 40000000 80000000
+divs z eq xu 00ffffff 40000000 007fffff
+divs m eq xu 00ffffff 40000000 007fffff
+divs z eq xu 00ffffff c0000000 807fffff
+divs p eq xu 00ffffff c0000000 807fffff
+divs p eq xu 00ffffff 40000000 00800000
+divs n eq xu 00ffffff 40000000 00800000
+divs p eq xu 00800000 3f800001 00800000
+divs m eq xu 80800000 3f800001 80800000
+divs p eq xu 00800001 3f800002 00800000
+divs p eq xu 007fffff 3f800002 007ffffe
+divs n eq - 00fffffe 40000000 007fffff
+divs m eq - 00fffffe 40000000 007fffff
+divs p eq - 00fffffe 40000000 007fffff
+divs z eq - 00fffffe 40000000 007fffff
+divs n eq - 007fffff 3f800000 007fffff
+divs m eq - 007fffff 3f800000 007fffff
+divs p eq - 007fffff 3f800000 007fffff
+divs z eq - 007fffff 3f800000 007fffff
+divs n eq - 00000001 3f000000 00000002
+divs m eq - 00000001 3f000000 00000002
+divs p eq - 00000001 3f000000 00000002
+divs z eq - 00000001 3f000000 00000002
+divs n eq - 00000001 3e000000 00000008
+divs m eq - 00000001 3e000000 00000008
+divs p eq - 00000001 3e000000 00000008
+divs z eq - 00000001 3e000000 00000008
+divs n eq - 00000009 41100000 00000001
+divs m eq - 00000009 41100000 00000001
+divs p eq - 00000009 41100000 00000001
+divs z eq - 00000009 41100000 00000001
+divs n eq - 00000009 c1100000 80000001
+divs m eq - 00000009 c1100000 80000001
+divs p eq - 00000009 c1100000 80000001
+divs z eq - 00000009 c1100000 80000001
+divs n eq - 007fffff bf800000 807fffff
+divs m eq - 007fffff bf800000 807fffff
+divs p eq - 007fffff bf800000 807fffff
+divs z eq - 007fffff bf800000 807fffff
+divs n eq - 80000001 3f000000 80000002
+divs m eq - 80000001 3f000000 80000002
+divs p eq - 80000001 3f000000 80000002
+divs z eq - 80000001 3f000000 80000002
+divs n eq x 3f800000 3f800001 3f7ffffe
+divs z eq x 3f800000 3f800001 3f7ffffe
+divs m eq x 3f800000 3f800001 3f7ffffe
+divs p eq x 3f800000 3f800001 3f7fffff
+divs n eq x 3f800000 3f800002 3f7ffffc
+divs z eq x 3f800000 3f800002 3f7ffffc
+divs m eq x 3f800000 3f800002 3f7ffffc
+divs p eq x 3f800000 3f800002 3f7ffffd
+divs n eq x 3f800000 3f800003 3f7ffffa
+divs z eq x 3f800000 3f800003 3f7ffffa
+divs m eq x 3f800000 3f800003 3f7ffffa
+divs p eq x 3f800000 3f800003 3f7ffffb
+divs n eq x 3f800000 3f800004 3f7ffff8
+divs z eq x 3f800000 3f800004 3f7ffff8
+divs m eq x 3f800000 3f800004 3f7ffff8
+divs p eq x 3f800000 3f800004 3f7ffff9
+divs n eq x 3f800000 3f7fffff 3f800001
+divs z eq x 3f800000 3f7fffff 3f800000
+divs m eq x 3f800000 3f7fffff 3f800000
+divs p eq x 3f800000 3f7fffff 3f800001
+divs n eq x 3f800000 3f7ffffe 3f800001
+divs z eq x 3f800000 3f7ffffe 3f800001
+divs m eq x 3f800000 3f7ffffe 3f800001
+divs p eq x 3f800000 3f7ffffe 3f800002
+divs n eq x 3f800000 3f7ffffd 3f800002
+divs z eq x 3f800000 3f7ffffd 3f800001
+divs m eq x 3f800000 3f7ffffd 3f800001
+divs p eq x 3f800000 3f7ffffd 3f800002
+divs n eq x 3f800000 3f7ffffc 3f800002
+divs z eq x 3f800000 3f7ffffc 3f800002
+divs m eq x 3f800000 3f7ffffc 3f800002
+divs p eq x 3f800000 3f7ffffc 3f800003
+divs n eq x 3f800000 3f7ffffb 3f800003
+divs z eq x 3f800000 3f7ffffb 3f800002
+divs m eq x 3f800000 3f7ffffb 3f800002
+divs p eq x 3f800000 3f7ffffb 3f800003
+divs n eq x 3f800000 3f7ffff8 3f800004
+divs z eq x 3f800000 3f7ffff8 3f800004
+divs m eq x 3f800000 3f7ffff8 3f800004
+divs p eq x 3f800000 3f7ffff8 3f800005
+divs n eq x 3f800000 3f7ffff7 3f800005
+divs z eq x 3f800000 3f7ffff7 3f800004
+divs m eq x 3f800000 3f7ffff7 3f800004
+divs p eq x 3f800000 3f7ffff7 3f800005
+divs n eq x 3f800002 3f800001 3f800001
+divs z eq x 3f800002 3f800001 3f800000
+divs m eq x 3f800002 3f800001 3f800000
+divs p eq x 3f800002 3f800001 3f800001
+divs n eq x 3f800003 3f800001 3f800002
+divs z eq x 3f800003 3f800001 3f800001
+divs m eq x 3f800003 3f800001 3f800001
+divs p eq x 3f800003 3f800001 3f800002
+divs n eq x 3f800004 3f800001 3f800003
+divs z eq x 3f800004 3f800001 3f800002
+divs m eq x 3f800004 3f800001 3f800002
+divs p eq x 3f800004 3f800001 3f800003
+divs n eq x 3f800007 3f800002 3f800005
+divs z eq x 3f800007 3f800002 3f800004
+divs m eq x 3f800007 3f800002 3f800004
+divs p eq x 3f800007 3f800002 3f800005
+divs n eq x 3f800009 3f800008 3f800001
+divs z eq x 3f800009 3f800008 3f800000
+divs m eq x 3f800009 3f800008 3f800000
+divs p eq x 3f800009 3f800008 3f800001
+divs n eq x 3f800001 3f800002 3f7ffffe
+divs z eq x 3f800001 3f800002 3f7ffffe
+divs m eq x 3f800001 3f800002 3f7ffffe
+divs p eq x 3f800001 3f800002 3f7fffff
+divs n eq x 3f800001 3f800003 3f7ffffc
+divs z eq x 3f800001 3f800003 3f7ffffc
+divs m eq x 3f800001 3f800003 3f7ffffc
+divs p eq x 3f800001 3f800003 3f7ffffd
+divs n eq x 3f800002 3f800003 3f7ffffe
+divs z eq x 3f800002 3f800003 3f7ffffe
+divs m eq x 3f800002 3f800003 3f7ffffe
+divs p eq x 3f800002 3f800003 3f7fffff
+divs n eq x 3f800004 3f800007 3f7ffffa
+divs z eq x 3f800004 3f800007 3f7ffffa
+divs m eq x 3f800004 3f800007 3f7ffffa
+divs p eq x 3f800004 3f800007 3f7ffffb
+divs n eq x 3f800006 3f800008 3f7ffffc
+divs z eq x 3f800006 3f800008 3f7ffffc
+divs m eq x 3f800006 3f800008 3f7ffffc
+divs p eq x 3f800006 3f800008 3f7ffffd
+divs n eq x 3f7ffffe 3f7fffff 3f7fffff
+divs z eq x 3f7ffffe 3f7fffff 3f7ffffe
+divs m eq x 3f7ffffe 3f7fffff 3f7ffffe
+divs p eq x 3f7ffffe 3f7fffff 3f7fffff
+divs n eq x 3f7ffffd 3f7fffff 3f7ffffe
+divs z eq x 3f7ffffd 3f7fffff 3f7ffffd
+divs m eq x 3f7ffffd 3f7fffff 3f7ffffd
+divs p eq x 3f7ffffd 3f7fffff 3f7ffffe
+divs n eq x 3f7ffffd 3f7ffffe 3f7fffff
+divs z eq x 3f7ffffd 3f7ffffe 3f7ffffe
+divs m eq x 3f7ffffd 3f7ffffe 3f7ffffe
+divs p eq x 3f7ffffd 3f7ffffe 3f7fffff
+divs n eq x 3f7ffffc 3f7fffff 3f7ffffd
+divs z eq x 3f7ffffc 3f7fffff 3f7ffffc
+divs m eq x 3f7ffffc 3f7fffff 3f7ffffc
+divs p eq x 3f7ffffc 3f7fffff 3f7ffffd
+divs n eq x 3f7ffffc 3f7ffffe 3f7ffffe
+divs z eq x 3f7ffffc 3f7ffffe 3f7ffffd
+divs m eq x 3f7ffffc 3f7ffffe 3f7ffffd
+divs p eq x 3f7ffffc 3f7ffffe 3f7ffffe
+divs n eq x 3f7ffffc 3f7ffffd 3f7fffff
+divs z eq x 3f7ffffc 3f7ffffd 3f7ffffe
+divs m eq x 3f7ffffc 3f7ffffd 3f7ffffe
+divs p eq x 3f7ffffc 3f7ffffd 3f7fffff
+divs n eq x 3f7ffff8 3f7ffffd 3f7ffffb
+divs z eq x 3f7ffff8 3f7ffffd 3f7ffffa
+divs m eq x 3f7ffff8 3f7ffffd 3f7ffffa
+divs p eq x 3f7ffff8 3f7ffffd 3f7ffffb
+divs n eq x 3f7ffff7 3f7ffffe 3f7ffff9
+divs z eq x 3f7ffff7 3f7ffffe 3f7ffff8
+divs m eq x 3f7ffff7 3f7ffffe 3f7ffff8
+divs p eq x 3f7ffff7 3f7ffffe 3f7ffff9
+divs n eq x 3f7ffff8 3f7ffffc 3f7ffffc
+divs z eq x 3f7ffff8 3f7ffffc 3f7ffffb
+divs m eq x 3f7ffff8 3f7ffffc 3f7ffffb
+divs p eq x 3f7ffff8 3f7ffffc 3f7ffffc
+divs n eq x 3f7ffff7 3f7ffffb 3f7ffffc
+divs z eq x 3f7ffff7 3f7ffffb 3f7ffffb
+divs m eq x 3f7ffff7 3f7ffffb 3f7ffffb
+divs p eq x 3f7ffff7 3f7ffffb 3f7ffffc
+divs n eq x 3f7fffff 3f7ffffe 3f800001
+divs z eq x 3f7fffff 3f7ffffe 3f800000
+divs m eq x 3f7fffff 3f7ffffe 3f800000
+divs p eq x 3f7fffff 3f7ffffe 3f800001
+divs n eq x 3f7fffff 3f7ffffd 3f800001
+divs z eq x 3f7fffff 3f7ffffd 3f800001
+divs m eq x 3f7fffff 3f7ffffd 3f800001
+divs p eq x 3f7fffff 3f7ffffd 3f800002
+divs n eq x 3f7ffffe 3f7ffffd 3f800001
+divs z eq x 3f7ffffe 3f7ffffd 3f800000
+divs m eq x 3f7ffffe 3f7ffffd 3f800000
+divs p eq x 3f7ffffe 3f7ffffd 3f800001
+divs n eq x 3f7fffff 3f7ffffc 3f800002
+divs z eq x 3f7fffff 3f7ffffc 3f800001
+divs m eq x 3f7fffff 3f7ffffc 3f800001
+divs p eq x 3f7fffff 3f7ffffc 3f800002
+divs n eq x 3f7ffffe 3f7ffffc 3f800001
+divs z eq x 3f7ffffe 3f7ffffc 3f800001
+divs m eq x 3f7ffffe 3f7ffffc 3f800001
+divs p eq x 3f7ffffe 3f7ffffc 3f800002
+divs n eq x 3f7ffffd 3f7ffffc 3f800001
+divs z eq x 3f7ffffd 3f7ffffc 3f800000
+divs m eq x 3f7ffffd 3f7ffffc 3f800000
+divs p eq x 3f7ffffd 3f7ffffc 3f800001
+divs n eq x 3f7fffff 3f7ffff9 3f800003
+divs z eq x 3f7fffff 3f7ffff9 3f800003
+divs m eq x 3f7fffff 3f7ffff9 3f800003
+divs p eq x 3f7fffff 3f7ffff9 3f800004
+divs n eq x 3f7ffffe 3f7ffff9 3f800003
+divs z eq x 3f7ffffe 3f7ffff9 3f800002
+divs m eq x 3f7ffffe 3f7ffff9 3f800002
+divs p eq x 3f7ffffe 3f7ffff9 3f800003
+divs n eq x 3f7ffffd 3f7ffff9 3f800002
+divs z eq x 3f7ffffd 3f7ffff9 3f800002
+divs m eq x 3f7ffffd 3f7ffff9 3f800002
+divs p eq x 3f7ffffd 3f7ffff9 3f800003
+divs n eq x 3f7ffffc 3f7ffff9 3f800002
+divs z eq x 3f7ffffc 3f7ffff9 3f800001
+divs m eq x 3f7ffffc 3f7ffff9 3f800001
+divs p eq x 3f7ffffc 3f7ffff9 3f800002
+divs n eq x 3f7ffffb 3f7ffff9 3f800001
+divs z eq x 3f7ffffb 3f7ffff9 3f800001
+divs m eq x 3f7ffffb 3f7ffff9 3f800001
+divs p eq x 3f7ffffb 3f7ffff9 3f800002
+divs n eq x 3f7ffffa 3f7ffff9 3f800001
+divs z eq x 3f7ffffa 3f7ffff9 3f800000
+divs m eq x 3f7ffffa 3f7ffff9 3f800000
+divs p eq x 3f7ffffa 3f7ffff9 3f800001
+divs n eq x 3f800001 3f7fffff 3f800002
+divs z eq x 3f800001 3f7fffff 3f800001
+divs m eq x 3f800001 3f7fffff 3f800001
+divs p eq x 3f800001 3f7fffff 3f800002
+divs n eq x 3f800001 3f7ffffe 3f800002
+divs z eq x 3f800001 3f7ffffe 3f800002
+divs m eq x 3f800001 3f7ffffe 3f800002
+divs p eq x 3f800001 3f7ffffe 3f800003
+divs n eq x 3f800002 3f7fffff 3f800003
+divs z eq x 3f800002 3f7fffff 3f800002
+divs m eq x 3f800002 3f7fffff 3f800002
+divs p eq x 3f800002 3f7fffff 3f800003
+divs n eq x 3f800001 3f7ffffd 3f800003
+divs z eq x 3f800001 3f7ffffd 3f800002
+divs m eq x 3f800001 3f7ffffd 3f800002
+divs p eq x 3f800001 3f7ffffd 3f800003
+divs n eq x 3f800003 3f7fffff 3f800004
+divs z eq x 3f800003 3f7fffff 3f800003
+divs m eq x 3f800003 3f7fffff 3f800003
+divs p eq x 3f800003 3f7fffff 3f800004
+divs n eq x 3f800002 3f7ffffe 3f800003
+divs z eq x 3f800002 3f7ffffe 3f800003
+divs m eq x 3f800002 3f7ffffe 3f800003
+divs p eq x 3f800002 3f7ffffe 3f800004
+divs n eq x 3f800003 3f7ffffe 3f800004
+divs z eq x 3f800003 3f7ffffe 3f800004
+divs m eq x 3f800003 3f7ffffe 3f800004
+divs p eq x 3f800003 3f7ffffe 3f800005
+divs n eq x 3f800002 3f7ffffd 3f800004
+divs z eq x 3f800002 3f7ffffd 3f800003
+divs m eq x 3f800002 3f7ffffd 3f800003
+divs p eq x 3f800002 3f7ffffd 3f800004
+divs n eq x 3f800003 3f7ffffd 3f800005
+divs z eq x 3f800003 3f7ffffd 3f800004
+divs m eq x 3f800003 3f7ffffd 3f800004
+divs p eq x 3f800003 3f7ffffd 3f800005
+divs n eq x 3f800001 3f7ffffb 3f800004
+divs z eq x 3f800001 3f7ffffb 3f800003
+divs m eq x 3f800001 3f7ffffb 3f800003
+divs p eq x 3f800001 3f7ffffb 3f800004
+divs n eq x 3f800005 3f7fffff 3f800006
+divs z eq x 3f800005 3f7fffff 3f800005
+divs m eq x 3f800005 3f7fffff 3f800005
+divs p eq x 3f800005 3f7fffff 3f800006
+divs n eq x 3f800002 3f7ffffc 3f800004
+divs z eq x 3f800002 3f7ffffc 3f800004
+divs m eq x 3f800002 3f7ffffc 3f800004
+divs p eq x 3f800002 3f7ffffc 3f800005
+divs n eq x 3f800004 3f7ffffe 3f800005
+divs z eq x 3f800004 3f7ffffe 3f800005
+divs m eq x 3f800004 3f7ffffe 3f800005
+divs p eq x 3f800004 3f7ffffe 3f800006
+divs n eq x 3f7fffff 3f800001 3f7ffffd
+divs z eq x 3f7fffff 3f800001 3f7ffffd
+divs m eq x 3f7fffff 3f800001 3f7ffffd
+divs p eq x 3f7fffff 3f800001 3f7ffffe
+divs n eq x 3f7ffffe 3f800001 3f7ffffc
+divs z eq x 3f7ffffe 3f800001 3f7ffffc
+divs m eq x 3f7ffffe 3f800001 3f7ffffc
+divs p eq x 3f7ffffe 3f800001 3f7ffffd
+divs n eq x 3f7fffff 3f800002 3f7ffffb
+divs z eq x 3f7fffff 3f800002 3f7ffffb
+divs m eq x 3f7fffff 3f800002 3f7ffffb
+divs p eq x 3f7fffff 3f800002 3f7ffffc
+divs n eq x 3f7ffffd 3f800001 3f7ffffb
+divs z eq x 3f7ffffd 3f800001 3f7ffffb
+divs m eq x 3f7ffffd 3f800001 3f7ffffb
+divs p eq x 3f7ffffd 3f800001 3f7ffffc
+divs n eq x 3f7fffff 3f800003 3f7ffff9
+divs z eq x 3f7fffff 3f800003 3f7ffff9
+divs m eq x 3f7fffff 3f800003 3f7ffff9
+divs p eq x 3f7fffff 3f800003 3f7ffffa
+divs n eq x 3f7ffffe 3f800002 3f7ffffa
+divs z eq x 3f7ffffe 3f800002 3f7ffffa
+divs m eq x 3f7ffffe 3f800002 3f7ffffa
+divs p eq x 3f7ffffe 3f800002 3f7ffffb
+divs n eq x 3f7ffffc 3f800001 3f7ffffa
+divs z eq x 3f7ffffc 3f800001 3f7ffffa
+divs m eq x 3f7ffffc 3f800001 3f7ffffa
+divs p eq x 3f7ffffc 3f800001 3f7ffffb
+divs n eq x 3f7fffff 3f800004 3f7ffff7
+divs z eq x 3f7fffff 3f800004 3f7ffff7
+divs m eq x 3f7fffff 3f800004 3f7ffff7
+divs p eq x 3f7fffff 3f800004 3f7ffff8
+divs n eq x 3f7ffffd 3f800002 3f7ffff9
+divs z eq x 3f7ffffd 3f800002 3f7ffff9
+divs m eq x 3f7ffffd 3f800002 3f7ffff9
+divs p eq x 3f7ffffd 3f800002 3f7ffffa
+divs n eq x 3f7ffffe 3f800003 3f7ffff8
+divs z eq x 3f7ffffe 3f800003 3f7ffff8
+divs m eq x 3f7ffffe 3f800003 3f7ffff8
+divs p eq x 3f7ffffe 3f800003 3f7ffff9
+divs n uo - 7fff0000 00000000 7fff0000
+divs m uo - 7fff0000 00000000 7fff0000
+divs p uo - 7fff0000 00000000 7fff0000
+divs z uo - 7fff0000 00000000 7fff0000
+divs n uo - 7fff0000 80000000 7fff0000
+divs m uo - 7fff0000 80000000 7fff0000
+divs p uo - 7fff0000 80000000 7fff0000
+divs z uo - 7fff0000 80000000 7fff0000
+divs n uo - 00000000 7fff0000 7fff0000
+divs m uo - 00000000 7fff0000 7fff0000
+divs p uo - 00000000 7fff0000 7fff0000
+divs z uo - 00000000 7fff0000 7fff0000
+divs n uo - 80000000 7fff0000 7fff0000
+divs m uo - 80000000 7fff0000 7fff0000
+divs p uo - 80000000 7fff0000 7fff0000
+divs z uo - 80000000 7fff0000 7fff0000
+divs n uo - 7fff0000 3f800000 7fff0000
+divs m uo - 7fff0000 3f800000 7fff0000
+divs p uo - 7fff0000 3f800000 7fff0000
+divs z uo - 7fff0000 3f800000 7fff0000
+divs n uo - 7fff0000 bf800000 7fff0000
+divs m uo - 7fff0000 bf800000 7fff0000
+divs p uo - 7fff0000 bf800000 7fff0000
+divs z uo - 7fff0000 bf800000 7fff0000
+divs n uo - 3f800000 7fff0000 7fff0000
+divs m uo - 3f800000 7fff0000 7fff0000
+divs p uo - 3f800000 7fff0000 7fff0000
+divs z uo - 3f800000 7fff0000 7fff0000
+divs n uo - bf800000 7fff0000 7fff0000
+divs m uo - bf800000 7fff0000 7fff0000
+divs p uo - bf800000 7fff0000 7fff0000
+divs z uo - bf800000 7fff0000 7fff0000
+divs n uo - 007fffff 7fff0000 7fff0000
+divs m uo - 007fffff 7fff0000 7fff0000
+divs p uo - 007fffff 7fff0000 7fff0000
+divs z uo - 007fffff 7fff0000 7fff0000
+divs n uo - 807fffff 7fff0000 7fff0000
+divs m uo - 807fffff 7fff0000 7fff0000
+divs p uo - 807fffff 7fff0000 7fff0000
+divs z uo - 807fffff 7fff0000 7fff0000
+divs n uo - 7fff0000 007fffff 7fff0000
+divs m uo - 7fff0000 007fffff 7fff0000
+divs p uo - 7fff0000 007fffff 7fff0000
+divs z uo - 7fff0000 007fffff 7fff0000
+divs n uo - 7fff0000 807fffff 7fff0000
+divs m uo - 7fff0000 807fffff 7fff0000
+divs p uo - 7fff0000 807fffff 7fff0000
+divs z uo - 7fff0000 807fffff 7fff0000
+divs n uo - 7fff0000 00000001 7fff0000
+divs m uo - 7fff0000 00000001 7fff0000
+divs p uo - 7fff0000 00000001 7fff0000
+divs z uo - 7fff0000 00000001 7fff0000
+divs n uo - 7fff0000 80000001 7fff0000
+divs m uo - 7fff0000 80000001 7fff0000
+divs p uo - 7fff0000 80000001 7fff0000
+divs z uo - 7fff0000 80000001 7fff0000
+divs n uo - 00000001 7fff0000 7fff0000
+divs m uo - 00000001 7fff0000 7fff0000
+divs p uo - 00000001 7fff0000 7fff0000
+divs z uo - 00000001 7fff0000 7fff0000
+divs n uo - 80000001 7fff0000 7fff0000
+divs m uo - 80000001 7fff0000 7fff0000
+divs p uo - 80000001 7fff0000 7fff0000
+divs z uo - 80000001 7fff0000 7fff0000
+divs n uo - 7fff0000 7f7fffff 7fff0000
+divs m uo - 7fff0000 7f7fffff 7fff0000
+divs p uo - 7fff0000 7f7fffff 7fff0000
+divs z uo - 7fff0000 7f7fffff 7fff0000
+divs n uo - 7fff0000 ff7fffff 7fff0000
+divs m uo - 7fff0000 ff7fffff 7fff0000
+divs p uo - 7fff0000 ff7fffff 7fff0000
+divs z uo - 7fff0000 ff7fffff 7fff0000
+divs n uo - 7f7fffff 7fff0000 7fff0000
+divs m uo - 7f7fffff 7fff0000 7fff0000
+divs p uo - 7f7fffff 7fff0000 7fff0000
+divs z uo - 7f7fffff 7fff0000 7fff0000
+divs n uo - ff7fffff 7fff0000 7fff0000
+divs m uo - ff7fffff 7fff0000 7fff0000
+divs p uo - ff7fffff 7fff0000 7fff0000
+divs z uo - ff7fffff 7fff0000 7fff0000
+divs n uo - 7fff0000 7f800000 7fff0000
+divs m uo - 7fff0000 7f800000 7fff0000
+divs p uo - 7fff0000 7f800000 7fff0000
+divs z uo - 7fff0000 7f800000 7fff0000
+divs n uo - 7fff0000 ff800000 7fff0000
+divs m uo - 7fff0000 ff800000 7fff0000
+divs p uo - 7fff0000 ff800000 7fff0000
+divs z uo - 7fff0000 ff800000 7fff0000
+divs n uo - 7f800000 7fff0000 7fff0000
+divs m uo - 7f800000 7fff0000 7fff0000
+divs p uo - 7f800000 7fff0000 7fff0000
+divs z uo - 7f800000 7fff0000 7fff0000
+divs n uo - ff800000 7fff0000 7fff0000
+divs m uo - ff800000 7fff0000 7fff0000
+divs p uo - ff800000 7fff0000 7fff0000
+divs z uo - ff800000 7fff0000 7fff0000
+divs n uo - 7fff0000 7fff0000 7fff0000
+divs m uo - 7fff0000 7fff0000 7fff0000
+divs p uo - 7fff0000 7fff0000 7fff0000
+divs z uo - 7fff0000 7fff0000 7fff0000
+divs n uo v 7f810000 00000000 7fff0000
+divs m uo v 7f810000 00000000 7fff0000
+divs p uo v 7f810000 00000000 7fff0000
+divs z uo v 7f810000 00000000 7fff0000
+divs n uo v 7f810000 80000000 7fff0000
+divs m uo v 7f810000 80000000 7fff0000
+divs p uo v 7f810000 80000000 7fff0000
+divs z uo v 7f810000 80000000 7fff0000
+divs n uo v 00000000 7f810000 7fff0000
+divs m uo v 00000000 7f810000 7fff0000
+divs p uo v 00000000 7f810000 7fff0000
+divs z uo v 00000000 7f810000 7fff0000
+divs n uo v 80000000 7f810000 7fff0000
+divs m uo v 80000000 7f810000 7fff0000
+divs p uo v 80000000 7f810000 7fff0000
+divs z uo v 80000000 7f810000 7fff0000
+divs n uo v 7f810000 3f800000 7fff0000
+divs m uo v 7f810000 3f800000 7fff0000
+divs p uo v 7f810000 3f800000 7fff0000
+divs z uo v 7f810000 3f800000 7fff0000
+divs n uo v 7f810000 bf800000 7fff0000
+divs m uo v 7f810000 bf800000 7fff0000
+divs p uo v 7f810000 bf800000 7fff0000
+divs z uo v 7f810000 bf800000 7fff0000
+divs n uo v 3f800000 7f810000 7fff0000
+divs m uo v 3f800000 7f810000 7fff0000
+divs p uo v 3f800000 7f810000 7fff0000
+divs z uo v 3f800000 7f810000 7fff0000
+divs n uo v bf800000 7f810000 7fff0000
+divs m uo v bf800000 7f810000 7fff0000
+divs p uo v bf800000 7f810000 7fff0000
+divs z uo v bf800000 7f810000 7fff0000
+divs n uo v 007fffff 7f810000 7fff0000
+divs m uo v 007fffff 7f810000 7fff0000
+divs p uo v 007fffff 7f810000 7fff0000
+divs z uo v 007fffff 7f810000 7fff0000
+divs n uo v 807fffff 7f810000 7fff0000
+divs m uo v 807fffff 7f810000 7fff0000
+divs p uo v 807fffff 7f810000 7fff0000
+divs z uo v 807fffff 7f810000 7fff0000
+divs n uo v 7f810000 007fffff 7fff0000
+divs m uo v 7f810000 007fffff 7fff0000
+divs p uo v 7f810000 007fffff 7fff0000
+divs z uo v 7f810000 007fffff 7fff0000
+divs n uo v 7f810000 807fffff 7fff0000
+divs m uo v 7f810000 807fffff 7fff0000
+divs p uo v 7f810000 807fffff 7fff0000
+divs z uo v 7f810000 807fffff 7fff0000
+divs n uo v 7f810000 00000001 7fff0000
+divs m uo v 7f810000 00000001 7fff0000
+divs p uo v 7f810000 00000001 7fff0000
+divs z uo v 7f810000 00000001 7fff0000
+divs n uo v 7f810000 80000001 7fff0000
+divs m uo v 7f810000 80000001 7fff0000
+divs p uo v 7f810000 80000001 7fff0000
+divs z uo v 7f810000 80000001 7fff0000
+divs n uo v 00000001 7f810000 7fff0000
+divs m uo v 00000001 7f810000 7fff0000
+divs p uo v 00000001 7f810000 7fff0000
+divs z uo v 00000001 7f810000 7fff0000
+divs n uo v 80000001 7f810000 7fff0000
+divs m uo v 80000001 7f810000 7fff0000
+divs p uo v 80000001 7f810000 7fff0000
+divs z uo v 80000001 7f810000 7fff0000
+divs n uo v 7f810000 7f7fffff 7fff0000
+divs m uo v 7f810000 7f7fffff 7fff0000
+divs p uo v 7f810000 7f7fffff 7fff0000
+divs z uo v 7f810000 7f7fffff 7fff0000
+divs n uo v 7f810000 ff7fffff 7fff0000
+divs m uo v 7f810000 ff7fffff 7fff0000
+divs p uo v 7f810000 ff7fffff 7fff0000
+divs z uo v 7f810000 ff7fffff 7fff0000
+divs n uo v 7f7fffff 7f810000 7fff0000
+divs m uo v 7f7fffff 7f810000 7fff0000
+divs p uo v 7f7fffff 7f810000 7fff0000
+divs z uo v 7f7fffff 7f810000 7fff0000
+divs n uo v ff7fffff 7f810000 7fff0000
+divs m uo v ff7fffff 7f810000 7fff0000
+divs p uo v ff7fffff 7f810000 7fff0000
+divs z uo v ff7fffff 7f810000 7fff0000
+divs n uo v 7f810000 7f800000 7fff0000
+divs m uo v 7f810000 7f800000 7fff0000
+divs p uo v 7f810000 7f800000 7fff0000
+divs z uo v 7f810000 7f800000 7fff0000
+divs n uo v 7f810000 ff800000 7fff0000
+divs m uo v 7f810000 ff800000 7fff0000
+divs p uo v 7f810000 ff800000 7fff0000
+divs z uo v 7f810000 ff800000 7fff0000
+divs n uo v 7f800000 7f810000 7fff0000
+divs m uo v 7f800000 7f810000 7fff0000
+divs p uo v 7f800000 7f810000 7fff0000
+divs z uo v 7f800000 7f810000 7fff0000
+divs n uo v ff800000 7f810000 7fff0000
+divs m uo v ff800000 7f810000 7fff0000
+divs p uo v ff800000 7f810000 7fff0000
+divs z uo v ff800000 7f810000 7fff0000
+divs n uo v 7fff0000 7f810000 7fff0000
+divs m uo v 7fff0000 7f810000 7fff0000
+divs p uo v 7fff0000 7f810000 7fff0000
+divs z uo v 7fff0000 7f810000 7fff0000
+divs n uo v 7f810000 7fff0000 7fff0000
+divs m uo v 7f810000 7fff0000 7fff0000
+divs p uo v 7f810000 7fff0000 7fff0000
+divs z uo v 7f810000 7fff0000 7fff0000
+divs n uo v 7f810000 7f810000 7fff0000
+divs m uo v 7f810000 7f810000 7fff0000
+divs p uo v 7f810000 7f810000 7fff0000
+divs z uo v 7f810000 7f810000 7fff0000
+divs n eq x 3fc00001 3f800001 3fc00000
+divs p eq x 3fc00001 3f800001 3fc00000
+divs z eq x 3fc00001 3f800001 3fbfffff
+divs m eq x 3fc00001 3f800001 3fbfffff
+divs n eq x 3fbfffff 3f7ffffe 3fc00001
+divs p eq x 3fbfffff 3f7ffffe 3fc00001
+divs z eq x 3fbfffff 3f7ffffe 3fc00000
+divs m eq x 3fbfffff 3f7ffffe 3fc00000
+divs n eq - 3fc00000 3f800000 3fc00000
+divs m eq - 3fc00000 3f800000 3fc00000
+divs p eq - 3fc00000 3f800000 3fc00000
+divs z eq - 3fc00000 3f800000 3fc00000
+divs n eq - 50000000 44800000 4b000000
+divs m eq - 50000000 44800000 4b000000
+divs p eq - 50000000 44800000 4b000000
+divs z eq - 50000000 44800000 4b000000
+divs n eq - 4f000000 45000000 49800000
+divs m eq - 4f000000 45000000 49800000
+divs p eq - 4f000000 45000000 49800000
+divs z eq - 4f000000 45000000 49800000
+divs n eq - 5e800000 40000000 5e000000
+divs m eq - 5e800000 40000000 5e000000
+divs p eq - 5e800000 40000000 5e000000
+divs z eq - 5e800000 40000000 5e000000
+divs n eq - 40800000 44800000 3b800000
+divs m eq - 40800000 44800000 3b800000
+divs p eq - 40800000 44800000 3b800000
+divs z eq - 40800000 44800000 3b800000
+divs n eq - 40000000 45000000 3a800000
+divs m eq - 40000000 45000000 3a800000
+divs p eq - 40000000 45000000 3a800000
+divs z eq - 40000000 45000000 3a800000
+divs n eq - cf000000 c5800000 49000000
+divs m eq - cf000000 c5800000 49000000
+divs p eq - cf000000 c5800000 49000000
+divs z eq - cf000000 c5800000 49000000
+divs n eq - cf000000 45000000 c9800000
+divs m eq - cf000000 45000000 c9800000
+divs p eq - cf000000 45000000 c9800000
+divs z eq - cf000000 45000000 c9800000
+divs n eq - cf000000 c5000000 49800000
+divs m eq - cf000000 c5000000 49800000
+divs p eq - cf000000 c5000000 49800000
+divs z eq - cf000000 c5000000 49800000
+divs n eq - 00000000 45800000 00000000
+divs m eq - 00000000 45800000 00000000
+divs p eq - 00000000 45800000 00000000
+divs z eq - 00000000 45800000 00000000
+divs n eq - 80000000 45800000 80000000
+divs m eq - 80000000 45800000 80000000
+divs p eq - 80000000 45800000 80000000
+divs z eq - 80000000 45800000 80000000
+divs n eq d 5e800000 00000000 7f800000
+divs m eq d 5e800000 00000000 7f800000
+divs p eq d 5e800000 00000000 7f800000
+divs z eq d 5e800000 00000000 7f800000
+divs n eq - cc000000 46800000 c5000000
+divs m eq - cc000000 46800000 c5000000
+divs p eq - cc000000 46800000 c5000000
+divs z eq - cc000000 46800000 c5000000
+divs n eq - 52000000 c6000000 cb800000
+divs m eq - 52000000 c6000000 cb800000
+divs p eq - 52000000 c6000000 cb800000
+divs z eq - 52000000 c6000000 cb800000
+divs n eq - c0800000 45000000 bb000000
+divs m eq - c0800000 45000000 bb000000
+divs p eq - c0800000 45000000 bb000000
+divs z eq - c0800000 45000000 bb000000
+divs n eq - 5e800000 c0000000 de000000
+divs m eq - 5e800000 c0000000 de000000
+divs p eq - 5e800000 c0000000 de000000
+divs z eq - 5e800000 c0000000 de000000
+divs n eq - 5e800000 c5000000 d9000000
+divs m eq - 5e800000 c5000000 d9000000
+divs p eq - 5e800000 c5000000 d9000000
+divs z eq - 5e800000 c5000000 d9000000
+divs n eq - de000000 c0000000 5d800000
+divs m eq - de000000 c0000000 5d800000
+divs p eq - de000000 c0000000 5d800000
+divs z eq - de000000 c0000000 5d800000
+divs n eq - c0000000 c5000000 3a800000
+divs m eq - c0000000 c5000000 3a800000
+divs p eq - c0000000 c5000000 3a800000
+divs z eq - c0000000 c5000000 3a800000
+divs n eq - c0000000 45000000 ba800000
+divs m eq - c0000000 45000000 ba800000
+divs p eq - c0000000 45000000 ba800000
+divs z eq - c0000000 45000000 ba800000
+divs n eq - 46fffc00 40000000 467ffc00
+divs m eq - 46fffc00 40000000 467ffc00
+divs p eq - 46fffc00 40000000 467ffc00
+divs z eq - 46fffc00 40000000 467ffc00
+divs n eq - 40000000 46800000 39000000
+divs m eq - 40000000 46800000 39000000
+divs p eq - 40000000 46800000 39000000
+divs z eq - 40000000 46800000 39000000
+divs n eq - 46fffe00 46fffe00 3f800000
+divs m eq - 46fffe00 46fffe00 3f800000
+divs p eq - 46fffe00 46fffe00 3f800000
+divs z eq - 46fffe00 46fffe00 3f800000
+divs n eq - 46fffe00 c6fffe00 bf800000
+divs m eq - 46fffe00 c6fffe00 bf800000
+divs p eq - 46fffe00 c6fffe00 bf800000
+divs z eq - 46fffe00 c6fffe00 bf800000
+divs n eq - 46fffa00 46fffa00 3f800000
+divs m eq - 46fffa00 46fffa00 3f800000
+divs p eq - 46fffa00 46fffa00 3f800000
+divs z eq - 46fffa00 46fffa00 3f800000
+divs n eq - 46fffe00 3f800000 46fffe00
+divs m eq - 46fffe00 3f800000 46fffe00
+divs p eq - 46fffe00 3f800000 46fffe00
+divs z eq - 46fffe00 3f800000 46fffe00
+divs n eq - 00000000 46fffe00 00000000
+divs m eq - 00000000 46fffe00 00000000
+divs p eq - 00000000 46fffe00 00000000
+divs z eq - 00000000 46fffe00 00000000
+divs n eq d 46fffe00 00000000 7f800000
+divs m eq d 46fffe00 00000000 7f800000
+divs p eq d 46fffe00 00000000 7f800000
+divs z eq d 46fffe00 00000000 7f800000
+divs n eq d c6fffe00 00000000 ff800000
+divs m eq d c6fffe00 00000000 ff800000
+divs p eq d c6fffe00 00000000 ff800000
+divs z eq d c6fffe00 00000000 ff800000
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/muld.input b/verrou/unitTest/checkUCB-vecto/inputData/muld.input
new file mode 100644
index 0000000000000000000000000000000000000000..2264e1752b01e56dd86dbc7aaf52e4453c960b1d
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/muld.input
@@ -0,0 +1,1361 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+	/* 0*0 */
+muld n eq - 0 0 0 0 0 0
+muld n eq - 80000000 0 80000000 0 0 0
+muld n eq - 0 0 80000000 0 80000000 0
+	/* 0*inf */
+muld n uo v 0 0 fff00000 0 FFF80000 0
+	/* 1+u * 1-u */
+muld n eq x 3fefffff fffffffe 3ff00000 1 3ff00000 0
+muld z eq x 3fefffff fffffffe 3ff00000 1 3fefffff ffffffff
+muld p eq x 3fefffff fffffffe 3ff00000 1 3ff00000 0
+muld m eq x 3fefffff fffffffe 3ff00000 1 3fefffff ffffffff
+	/* small * small */
+muld n eq ux 00f70001 1 00f00000 1 0 0
+muld z eq ux 00f70001 1 00f00000 1 0 0
+muld p eq ux 00f70001 1 00f00000 1 0 1
+muld m eq ux 00f70001 1 00f00000 1 0 0
+muld m eq - 00000000 00000000 00000000 00000000 00000000 00000000
+muld m eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+muld m eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+muld m eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+muld m eq - 00000000 00000000 43d00000 00000000 00000000 00000000
+muld m eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+muld m eq - 00000000 00000000 80000000 00000000 80000000 00000000
+muld m eq - 00000000 00000000 80000000 00000002 80000000 00000000
+muld m eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+muld m eq - 00000000 00000000 80100000 00000001 80000000 00000000
+muld m eq - 00000000 00000000 80200000 00000000 80000000 00000000
+muld m eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+muld m eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+muld m eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+muld m eq - 00000000 00000000 ffefffff ffffffff 80000000 00000000
+muld m eq - 00000000 00000001 00000000 00000000 00000000 00000000
+muld m eq - 00000000 00000001 3ff00000 00000000 00000000 00000001
+muld m eq - 00000000 00000001 40000000 00000000 00000000 00000002
+muld m eq - 00000000 00000001 43500000 00000000 00300000 00000000
+muld m eq - 00000000 00000001 7ff00000 00000000 7ff00000 00000000
+muld m eq - 00000000 00000006 3fe00000 00000000 00000000 00000003
+muld m eq - 00000000 00000006 bfe00000 00000000 80000000 00000003
+muld m eq - 00000000 00000008 3fc00000 00000000 00000000 00000001
+muld m eq - 000fffff fffffffc 40000000 00000000 001fffff fffffff8
+muld m eq - 000fffff ffffffff 40000000 00000000 001fffff fffffffe
+muld m eq - 00100000 00000000 00000000 00000000 00000000 00000000
+muld m eq - 00100000 00000000 3fefffff fffffffe 000fffff ffffffff
+muld m eq - 00100000 00000000 7ff00000 00000000 7ff00000 00000000
+muld m eq - 00100000 00000000 c0000000 00000000 80200000 00000000
+muld m eq - 00100000 00000001 c0000000 00000000 80200000 00000001
+muld m eq - 001fffff fffffff8 3fe00000 00000000 000fffff fffffffc
+muld m eq - 001fffff fffffffe 3fe00000 00000000 000fffff ffffffff
+muld m eq - 001fffff fffffffe bfe00000 00000000 800fffff ffffffff
+muld m eq - 36a00000 00000000 41800000 00000000 38300000 00000000
+muld m eq - 3ff00000 00000000 00000000 00000001 00000000 00000001
+muld m eq - 3ff00000 00000000 000fffff fffffffd 000fffff fffffffd
+muld m eq - 3ff00000 00000000 00200000 00000003 00200000 00000003
+muld m eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+muld m eq - 3ff00000 00000000 40000000 00000000 40000000 00000000
+muld m eq - 3ff00000 00000000 80000000 00000001 80000000 00000001
+muld m eq - 3ff00000 00000000 80000000 00000002 80000000 00000002
+muld m eq - 3ff00000 00000000 80000000 00000009 80000000 00000009
+muld m eq - 3ff00000 00000000 801fffff fffffffe 801fffff fffffffe
+muld m eq - 40000000 00000000 00000000 00000001 00000000 00000002
+muld m eq - 40000000 00000000 000fffff fffffffc 001fffff fffffff8
+muld m eq - 40000000 00000000 000fffff ffffffff 001fffff fffffffe
+muld m eq - 40000000 00000000 00100000 00000000 00200000 00000000
+muld m eq - 40000000 00000000 00100000 00000001 00200000 00000001
+muld m eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+muld m eq - 40000000 00000000 40080000 00000000 40180000 00000000
+muld m eq - 40000000 00000000 40d00000 00000000 40e00000 00000000
+muld m eq - 40000000 00000000 43c00000 00000000 43d00000 00000000
+muld m eq - 40000000 00000000 7fd00000 00000000 7fe00000 00000000
+muld m eq - 40000000 00000000 7fdfffff ffffffff 7fefffff ffffffff
+muld m eq - 40000000 00000000 800fffff fffffffd 801fffff fffffffa
+muld m eq - 40000000 00000000 80100000 00000003 80200000 00000003
+muld m eq - 40000000 00000000 80100000 00000005 80200000 00000005
+muld m eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+muld m eq - 40000000 00000000 ffcfffff fffffffd ffdfffff fffffffd
+muld m eq - 40000000 00000000 ffd00000 00000003 ffe00000 00000003
+muld m eq - 40080000 00000000 00000000 00000002 00000000 00000006
+muld m eq - 40080000 00000000 40000000 00000000 40180000 00000000
+muld m eq - 40080000 00000000 40080000 00000000 40220000 00000000
+muld m eq - 40080000 00000000 c0000000 00000000 c0180000 00000000
+muld m eq - 40100000 00000000 00000000 00000002 00000000 00000008
+muld m eq - 40100000 00000000 43b00000 00000000 43d00000 00000000
+muld m eq - 40100000 00000000 7fcfffff ffffffff 7fefffff ffffffff
+muld m eq - 40100000 00000000 ffcfffff ffffffff ffefffff ffffffff
+muld m eq - 40140000 00000000 00000000 00000000 00000000 00000000
+muld m eq - 40140000 00000000 7ff00000 00000000 7ff00000 00000000
+muld m eq - 40140000 00000000 80000000 00000001 80000000 00000005
+muld m eq - 401c0000 00000000 80000000 00000000 80000000 00000000
+muld m eq - 401c0000 00000000 fff00000 00000000 fff00000 00000000
+muld m eq - 40a00000 00000000 41e00000 00000000 42900000 00000000
+muld m eq - 40b00000 00000000 00000000 00000000 00000000 00000000
+muld m eq - 40b00000 00000000 43d00000 00000000 44900000 00000000
+muld m eq - 40b00000 00000000 80000000 00000000 80000000 00000000
+muld m eq - 40b00000 00000000 c0000000 00000000 c0c00000 00000000
+muld m eq - 40b00000 00000000 c3c00000 00000000 c4800000 00000000
+muld m eq - 40c00000 00000000 41d00000 00000000 42a00000 00000000
+muld m eq - 40d00000 00000000 40000000 00000000 40e00000 00000000
+muld m eq - 40d00000 00000000 c1600000 00000000 c2400000 00000000
+muld m eq - 7fcfffff fffffffd 40100000 00000000 7fefffff fffffffd
+muld m eq - 7fcfffff fffffffd c0100000 00000000 ffefffff fffffffd
+muld m eq - 7fd00000 00000000 c0000000 00000000 ffe00000 00000000
+muld m eq - 7fdfffff ffffffff c0000000 00000000 ffefffff ffffffff
+muld m eq - 7fe00000 00000000 00000000 00000000 00000000 00000000
+muld m eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld m eq - 7fefffff ffffffff 00000000 00000000 00000000 00000000
+muld m eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+muld m eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+muld m eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+muld m eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+muld m eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld m eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+muld m eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+muld m eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+muld m eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+muld m eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+muld m eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+muld m eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+muld m eq - 7ff00000 00000000 fff00000 00000000 fff00000 00000000
+muld m eq - 80000000 00000000 00000000 00000000 80000000 00000000
+muld m eq - 80000000 00000000 40180000 00000000 80000000 00000000
+muld m eq - 80000000 00000000 7fefffff ffffffff 80000000 00000000
+muld m eq - 80000000 00000000 80000000 00000000 00000000 00000000
+muld m eq - 80000000 00000000 80000000 00000004 00000000 00000000
+muld m eq - 80000000 00000000 80100000 00000000 00000000 00000000
+muld m eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+muld m eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+muld m eq - 80000000 00000001 40140000 00000000 80000000 00000005
+muld m eq - 80000000 00000002 3ff00000 00000000 80000000 00000002
+muld m eq - 80000000 00000003 00000000 00000000 80000000 00000000
+muld m eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+muld m eq - 80000000 00000004 bff00000 00000000 00000000 00000004
+muld m eq - 80000000 00000008 3fc00000 00000000 80000000 00000001
+muld m eq - 800fffff fffffffd c0000000 00000000 001fffff fffffffa
+muld m eq - 800fffff ffffffff 00000000 00000000 80000000 00000000
+muld m eq - 800fffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld m eq - 800fffff ffffffff 80000000 00000000 00000000 00000000
+muld m eq - 800fffff ffffffff bff00000 00000000 000fffff ffffffff
+muld m eq - 800fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld m eq - 80100000 00000001 00000000 00000000 80000000 00000000
+muld m eq - 80100000 00000001 7ff00000 00000000 fff00000 00000000
+muld m eq - 80100000 00000001 bff00000 00000000 00100000 00000001
+muld m eq - 801fffff fffffffc 3fe00000 00000000 800fffff fffffffe
+muld m eq - 801fffff fffffffc bfe00000 00000000 000fffff fffffffe
+muld m eq - 801fffff fffffffe 3ff00000 00000000 801fffff fffffffe
+muld m eq - 801fffff ffffffff 80000000 00000000 00000000 00000000
+muld m eq - 801fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld m eq - 80200000 00000000 00000000 00000000 80000000 00000000
+muld m eq - 80200000 00000000 7ff00000 00000000 fff00000 00000000
+muld m eq - bff00000 00000000 00000000 00000009 80000000 00000009
+muld m eq - bff00000 00000000 00100000 00000009 80100000 00000009
+muld m eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+muld m eq - bff00000 00000000 40000000 00000000 c0000000 00000000
+muld m eq - bff00000 00000000 80000000 00000004 00000000 00000004
+muld m eq - bff00000 00000000 800fffff ffffffff 000fffff ffffffff
+muld m eq - bff00000 00000000 80100000 00000001 00100000 00000001
+muld m eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+muld m eq - bff00000 00000000 c0000000 00000000 40000000 00000000
+muld m eq - c0000000 00000000 00000000 00000000 80000000 00000000
+muld m eq - c0000000 00000000 000fffff fffffffd 801fffff fffffffa
+muld m eq - c0000000 00000000 00100000 00000001 80200000 00000001
+muld m eq - c0000000 00000000 00100000 00000005 80200000 00000005
+muld m eq - c0000000 00000000 00100000 00000009 80200000 00000009
+muld m eq - c0000000 00000000 40080000 00000000 c0180000 00000000
+muld m eq - c0000000 00000000 40d00000 00000000 c0e00000 00000000
+muld m eq - c0000000 00000000 43c00000 00000000 c3d00000 00000000
+muld m eq - c0000000 00000000 7fcfffff ffffffff ffdfffff ffffffff
+muld m eq - c0000000 00000000 7fd00000 00000001 ffe00000 00000001
+muld m eq - c0000000 00000000 7ff00000 00000000 fff00000 00000000
+muld m eq - c0000000 00000000 800fffff fffffffd 001fffff fffffffa
+muld m eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+muld m eq - c0000000 00000000 c0080000 00000000 40180000 00000000
+muld m eq - c0000000 00000000 c3c00000 00000000 43d00000 00000000
+muld m eq - c0080000 00000000 40080000 00000000 c0220000 00000000
+muld m eq - c0080000 00000000 c0000000 00000000 40180000 00000000
+muld m eq - c0080000 00000000 c0080000 00000000 40220000 00000000
+muld m eq - c0100000 00000000 43c00000 00000000 c3e00000 00000000
+muld m eq - c0100000 00000000 7fcfffff ffffffff ffefffff ffffffff
+muld m eq - c0100000 00000000 80000000 00000000 00000000 00000000
+muld m eq - c0100000 00000000 ffcfffff ffffffff 7fefffff ffffffff
+muld m eq - c0100000 00000000 fff00000 00000000 7ff00000 00000000
+muld m eq - c0a00000 00000000 41e00000 00000000 c2900000 00000000
+muld m eq - c0a00000 00000000 c0000000 00000000 40b00000 00000000
+muld m eq - c0a00000 00000000 c1e00000 00000000 42900000 00000000
+muld m eq - c0a00000 00000000 c1f00000 00000000 42a00000 00000000
+muld m eq - c0d00000 00000000 42400000 00000000 c3200000 00000000
+muld m eq - c0d00000 00000000 c0000000 00000000 40e00000 00000000
+muld m eq - ffcfffff fffffffd 40100000 00000000 ffefffff fffffffd
+muld m eq - ffcfffff fffffffd c0100000 00000000 7fefffff fffffffd
+muld m eq - ffcfffff ffffffff 00000000 00000000 80000000 00000000
+muld m eq - ffcfffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld m eq - ffd00000 00000000 00000000 00000000 80000000 00000000
+muld m eq - ffd00000 00000000 7ff00000 00000000 fff00000 00000000
+muld m eq - ffdfffff ffffffff 80000000 00000000 00000000 00000000
+muld m eq - ffefffff ffffffff 80000000 00000000 00000000 00000000
+muld m eq - ffefffff ffffffff fff00000 00000000 7ff00000 00000000
+muld m eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+muld m eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+muld m eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+muld m eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+muld m eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+muld m eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+muld m eq - fff00000 00000000 fff00000 00000000 7ff00000 00000000
+muld m eq x 3ff00000 00000001 3ff00000 00000001 3ff00000 00000002
+muld m eq x 3ff00000 00000001 bff00000 00000001 bff00000 00000003
+muld m eq x 3ff00000 00000001 bff00000 00000002 bff00000 00000004
+muld m eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000003
+muld m eq x 4007ffff ffffffff 3fefffff fffffffd 4007ffff fffffffc
+muld m eq x 4007ffff ffffffff 3fefffff fffffffe 4007ffff fffffffd
+muld m eq x 4007ffff ffffffff 3fefffff ffffffff 4007ffff fffffffe
+muld m eq x 4007ffff ffffffff bfefffff fffffffd c007ffff fffffffd
+muld m eq x 40080000 00000001 3ff00000 00000001 40080000 00000002
+muld m eq x 40080000 00000001 3ff00000 00000003 40080000 00000005
+muld m eq x 40080000 00000001 bff00000 00000003 c0080000 00000006
+muld m eq x 4013ffff ffffffff 3fefffff ffffffff 4013ffff fffffffe
+muld m eq x 40140000 00000001 3ff00000 00000001 40140000 00000002
+muld m eq x 401bffff ffffffff 3fefffff fffffffc 401bffff fffffffb
+muld m eq x 401bffff ffffffff 3fefffff ffffffff 401bffff fffffffe
+muld m eq x 401c0000 00000001 3ff00000 00000001 401c0000 00000002
+muld m eq x bfefffff ffffffff ffefffff ffffffff 7fefffff fffffffe
+muld m eq x bff00000 00000001 3ff00000 00000001 bff00000 00000003
+muld m eq x bff00000 00000001 bff00000 00000001 3ff00000 00000002
+muld m eq x bff00000 00000001 bff00000 00000002 3ff00000 00000003
+muld m eq x bff00000 00000002 3ff00000 00000001 bff00000 00000004
+muld m eq x bff00000 00000002 bff00000 00000001 3ff00000 00000003
+muld m eq x c007ffff ffffffff 3fefffff ffffffff c007ffff ffffffff
+muld m eq x c0080000 00000001 3ff00000 00000001 c0080000 00000003
+muld m eq x c013ffff ffffffff bfefffff ffffffff 4013ffff fffffffe
+muld m eq x c0140000 00000001 bff00000 00000001 40140000 00000002
+muld m eq x ffcfffff fffffff9 c00fffff ffffffff 7fefffff fffffff8
+muld m eq x ffcfffff ffffffff 40000000 00000001 ffe00000 00000001
+muld m eq x ffdfffff ffffffff 3ff00000 00000001 ffe00000 00000001
+muld m eq x?u 000fffff fffffff8 bff00000 00000008 80100000 00000000
+muld m eq x?u 800fffff ffffffff 3ff00000 00000001 80100000 00000000
+muld m eq xo 3ff00000 00000002 7fefffff fffffffe 7fefffff ffffffff
+muld m eq xo 40180000 00000000 7fe00000 00000000 7fefffff ffffffff
+muld m eq xo 7fdfffff fffffffd c0000000 00000008 fff00000 00000000
+muld m eq xo 7fe00000 00000000 40000000 00000001 7fefffff ffffffff
+muld m eq xo 7fe00000 00000000 40180000 00000000 7fefffff ffffffff
+muld m eq xo 7fe00000 00000000 40180000 00000001 7fefffff ffffffff
+muld m eq xo 7fe00000 00000000 7fd00000 00000006 7fefffff ffffffff
+muld m eq xo 7fe00000 00000000 ffd00000 00000000 fff00000 00000000
+muld m eq xo 7fe00000 00000000 ffd00000 00000004 fff00000 00000000
+muld m eq xo 7fe00000 00000000 ffe00000 00000000 fff00000 00000000
+muld m eq xo 7fe00000 00000009 c0180000 00000002 fff00000 00000000
+muld m eq xo c007ffff fffffffe 7fe00000 00000000 fff00000 00000000
+muld m eq xo c01bffff fffffff9 7fe00000 00000000 fff00000 00000000
+muld m eq xo c01c0000 00000000 ffe00000 00000000 7fefffff ffffffff
+muld m eq xo c0220000 00000000 7fe00000 00000000 fff00000 00000000
+muld m eq xo ffcfffff fffffff9 7fe00000 00000000 fff00000 00000000
+muld m eq xo ffcfffff fffffff9 ffefffff ffffffff 7fefffff ffffffff
+muld m eq xo ffdfffff fffffff7 7fd00000 00000001 fff00000 00000000
+muld m eq xo ffefffff fffffffd 7fe00000 00000000 fff00000 00000000
+muld m eq xo ffefffff fffffffd c0080000 00000001 7fefffff ffffffff
+muld m eq xu 00000000 00000001 00000000 00000001 00000000 00000000
+muld m eq xu 00000000 00000001 3fe00000 00000000 00000000 00000000
+muld m eq xu 00000000 00000001 3fefffff ffffffff 00000000 00000000
+muld m eq xu 00000000 00000001 80000000 00000001 80000000 00000001
+muld m eq xu 00000000 00000001 bfefffff ffffffff 80000000 00000001
+muld m eq xu 000fffff fffffff7 80200000 00000003 80000000 00000001
+muld m eq xu 000fffff fffffff8 bff00000 00000001 800fffff fffffff9
+muld m eq xu 000fffff fffffffc 3ff00000 00000001 000fffff fffffffc
+muld m eq xu 000fffff fffffffe 3fefffff fffffffc 000fffff fffffffc
+muld m eq xu 000fffff fffffffe bff00000 00000001 800fffff ffffffff
+muld m eq xu 000fffff ffffffff 000fffff fffffffe 00000000 00000000
+muld m eq xu 000fffff ffffffff 3ff00000 00000001 000fffff ffffffff
+muld m eq xu 00100000 00000000 00100000 00000000 00000000 00000000
+muld m eq xu 00100000 00000000 80100000 00000000 80000000 00000001
+muld m eq xu 00100000 00000001 3fefffff fffffffa 000fffff fffffffd
+muld m eq xu 00100000 00000001 3fefffff fffffffe 000fffff ffffffff
+muld m eq xu 001fffff ffffffff 3fe00000 00000000 000fffff ffffffff
+muld m eq xu 001fffff ffffffff bfe00000 00000000 80100000 00000000
+muld m eq xu 00f70001 00000001 00f00000 00000001 00000000 00000000
+muld m eq xu 20000000 02000000 1fffffff fbffffff 000fffff ffffffff
+muld m eq xu 20000000 02000000 1fffffff fc000000 000fffff ffffffff
+muld m eq xu 3fe00000 00000000 00000000 00000001 00000000 00000000
+muld m eq xu 3fe00000 00000000 80000000 00000001 80000000 00000001
+muld m eq xu 40040000 00000000 00000000 00000001 00000000 00000002
+muld m eq xu 800fffff fffffff7 00200000 00000003 80000000 00000001
+muld m eq xu 800fffff fffffff7 3ff00000 00000001 800fffff fffffff8
+muld m eq xu 800fffff ffffffff 800fffff fffffffe 00000000 00000000
+muld m eq xu 80100000 00000000 00200000 00000000 80000000 00000001
+muld m eq xu 80100000 00000000 80100000 00000000 00000000 00000000
+muld m eq xu 801fffff ffffffff bfe00000 00000000 000fffff ffffffff
+muld m eq xu bfe00000 00000001 00000000 00000001 80000000 00000001
+muld m eq xu c0040000 00000001 00000000 00000001 80000000 00000003
+muld m uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld m uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld m uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld m uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+muld m uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+muld m uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld m uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+muld m uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+muld m uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+muld m uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld m uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+muld m uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld m uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld m uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld m uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo v 00000000 00000000 7ff00000 00000000 7fffe000 00000000
+muld m uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 00000000 00000000 fff00000 00000000 ffffe000 00000000
+muld m uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld m uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld m uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld m uo v 7ff00000 00000000 00000000 00000000 7fffe000 00000000
+muld m uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 7ff00000 00000000 80000000 00000000 ffffe000 00000000
+muld m uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+muld m uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+muld m uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld m uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+muld m uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+muld m uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+muld m uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld m uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+muld m uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 80000000 00000000 7ff00000 00000000 ffffe000 00000000
+muld m uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v 80000000 00000000 fff00000 00000000 7fffe000 00000000
+muld m uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld m uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld m uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld m uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld m uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+muld n eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+muld n eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+muld n eq - 00000000 00000000 43d00000 00000000 00000000 00000000
+muld n eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+muld n eq - 00000000 00000000 80000000 00000002 80000000 00000000
+muld n eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+muld n eq - 00000000 00000000 80100000 00000001 80000000 00000000
+muld n eq - 00000000 00000000 80200000 00000000 80000000 00000000
+muld n eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+muld n eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+muld n eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+muld n eq - 00000000 00000000 ffefffff ffffffff 80000000 00000000
+muld n eq - 00000000 00000001 00000000 00000000 00000000 00000000
+muld n eq - 00000000 00000001 3ff00000 00000000 00000000 00000001
+muld n eq - 00000000 00000001 40000000 00000000 00000000 00000002
+muld n eq - 00000000 00000001 43500000 00000000 00300000 00000000
+muld n eq - 00000000 00000001 7ff00000 00000000 7ff00000 00000000
+muld n eq - 00000000 00000006 3fe00000 00000000 00000000 00000003
+muld n eq - 00000000 00000006 bfe00000 00000000 80000000 00000003
+muld n eq - 00000000 00000008 3fc00000 00000000 00000000 00000001
+muld n eq - 000fffff fffffffc 40000000 00000000 001fffff fffffff8
+muld n eq - 000fffff ffffffff 40000000 00000000 001fffff fffffffe
+muld n eq - 00100000 00000000 00000000 00000000 00000000 00000000
+muld n eq - 00100000 00000000 3fefffff fffffffe 000fffff ffffffff
+muld n eq - 00100000 00000000 7ff00000 00000000 7ff00000 00000000
+muld n eq - 00100000 00000000 c0000000 00000000 80200000 00000000
+muld n eq - 00100000 00000001 c0000000 00000000 80200000 00000001
+muld n eq - 001fffff fffffff8 3fe00000 00000000 000fffff fffffffc
+muld n eq - 001fffff fffffffe 3fe00000 00000000 000fffff ffffffff
+muld n eq - 001fffff fffffffe bfe00000 00000000 800fffff ffffffff
+muld n eq - 36a00000 00000000 41800000 00000000 38300000 00000000
+muld n eq - 3ff00000 00000000 00000000 00000001 00000000 00000001
+muld n eq - 3ff00000 00000000 000fffff fffffffd 000fffff fffffffd
+muld n eq - 3ff00000 00000000 00200000 00000003 00200000 00000003
+muld n eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+muld n eq - 3ff00000 00000000 40000000 00000000 40000000 00000000
+muld n eq - 3ff00000 00000000 80000000 00000001 80000000 00000001
+muld n eq - 3ff00000 00000000 80000000 00000002 80000000 00000002
+muld n eq - 3ff00000 00000000 80000000 00000009 80000000 00000009
+muld n eq - 3ff00000 00000000 801fffff fffffffe 801fffff fffffffe
+muld n eq - 40000000 00000000 00000000 00000001 00000000 00000002
+muld n eq - 40000000 00000000 000fffff fffffffc 001fffff fffffff8
+muld n eq - 40000000 00000000 000fffff ffffffff 001fffff fffffffe
+muld n eq - 40000000 00000000 00100000 00000000 00200000 00000000
+muld n eq - 40000000 00000000 00100000 00000001 00200000 00000001
+muld n eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+muld n eq - 40000000 00000000 40080000 00000000 40180000 00000000
+muld n eq - 40000000 00000000 40d00000 00000000 40e00000 00000000
+muld n eq - 40000000 00000000 43c00000 00000000 43d00000 00000000
+muld n eq - 40000000 00000000 7fd00000 00000000 7fe00000 00000000
+muld n eq - 40000000 00000000 7fdfffff ffffffff 7fefffff ffffffff
+muld n eq - 40000000 00000000 800fffff fffffffd 801fffff fffffffa
+muld n eq - 40000000 00000000 80100000 00000003 80200000 00000003
+muld n eq - 40000000 00000000 80100000 00000005 80200000 00000005
+muld n eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+muld n eq - 40000000 00000000 ffcfffff fffffffd ffdfffff fffffffd
+muld n eq - 40000000 00000000 ffd00000 00000003 ffe00000 00000003
+muld n eq - 40080000 00000000 00000000 00000002 00000000 00000006
+muld n eq - 40080000 00000000 40000000 00000000 40180000 00000000
+muld n eq - 40080000 00000000 40080000 00000000 40220000 00000000
+muld n eq - 40080000 00000000 c0000000 00000000 c0180000 00000000
+muld n eq - 40100000 00000000 00000000 00000002 00000000 00000008
+muld n eq - 40100000 00000000 43b00000 00000000 43d00000 00000000
+muld n eq - 40100000 00000000 7fcfffff ffffffff 7fefffff ffffffff
+muld n eq - 40100000 00000000 ffcfffff ffffffff ffefffff ffffffff
+muld n eq - 40140000 00000000 00000000 00000000 00000000 00000000
+muld n eq - 40140000 00000000 7ff00000 00000000 7ff00000 00000000
+muld n eq - 40140000 00000000 80000000 00000001 80000000 00000005
+muld n eq - 401c0000 00000000 80000000 00000000 80000000 00000000
+muld n eq - 401c0000 00000000 fff00000 00000000 fff00000 00000000
+muld n eq - 40a00000 00000000 41e00000 00000000 42900000 00000000
+muld n eq - 40b00000 00000000 00000000 00000000 00000000 00000000
+muld n eq - 40b00000 00000000 43d00000 00000000 44900000 00000000
+muld n eq - 40b00000 00000000 80000000 00000000 80000000 00000000
+muld n eq - 40b00000 00000000 c0000000 00000000 c0c00000 00000000
+muld n eq - 40b00000 00000000 c3c00000 00000000 c4800000 00000000
+muld n eq - 40c00000 00000000 41d00000 00000000 42a00000 00000000
+muld n eq - 40d00000 00000000 40000000 00000000 40e00000 00000000
+muld n eq - 40d00000 00000000 c1600000 00000000 c2400000 00000000
+muld n eq - 7fcfffff fffffffd 40100000 00000000 7fefffff fffffffd
+muld n eq - 7fcfffff fffffffd c0100000 00000000 ffefffff fffffffd
+muld n eq - 7fd00000 00000000 c0000000 00000000 ffe00000 00000000
+muld n eq - 7fdfffff ffffffff c0000000 00000000 ffefffff ffffffff
+muld n eq - 7fe00000 00000000 00000000 00000000 00000000 00000000
+muld n eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld n eq - 7fefffff ffffffff 00000000 00000000 00000000 00000000
+muld n eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+muld n eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+muld n eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+muld n eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+muld n eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld n eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+muld n eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+muld n eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+muld n eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+muld n eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+muld n eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+muld n eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+muld n eq - 7ff00000 00000000 fff00000 00000000 fff00000 00000000
+muld n eq - 80000000 00000000 00000000 00000000 80000000 00000000
+muld n eq - 80000000 00000000 40180000 00000000 80000000 00000000
+muld n eq - 80000000 00000000 7fefffff ffffffff 80000000 00000000
+muld n eq - 80000000 00000000 80000000 00000004 00000000 00000000
+muld n eq - 80000000 00000000 80100000 00000000 00000000 00000000
+muld n eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+muld n eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+muld n eq - 80000000 00000001 40140000 00000000 80000000 00000005
+muld n eq - 80000000 00000002 3ff00000 00000000 80000000 00000002
+muld n eq - 80000000 00000003 00000000 00000000 80000000 00000000
+muld n eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+muld n eq - 80000000 00000004 bff00000 00000000 00000000 00000004
+muld n eq - 80000000 00000008 3fc00000 00000000 80000000 00000001
+muld n eq - 800fffff fffffffd c0000000 00000000 001fffff fffffffa
+muld n eq - 800fffff ffffffff 00000000 00000000 80000000 00000000
+muld n eq - 800fffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld n eq - 800fffff ffffffff 80000000 00000000 00000000 00000000
+muld n eq - 800fffff ffffffff bff00000 00000000 000fffff ffffffff
+muld n eq - 800fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld n eq - 80100000 00000001 00000000 00000000 80000000 00000000
+muld n eq - 80100000 00000001 7ff00000 00000000 fff00000 00000000
+muld n eq - 80100000 00000001 bff00000 00000000 00100000 00000001
+muld n eq - 801fffff fffffffc 3fe00000 00000000 800fffff fffffffe
+muld n eq - 801fffff fffffffc bfe00000 00000000 000fffff fffffffe
+muld n eq - 801fffff fffffffe 3ff00000 00000000 801fffff fffffffe
+muld n eq - 801fffff ffffffff 80000000 00000000 00000000 00000000
+muld n eq - 801fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld n eq - 80200000 00000000 00000000 00000000 80000000 00000000
+muld n eq - 80200000 00000000 7ff00000 00000000 fff00000 00000000
+muld n eq - bff00000 00000000 00000000 00000009 80000000 00000009
+muld n eq - bff00000 00000000 00100000 00000009 80100000 00000009
+muld n eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+muld n eq - bff00000 00000000 40000000 00000000 c0000000 00000000
+muld n eq - bff00000 00000000 80000000 00000004 00000000 00000004
+muld n eq - bff00000 00000000 800fffff ffffffff 000fffff ffffffff
+muld n eq - bff00000 00000000 80100000 00000001 00100000 00000001
+muld n eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+muld n eq - bff00000 00000000 c0000000 00000000 40000000 00000000
+muld n eq - c0000000 00000000 00000000 00000000 80000000 00000000
+muld n eq - c0000000 00000000 000fffff fffffffd 801fffff fffffffa
+muld n eq - c0000000 00000000 00100000 00000001 80200000 00000001
+muld n eq - c0000000 00000000 00100000 00000005 80200000 00000005
+muld n eq - c0000000 00000000 00100000 00000009 80200000 00000009
+muld n eq - c0000000 00000000 40080000 00000000 c0180000 00000000
+muld n eq - c0000000 00000000 40d00000 00000000 c0e00000 00000000
+muld n eq - c0000000 00000000 43c00000 00000000 c3d00000 00000000
+muld n eq - c0000000 00000000 7fcfffff ffffffff ffdfffff ffffffff
+muld n eq - c0000000 00000000 7fd00000 00000001 ffe00000 00000001
+muld n eq - c0000000 00000000 7ff00000 00000000 fff00000 00000000
+muld n eq - c0000000 00000000 800fffff fffffffd 001fffff fffffffa
+muld n eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+muld n eq - c0000000 00000000 c0080000 00000000 40180000 00000000
+muld n eq - c0000000 00000000 c3c00000 00000000 43d00000 00000000
+muld n eq - c0080000 00000000 40080000 00000000 c0220000 00000000
+muld n eq - c0080000 00000000 c0000000 00000000 40180000 00000000
+muld n eq - c0080000 00000000 c0080000 00000000 40220000 00000000
+muld n eq - c0100000 00000000 43c00000 00000000 c3e00000 00000000
+muld n eq - c0100000 00000000 7fcfffff ffffffff ffefffff ffffffff
+muld n eq - c0100000 00000000 80000000 00000000 00000000 00000000
+muld n eq - c0100000 00000000 ffcfffff ffffffff 7fefffff ffffffff
+muld n eq - c0100000 00000000 fff00000 00000000 7ff00000 00000000
+muld n eq - c0a00000 00000000 41e00000 00000000 c2900000 00000000
+muld n eq - c0a00000 00000000 c0000000 00000000 40b00000 00000000
+muld n eq - c0a00000 00000000 c1e00000 00000000 42900000 00000000
+muld n eq - c0a00000 00000000 c1f00000 00000000 42a00000 00000000
+muld n eq - c0d00000 00000000 42400000 00000000 c3200000 00000000
+muld n eq - c0d00000 00000000 c0000000 00000000 40e00000 00000000
+muld n eq - ffcfffff fffffffd 40100000 00000000 ffefffff fffffffd
+muld n eq - ffcfffff fffffffd c0100000 00000000 7fefffff fffffffd
+muld n eq - ffcfffff ffffffff 00000000 00000000 80000000 00000000
+muld n eq - ffcfffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld n eq - ffd00000 00000000 00000000 00000000 80000000 00000000
+muld n eq - ffd00000 00000000 7ff00000 00000000 fff00000 00000000
+muld n eq - ffdfffff ffffffff 80000000 00000000 00000000 00000000
+muld n eq - ffefffff ffffffff 80000000 00000000 00000000 00000000
+muld n eq - ffefffff ffffffff fff00000 00000000 7ff00000 00000000
+muld n eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+muld n eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+muld n eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+muld n eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+muld n eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+muld n eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+muld n eq - fff00000 00000000 fff00000 00000000 7ff00000 00000000
+muld n eq x 3ff00000 00000001 3ff00000 00000001 3ff00000 00000002
+muld n eq x 3ff00000 00000001 bff00000 00000001 bff00000 00000002
+muld n eq x 3ff00000 00000001 bff00000 00000002 bff00000 00000003
+muld n eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000003
+muld n eq x 4007ffff ffffffff 3fefffff fffffffd 4007ffff fffffffd
+muld n eq x 4007ffff ffffffff 3fefffff fffffffe 4007ffff fffffffe
+muld n eq x 4007ffff ffffffff 3fefffff ffffffff 4007ffff fffffffe
+muld n eq x 4007ffff ffffffff bfefffff fffffffd c007ffff fffffffd
+muld n eq x 40080000 00000001 3ff00000 00000001 40080000 00000003
+muld n eq x 40080000 00000001 3ff00000 00000003 40080000 00000006
+muld n eq x 40080000 00000001 bff00000 00000003 c0080000 00000006
+muld n eq x 4013ffff ffffffff 3fefffff ffffffff 4013ffff fffffffe
+muld n eq x 40140000 00000001 3ff00000 00000001 40140000 00000002
+muld n eq x 401bffff ffffffff 3fefffff fffffffc 401bffff fffffffc
+muld n eq x 401bffff ffffffff 3fefffff ffffffff 401bffff fffffffe
+muld n eq x 401c0000 00000001 3ff00000 00000001 401c0000 00000003
+muld n eq x bfefffff ffffffff ffefffff ffffffff 7fefffff fffffffe
+muld n eq x bff00000 00000001 3ff00000 00000001 bff00000 00000002
+muld n eq x bff00000 00000001 bff00000 00000001 3ff00000 00000002
+muld n eq x bff00000 00000001 bff00000 00000002 3ff00000 00000003
+muld n eq x bff00000 00000002 3ff00000 00000001 bff00000 00000003
+muld n eq x bff00000 00000002 bff00000 00000001 3ff00000 00000003
+muld n eq x c007ffff ffffffff 3fefffff ffffffff c007ffff fffffffe
+muld n eq x c0080000 00000001 3ff00000 00000001 c0080000 00000003
+muld n eq x c013ffff ffffffff bfefffff ffffffff 4013ffff fffffffe
+muld n eq x c0140000 00000001 bff00000 00000001 40140000 00000002
+muld n eq x ffcfffff fffffff9 c00fffff ffffffff 7fefffff fffffff8
+muld n eq x ffcfffff ffffffff 40000000 00000001 ffe00000 00000000
+muld n eq x ffdfffff ffffffff 3ff00000 00000001 ffe00000 00000000
+muld n eq x?u 000fffff fffffff8 3ff00000 00000008 00100000 00000000
+muld n eq x?u 000fffff fffffff8 bff00000 00000008 80100000 00000000
+muld n eq x?u 000fffff ffffffff 3ff00000 00000001 00100000 00000000
+muld n eq x?u 00100000 00000001 3fefffff fffffffe 00100000 00000000
+muld n eq x?u 00100000 00000002 3fefffff fffffffc 00100000 00000000
+muld n eq x?u 20000000 02000000 1fffffff fc000000 00100000 00000000
+muld n eq x?u 800fffff ffffffff 3ff00000 00000001 80100000 00000000
+muld n eq xo 3ff00000 00000002 7fefffff fffffffe 7ff00000 00000000
+muld n eq xo 40000000 00000000 7fe00000 00000000 7ff00000 00000000
+muld n eq xo 7fdfffff fffffffd c0000000 00000008 fff00000 00000000
+muld n eq xo 7fe00000 00000000 40000000 00000000 7ff00000 00000000
+muld n eq xo 7fe00000 00000000 7fe00000 00000000 7ff00000 00000000
+muld n eq xo 7fe00000 00000000 7fefffff fffffffe 7ff00000 00000000
+muld n eq xo 7fe00000 00000000 ffd00000 00000000 fff00000 00000000
+muld n eq xo 7fe00000 00000000 ffd00000 00000004 fff00000 00000000
+muld n eq xo 7fe00000 00000000 ffe00000 00000000 fff00000 00000000
+muld n eq xo 7fe00000 00000009 7fefffff fffffffa 7ff00000 00000000
+muld n eq xo 7fe00000 00000009 c0180000 00000002 fff00000 00000000
+muld n eq xo c007ffff fffffffe 7fe00000 00000000 fff00000 00000000
+muld n eq xo c0080000 00000000 ffe00000 00000000 7ff00000 00000000
+muld n eq xo c013ffff fffffffe ffe00000 00000000 7ff00000 00000000
+muld n eq xo c01bffff fffffff9 7fe00000 00000000 fff00000 00000000
+muld n eq xo c0220000 00000000 7fe00000 00000000 fff00000 00000000
+muld n eq xo c0220000 00000001 ffe00000 00000000 7ff00000 00000000
+muld n eq xo ffcfffff fffffff9 7fe00000 00000000 fff00000 00000000
+muld n eq xo ffdfffff fffffff7 7fd00000 00000001 fff00000 00000000
+muld n eq xo ffe00000 00000005 ffe00000 00000001 7ff00000 00000000
+muld n eq xo ffefffff fffffffd 7fe00000 00000000 fff00000 00000000
+muld n eq xo ffefffff fffffffd c0080000 00000001 7ff00000 00000000
+muld n eq xo ffefffff fffffffd ffe00000 00000001 7ff00000 00000000
+muld n eq xo ffefffff ffffffff ffefffff ffffffff 7ff00000 00000000
+muld n eq xu 00000000 00000001 00000000 00000001 00000000 00000000
+muld n eq xu 00000000 00000001 3fe00000 00000000 00000000 00000000
+muld n eq xu 00000000 00000001 3fefffff ffffffff 00000000 00000001
+muld n eq xu 00000000 00000001 bfefffff ffffffff 80000000 00000001
+muld n eq xu 000fffff fffffff7 80200000 00000003 80000000 00000000
+muld n eq xu 000fffff fffffff8 3ff00000 00000001 000fffff fffffff9
+muld n eq xu 000fffff fffffff8 bff00000 00000001 800fffff fffffff9
+muld n eq xu 000fffff fffffffe 3fefffff fffffffc 000fffff fffffffc
+muld n eq xu 000fffff fffffffe 3ff00000 00000001 000fffff ffffffff
+muld n eq xu 000fffff fffffffe bff00000 00000001 800fffff ffffffff
+muld n eq xu 000fffff ffffffff 000fffff fffffffe 00000000 00000000
+muld n eq xu 00100000 00000000 00100000 00000000 00000000 00000000
+muld n eq xu 00100000 00000000 80100000 00000000 80000000 00000000
+muld n eq xu 00100000 00000001 3fefffff fffffffa 000fffff fffffffe
+muld n eq xu 001fffff ffffffff 3fe00000 00000000 00100000 00000000
+muld n eq xu 001fffff ffffffff bfe00000 00000000 80100000 00000000
+muld n eq xu 00f70001 00000001 00f00000 00000001 00000000 00000000
+muld n eq xu 20000000 02000000 1fffffff fbffffff 000fffff ffffffff
+muld n eq xu 3fe00000 00000000 00000000 00000001 00000000 00000000
+muld n eq xu 3fe00000 00000000 80000000 00000001 80000000 00000000
+muld n eq xu 3ff7ffff ffffffff 80000000 00000001 80000000 00000001
+muld n eq xu 40040000 00000000 00000000 00000001 00000000 00000002
+muld n eq xu 400bffff ffffffff 80000000 00000001 80000000 00000003
+muld n eq xu 80000000 00000001 00000000 00000001 80000000 00000000
+muld n eq xu 800fffff fffffff7 00200000 00000003 80000000 00000000
+muld n eq xu 800fffff fffffff7 3ff00000 00000001 800fffff fffffff8
+muld n eq xu 800fffff ffffffff 800fffff fffffffe 00000000 00000000
+muld n eq xu 80100000 00000000 00100000 00000000 80000000 00000000
+muld n eq xu 80100000 00000000 80100000 00000000 00000000 00000000
+muld n eq xu bfe00000 00000001 00000000 00000001 80000000 00000001
+muld n eq xu bff80000 00000000 80000000 00000001 00000000 00000002
+muld n eq xu c0040000 00000001 00000000 00000001 80000000 00000003
+muld n eq xu c00c0000 00000000 80000000 00000001 00000000 00000004
+muld n uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld n uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld n uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld n uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+muld n uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+muld n uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld n uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+muld n uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+muld n uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+muld n uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld n uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+muld n uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld n uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld n uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld n uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo v 00000000 00000000 7ff00000 00000000 7fffe000 00000000
+muld n uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 00000000 00000000 fff00000 00000000 ffffe000 00000000
+muld n uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld n uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld n uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld n uo v 7ff00000 00000000 00000000 00000000 7fffe000 00000000
+muld n uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 7ff00000 00000000 80000000 00000000 ffffe000 00000000
+muld n uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+muld n uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+muld n uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld n uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+muld n uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+muld n uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+muld n uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld n uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+muld n uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 80000000 00000000 7ff00000 00000000 ffffe000 00000000
+muld n uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v 80000000 00000000 fff00000 00000000 7fffe000 00000000
+muld n uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld n uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld n uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld n uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld n uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p eq - 00000000 00000000 00000000 00000000 00000000 00000000
+muld p eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+muld p eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+muld p eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+muld p eq - 00000000 00000000 43d00000 00000000 00000000 00000000
+muld p eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+muld p eq - 00000000 00000000 80000000 00000000 80000000 00000000
+muld p eq - 00000000 00000000 80000000 00000002 80000000 00000000
+muld p eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+muld p eq - 00000000 00000000 80100000 00000001 80000000 00000000
+muld p eq - 00000000 00000000 80200000 00000000 80000000 00000000
+muld p eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+muld p eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+muld p eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+muld p eq - 00000000 00000000 ffefffff ffffffff 80000000 00000000
+muld p eq - 00000000 00000001 00000000 00000000 00000000 00000000
+muld p eq - 00000000 00000001 3ff00000 00000000 00000000 00000001
+muld p eq - 00000000 00000001 40000000 00000000 00000000 00000002
+muld p eq - 00000000 00000001 43500000 00000000 00300000 00000000
+muld p eq - 00000000 00000001 7ff00000 00000000 7ff00000 00000000
+muld p eq - 00000000 00000006 3fe00000 00000000 00000000 00000003
+muld p eq - 00000000 00000006 bfe00000 00000000 80000000 00000003
+muld p eq - 00000000 00000008 3fc00000 00000000 00000000 00000001
+muld p eq - 000fffff fffffffc 40000000 00000000 001fffff fffffff8
+muld p eq - 000fffff ffffffff 40000000 00000000 001fffff fffffffe
+muld p eq - 00100000 00000000 00000000 00000000 00000000 00000000
+muld p eq - 00100000 00000000 3fefffff fffffffe 000fffff ffffffff
+muld p eq - 00100000 00000000 7ff00000 00000000 7ff00000 00000000
+muld p eq - 00100000 00000000 c0000000 00000000 80200000 00000000
+muld p eq - 00100000 00000001 c0000000 00000000 80200000 00000001
+muld p eq - 001fffff fffffff8 3fe00000 00000000 000fffff fffffffc
+muld p eq - 001fffff fffffffe 3fe00000 00000000 000fffff ffffffff
+muld p eq - 001fffff fffffffe bfe00000 00000000 800fffff ffffffff
+muld p eq - 36a00000 00000000 41800000 00000000 38300000 00000000
+muld p eq - 3ff00000 00000000 00000000 00000001 00000000 00000001
+muld p eq - 3ff00000 00000000 000fffff fffffffd 000fffff fffffffd
+muld p eq - 3ff00000 00000000 00200000 00000003 00200000 00000003
+muld p eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+muld p eq - 3ff00000 00000000 40000000 00000000 40000000 00000000
+muld p eq - 3ff00000 00000000 80000000 00000001 80000000 00000001
+muld p eq - 3ff00000 00000000 80000000 00000002 80000000 00000002
+muld p eq - 3ff00000 00000000 80000000 00000009 80000000 00000009
+muld p eq - 3ff00000 00000000 801fffff fffffffe 801fffff fffffffe
+muld p eq - 40000000 00000000 00000000 00000001 00000000 00000002
+muld p eq - 40000000 00000000 000fffff fffffffc 001fffff fffffff8
+muld p eq - 40000000 00000000 000fffff ffffffff 001fffff fffffffe
+muld p eq - 40000000 00000000 00100000 00000000 00200000 00000000
+muld p eq - 40000000 00000000 00100000 00000001 00200000 00000001
+muld p eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+muld p eq - 40000000 00000000 40080000 00000000 40180000 00000000
+muld p eq - 40000000 00000000 40d00000 00000000 40e00000 00000000
+muld p eq - 40000000 00000000 43c00000 00000000 43d00000 00000000
+muld p eq - 40000000 00000000 7fd00000 00000000 7fe00000 00000000
+muld p eq - 40000000 00000000 7fdfffff ffffffff 7fefffff ffffffff
+muld p eq - 40000000 00000000 800fffff fffffffd 801fffff fffffffa
+muld p eq - 40000000 00000000 80100000 00000003 80200000 00000003
+muld p eq - 40000000 00000000 80100000 00000005 80200000 00000005
+muld p eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+muld p eq - 40000000 00000000 ffcfffff fffffffd ffdfffff fffffffd
+muld p eq - 40000000 00000000 ffd00000 00000003 ffe00000 00000003
+muld p eq - 40080000 00000000 00000000 00000002 00000000 00000006
+muld p eq - 40080000 00000000 40000000 00000000 40180000 00000000
+muld p eq - 40080000 00000000 40080000 00000000 40220000 00000000
+muld p eq - 40080000 00000000 c0000000 00000000 c0180000 00000000
+muld p eq - 40100000 00000000 00000000 00000002 00000000 00000008
+muld p eq - 40100000 00000000 43b00000 00000000 43d00000 00000000
+muld p eq - 40100000 00000000 7fcfffff ffffffff 7fefffff ffffffff
+muld p eq - 40100000 00000000 ffcfffff ffffffff ffefffff ffffffff
+muld p eq - 40140000 00000000 00000000 00000000 00000000 00000000
+muld p eq - 40140000 00000000 7ff00000 00000000 7ff00000 00000000
+muld p eq - 40140000 00000000 80000000 00000001 80000000 00000005
+muld p eq - 401c0000 00000000 80000000 00000000 80000000 00000000
+muld p eq - 401c0000 00000000 fff00000 00000000 fff00000 00000000
+muld p eq - 40a00000 00000000 41e00000 00000000 42900000 00000000
+muld p eq - 40b00000 00000000 00000000 00000000 00000000 00000000
+muld p eq - 40b00000 00000000 43d00000 00000000 44900000 00000000
+muld p eq - 40b00000 00000000 80000000 00000000 80000000 00000000
+muld p eq - 40b00000 00000000 c0000000 00000000 c0c00000 00000000
+muld p eq - 40b00000 00000000 c3c00000 00000000 c4800000 00000000
+muld p eq - 40c00000 00000000 41d00000 00000000 42a00000 00000000
+muld p eq - 40d00000 00000000 40000000 00000000 40e00000 00000000
+muld p eq - 40d00000 00000000 c1600000 00000000 c2400000 00000000
+muld p eq - 7fcfffff fffffffd 40100000 00000000 7fefffff fffffffd
+muld p eq - 7fcfffff fffffffd c0100000 00000000 ffefffff fffffffd
+muld p eq - 7fd00000 00000000 c0000000 00000000 ffe00000 00000000
+muld p eq - 7fdfffff ffffffff c0000000 00000000 ffefffff ffffffff
+muld p eq - 7fe00000 00000000 00000000 00000000 00000000 00000000
+muld p eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld p eq - 7fefffff ffffffff 00000000 00000000 00000000 00000000
+muld p eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+muld p eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+muld p eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+muld p eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+muld p eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld p eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+muld p eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+muld p eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+muld p eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+muld p eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+muld p eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+muld p eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+muld p eq - 7ff00000 00000000 fff00000 00000000 fff00000 00000000
+muld p eq - 80000000 00000000 00000000 00000000 80000000 00000000
+muld p eq - 80000000 00000000 40180000 00000000 80000000 00000000
+muld p eq - 80000000 00000000 7fefffff ffffffff 80000000 00000000
+muld p eq - 80000000 00000000 80000000 00000000 00000000 00000000
+muld p eq - 80000000 00000000 80000000 00000004 00000000 00000000
+muld p eq - 80000000 00000000 80100000 00000000 00000000 00000000
+muld p eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+muld p eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+muld p eq - 80000000 00000001 40140000 00000000 80000000 00000005
+muld p eq - 80000000 00000002 3ff00000 00000000 80000000 00000002
+muld p eq - 80000000 00000003 00000000 00000000 80000000 00000000
+muld p eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+muld p eq - 80000000 00000004 bff00000 00000000 00000000 00000004
+muld p eq - 80000000 00000008 3fc00000 00000000 80000000 00000001
+muld p eq - 800fffff fffffffd c0000000 00000000 001fffff fffffffa
+muld p eq - 800fffff ffffffff 00000000 00000000 80000000 00000000
+muld p eq - 800fffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld p eq - 800fffff ffffffff 80000000 00000000 00000000 00000000
+muld p eq - 800fffff ffffffff bff00000 00000000 000fffff ffffffff
+muld p eq - 800fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld p eq - 80100000 00000001 00000000 00000000 80000000 00000000
+muld p eq - 80100000 00000001 7ff00000 00000000 fff00000 00000000
+muld p eq - 80100000 00000001 bff00000 00000000 00100000 00000001
+muld p eq - 801fffff fffffffc 3fe00000 00000000 800fffff fffffffe
+muld p eq - 801fffff fffffffc bfe00000 00000000 000fffff fffffffe
+muld p eq - 801fffff fffffffe 3ff00000 00000000 801fffff fffffffe
+muld p eq - 801fffff ffffffff 80000000 00000000 00000000 00000000
+muld p eq - 801fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld p eq - 80200000 00000000 00000000 00000000 80000000 00000000
+muld p eq - 80200000 00000000 7ff00000 00000000 fff00000 00000000
+muld p eq - bff00000 00000000 00000000 00000009 80000000 00000009
+muld p eq - bff00000 00000000 00100000 00000009 80100000 00000009
+muld p eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+muld p eq - bff00000 00000000 40000000 00000000 c0000000 00000000
+muld p eq - bff00000 00000000 80000000 00000004 00000000 00000004
+muld p eq - bff00000 00000000 800fffff ffffffff 000fffff ffffffff
+muld p eq - bff00000 00000000 80100000 00000001 00100000 00000001
+muld p eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+muld p eq - bff00000 00000000 c0000000 00000000 40000000 00000000
+muld p eq - c0000000 00000000 00000000 00000000 80000000 00000000
+muld p eq - c0000000 00000000 000fffff fffffffd 801fffff fffffffa
+muld p eq - c0000000 00000000 00100000 00000001 80200000 00000001
+muld p eq - c0000000 00000000 00100000 00000005 80200000 00000005
+muld p eq - c0000000 00000000 00100000 00000009 80200000 00000009
+muld p eq - c0000000 00000000 40080000 00000000 c0180000 00000000
+muld p eq - c0000000 00000000 40d00000 00000000 c0e00000 00000000
+muld p eq - c0000000 00000000 43c00000 00000000 c3d00000 00000000
+muld p eq - c0000000 00000000 7fcfffff ffffffff ffdfffff ffffffff
+muld p eq - c0000000 00000000 7fd00000 00000001 ffe00000 00000001
+muld p eq - c0000000 00000000 7ff00000 00000000 fff00000 00000000
+muld p eq - c0000000 00000000 800fffff fffffffd 001fffff fffffffa
+muld p eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+muld p eq - c0000000 00000000 c0080000 00000000 40180000 00000000
+muld p eq - c0000000 00000000 c3c00000 00000000 43d00000 00000000
+muld p eq - c0080000 00000000 40080000 00000000 c0220000 00000000
+muld p eq - c0080000 00000000 c0000000 00000000 40180000 00000000
+muld p eq - c0080000 00000000 c0080000 00000000 40220000 00000000
+muld p eq - c0100000 00000000 43c00000 00000000 c3e00000 00000000
+muld p eq - c0100000 00000000 7fcfffff ffffffff ffefffff ffffffff
+muld p eq - c0100000 00000000 80000000 00000000 00000000 00000000
+muld p eq - c0100000 00000000 ffcfffff ffffffff 7fefffff ffffffff
+muld p eq - c0100000 00000000 fff00000 00000000 7ff00000 00000000
+muld p eq - c0a00000 00000000 41e00000 00000000 c2900000 00000000
+muld p eq - c0a00000 00000000 c0000000 00000000 40b00000 00000000
+muld p eq - c0a00000 00000000 c1e00000 00000000 42900000 00000000
+muld p eq - c0a00000 00000000 c1f00000 00000000 42a00000 00000000
+muld p eq - c0d00000 00000000 42400000 00000000 c3200000 00000000
+muld p eq - c0d00000 00000000 c0000000 00000000 40e00000 00000000
+muld p eq - ffcfffff fffffffd 40100000 00000000 ffefffff fffffffd
+muld p eq - ffcfffff fffffffd c0100000 00000000 7fefffff fffffffd
+muld p eq - ffcfffff ffffffff 00000000 00000000 80000000 00000000
+muld p eq - ffcfffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld p eq - ffd00000 00000000 00000000 00000000 80000000 00000000
+muld p eq - ffd00000 00000000 7ff00000 00000000 fff00000 00000000
+muld p eq - ffdfffff ffffffff 80000000 00000000 00000000 00000000
+muld p eq - ffefffff ffffffff 80000000 00000000 00000000 00000000
+muld p eq - ffefffff ffffffff fff00000 00000000 7ff00000 00000000
+muld p eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+muld p eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+muld p eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+muld p eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+muld p eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+muld p eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+muld p eq - fff00000 00000000 fff00000 00000000 7ff00000 00000000
+muld p eq x 3ff00000 00000001 3ff00000 00000001 3ff00000 00000003
+muld p eq x 3ff00000 00000001 bff00000 00000001 bff00000 00000002
+muld p eq x 3ff00000 00000001 bff00000 00000002 bff00000 00000003
+muld p eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000004
+muld p eq x 4007ffff ffffffff 3fefffff fffffffd 4007ffff fffffffd
+muld p eq x 4007ffff ffffffff 3fefffff fffffffe 4007ffff fffffffe
+muld p eq x 4007ffff ffffffff 3fefffff ffffffff 4007ffff ffffffff
+muld p eq x 4007ffff ffffffff bfefffff fffffffd c007ffff fffffffc
+muld p eq x 40080000 00000001 3ff00000 00000001 40080000 00000003
+muld p eq x 40080000 00000001 3ff00000 00000003 40080000 00000006
+muld p eq x 40080000 00000001 bff00000 00000003 c0080000 00000005
+muld p eq x 4013ffff ffffffff 3fefffff ffffffff 4013ffff ffffffff
+muld p eq x 40140000 00000001 3ff00000 00000001 40140000 00000003
+muld p eq x 401bffff ffffffff 3fefffff fffffffc 401bffff fffffffc
+muld p eq x 401bffff ffffffff 3fefffff ffffffff 401bffff ffffffff
+muld p eq x 401c0000 00000001 3ff00000 00000001 401c0000 00000003
+muld p eq x bfefffff ffffffff ffefffff ffffffff 7fefffff ffffffff
+muld p eq x bff00000 00000001 3ff00000 00000001 bff00000 00000002
+muld p eq x bff00000 00000001 bff00000 00000001 3ff00000 00000003
+muld p eq x bff00000 00000001 bff00000 00000002 3ff00000 00000004
+muld p eq x bff00000 00000002 3ff00000 00000001 bff00000 00000003
+muld p eq x bff00000 00000002 bff00000 00000001 3ff00000 00000004
+muld p eq x c007ffff ffffffff 3fefffff ffffffff c007ffff fffffffe
+muld p eq x c0080000 00000001 3ff00000 00000001 c0080000 00000002
+muld p eq x c013ffff ffffffff bfefffff ffffffff 4013ffff ffffffff
+muld p eq x c0140000 00000001 bff00000 00000001 40140000 00000003
+muld p eq x ffcfffff fffffff9 c00fffff ffffffff 7fefffff fffffff9
+muld p eq x ffcfffff ffffffff 40000000 00000001 ffe00000 00000000
+muld p eq x ffdfffff ffffffff 3ff00000 00000001 ffe00000 00000000
+muld p eq x?u 000fffff fffffff8 3ff00000 00000008 00100000 00000000
+muld p eq x?u 000fffff ffffffff 3ff00000 00000001 00100000 00000000
+muld p eq x?u 00100000 00000001 3fefffff fffffffe 00100000 00000000
+muld p eq x?u 00100000 00000002 3fefffff fffffffc 00100000 00000000
+muld p eq x?u 20000000 02000000 1fffffff fbffffff 00100000 00000000
+muld p eq x?u 20000000 02000000 1fffffff fc000000 00100000 00000000
+muld p eq xo 3ff00000 00000002 7fefffff fffffffe 7ff00000 00000000
+muld p eq xo 40000000 00000000 7fe00000 00000000 7ff00000 00000000
+muld p eq xo 7fdfffff fffffffd c0000000 00000008 ffefffff ffffffff
+muld p eq xo 7fe00000 00000000 40000000 00000000 7ff00000 00000000
+muld p eq xo 7fe00000 00000000 7fe00000 00000000 7ff00000 00000000
+muld p eq xo 7fe00000 00000000 7fefffff fffffffe 7ff00000 00000000
+muld p eq xo 7fe00000 00000000 c0000000 00000000 ffefffff ffffffff
+muld p eq xo 7fe00000 00000000 c0100000 00000005 ffefffff ffffffff
+muld p eq xo 7fe00000 00000000 c0200000 00000003 ffefffff ffffffff
+muld p eq xo 7fe00000 00000009 7fefffff fffffffa 7ff00000 00000000
+muld p eq xo 7fe00000 00000009 c0180000 00000002 ffefffff ffffffff
+muld p eq xo 7fe00000 00000009 ffd00000 00000002 ffefffff ffffffff
+muld p eq xo c0000000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld p eq xo c0080000 00000000 ffe00000 00000000 7ff00000 00000000
+muld p eq xo c013ffff fffffffe ffe00000 00000000 7ff00000 00000000
+muld p eq xo c0140000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld p eq xo c0220000 00000001 ffe00000 00000000 7ff00000 00000000
+muld p eq xo ffd00000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld p eq xo ffe00000 00000005 ffe00000 00000001 7ff00000 00000000
+muld p eq xo ffefffff fffffffd c0080000 00000001 7ff00000 00000000
+muld p eq xo ffefffff fffffffd ffe00000 00000001 7ff00000 00000000
+muld p eq xo ffefffff ffffffff ffefffff ffffffff 7ff00000 00000000
+muld p eq xu 00000000 00000001 00000000 00000001 00000000 00000001
+muld p eq xu 00000000 00000001 3fe00000 00000000 00000000 00000001
+muld p eq xu 00000000 00000001 3fefffff ffffffff 00000000 00000001
+muld p eq xu 000fffff fffffff7 80200000 00000003 80000000 00000000
+muld p eq xu 000fffff fffffff8 3ff00000 00000001 000fffff fffffff9
+muld p eq xu 000fffff fffffffe 3fefffff fffffffc 000fffff fffffffd
+muld p eq xu 000fffff fffffffe 3ff00000 00000001 000fffff ffffffff
+muld p eq xu 000fffff ffffffff 000fffff fffffffe 00000000 00000001
+muld p eq xu 000fffff ffffffff bff00000 00000001 800fffff ffffffff
+muld p eq xu 00100000 00000000 00200000 00000000 00000000 00000001
+muld p eq xu 00100000 00000000 80100000 00000000 80000000 00000000
+muld p eq xu 00100000 00000001 3fefffff fffffffa 000fffff fffffffe
+muld p eq xu 001fffff ffffffff 3fe00000 00000000 00100000 00000000
+muld p eq xu 00f70001 00000001 00f00000 00000001 00000000 00000001
+muld p eq xu 3fe00000 00000000 80000000 00000001 80000000 00000000
+muld p eq xu 3ff7ffff ffffffff 80000000 00000001 80000000 00000001
+muld p eq xu 400bffff ffffffff 80000000 00000001 80000000 00000003
+muld p eq xu 80000000 00000001 00000000 00000001 80000000 00000000
+muld p eq xu 80000000 00000001 3fefffff ffffffff 80000000 00000000
+muld p eq xu 800fffff fffffff7 00200000 00000003 80000000 00000000
+muld p eq xu 800fffff ffffffff 800fffff fffffffe 00000000 00000001
+muld p eq xu 80100000 00000000 00100000 00000000 80000000 00000000
+muld p eq xu 801fffff ffffffff 3fe00000 00000000 800fffff ffffffff
+muld p eq xu 80200000 00000000 80200000 00000000 00000000 00000001
+muld p eq xu bff80000 00000000 80000000 00000001 00000000 00000002
+muld p eq xu c00c0000 00000000 80000000 00000001 00000000 00000004
+muld p uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld p uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld p uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld p uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+muld p uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+muld p uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld p uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+muld p uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+muld p uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+muld p uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld p uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+muld p uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld p uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld p uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld p uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo v 00000000 00000000 7ff00000 00000000 7fffe000 00000000
+muld p uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 00000000 00000000 fff00000 00000000 ffffe000 00000000
+muld p uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld p uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld p uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld p uo v 7ff00000 00000000 00000000 00000000 7fffe000 00000000
+muld p uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 7ff00000 00000000 80000000 00000000 ffffe000 00000000
+muld p uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+muld p uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+muld p uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld p uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+muld p uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+muld p uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+muld p uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld p uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+muld p uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 80000000 00000000 7ff00000 00000000 ffffe000 00000000
+muld p uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v 80000000 00000000 fff00000 00000000 7fffe000 00000000
+muld p uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld p uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld p uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld p uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld p uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z eq - 00000000 00000000 00000000 00000000 00000000 00000000
+muld z eq - 00000000 00000000 000fffff ffffffff 00000000 00000000
+muld z eq - 00000000 00000000 001fffff ffffffff 00000000 00000000
+muld z eq - 00000000 00000000 3ff00000 00000000 00000000 00000000
+muld z eq - 00000000 00000000 43d00000 00000000 00000000 00000000
+muld z eq - 00000000 00000000 7fdfffff ffffffff 00000000 00000000
+muld z eq - 00000000 00000000 80000000 00000000 80000000 00000000
+muld z eq - 00000000 00000000 80000000 00000002 80000000 00000000
+muld z eq - 00000000 00000000 800fffff ffffffff 80000000 00000000
+muld z eq - 00000000 00000000 80100000 00000001 80000000 00000000
+muld z eq - 00000000 00000000 80200000 00000000 80000000 00000000
+muld z eq - 00000000 00000000 c0080000 00000000 80000000 00000000
+muld z eq - 00000000 00000000 ffcfffff ffffffff 80000000 00000000
+muld z eq - 00000000 00000000 ffe00000 00000000 80000000 00000000
+muld z eq - 00000000 00000000 ffefffff ffffffff 80000000 00000000
+muld z eq - 00000000 00000001 00000000 00000000 00000000 00000000
+muld z eq - 00000000 00000001 3ff00000 00000000 00000000 00000001
+muld z eq - 00000000 00000001 40000000 00000000 00000000 00000002
+muld z eq - 00000000 00000001 43500000 00000000 00300000 00000000
+muld z eq - 00000000 00000001 7ff00000 00000000 7ff00000 00000000
+muld z eq - 00000000 00000006 3fe00000 00000000 00000000 00000003
+muld z eq - 00000000 00000006 bfe00000 00000000 80000000 00000003
+muld z eq - 00000000 00000008 3fc00000 00000000 00000000 00000001
+muld z eq - 000fffff fffffffc 40000000 00000000 001fffff fffffff8
+muld z eq - 000fffff ffffffff 40000000 00000000 001fffff fffffffe
+muld z eq - 00100000 00000000 00000000 00000000 00000000 00000000
+muld z eq - 00100000 00000000 3fefffff fffffffe 000fffff ffffffff
+muld z eq - 00100000 00000000 7ff00000 00000000 7ff00000 00000000
+muld z eq - 00100000 00000000 c0000000 00000000 80200000 00000000
+muld z eq - 00100000 00000001 c0000000 00000000 80200000 00000001
+muld z eq - 001fffff fffffff8 3fe00000 00000000 000fffff fffffffc
+muld z eq - 001fffff fffffffe 3fe00000 00000000 000fffff ffffffff
+muld z eq - 001fffff fffffffe bfe00000 00000000 800fffff ffffffff
+muld z eq - 36a00000 00000000 41800000 00000000 38300000 00000000
+muld z eq - 3ff00000 00000000 00000000 00000001 00000000 00000001
+muld z eq - 3ff00000 00000000 000fffff fffffffd 000fffff fffffffd
+muld z eq - 3ff00000 00000000 00200000 00000003 00200000 00000003
+muld z eq - 3ff00000 00000000 3ff00000 00000000 3ff00000 00000000
+muld z eq - 3ff00000 00000000 40000000 00000000 40000000 00000000
+muld z eq - 3ff00000 00000000 80000000 00000001 80000000 00000001
+muld z eq - 3ff00000 00000000 80000000 00000002 80000000 00000002
+muld z eq - 3ff00000 00000000 80000000 00000009 80000000 00000009
+muld z eq - 3ff00000 00000000 801fffff fffffffe 801fffff fffffffe
+muld z eq - 40000000 00000000 00000000 00000001 00000000 00000002
+muld z eq - 40000000 00000000 000fffff fffffffc 001fffff fffffff8
+muld z eq - 40000000 00000000 000fffff ffffffff 001fffff fffffffe
+muld z eq - 40000000 00000000 00100000 00000000 00200000 00000000
+muld z eq - 40000000 00000000 00100000 00000001 00200000 00000001
+muld z eq - 40000000 00000000 3ff00000 00000000 40000000 00000000
+muld z eq - 40000000 00000000 40080000 00000000 40180000 00000000
+muld z eq - 40000000 00000000 40d00000 00000000 40e00000 00000000
+muld z eq - 40000000 00000000 43c00000 00000000 43d00000 00000000
+muld z eq - 40000000 00000000 7fd00000 00000000 7fe00000 00000000
+muld z eq - 40000000 00000000 7fdfffff ffffffff 7fefffff ffffffff
+muld z eq - 40000000 00000000 800fffff fffffffd 801fffff fffffffa
+muld z eq - 40000000 00000000 80100000 00000003 80200000 00000003
+muld z eq - 40000000 00000000 80100000 00000005 80200000 00000005
+muld z eq - 40000000 00000000 bff00000 00000000 c0000000 00000000
+muld z eq - 40000000 00000000 ffcfffff fffffffd ffdfffff fffffffd
+muld z eq - 40000000 00000000 ffd00000 00000003 ffe00000 00000003
+muld z eq - 40080000 00000000 00000000 00000002 00000000 00000006
+muld z eq - 40080000 00000000 40000000 00000000 40180000 00000000
+muld z eq - 40080000 00000000 40080000 00000000 40220000 00000000
+muld z eq - 40080000 00000000 c0000000 00000000 c0180000 00000000
+muld z eq - 40100000 00000000 00000000 00000002 00000000 00000008
+muld z eq - 40100000 00000000 43b00000 00000000 43d00000 00000000
+muld z eq - 40100000 00000000 7fcfffff ffffffff 7fefffff ffffffff
+muld z eq - 40100000 00000000 ffcfffff ffffffff ffefffff ffffffff
+muld z eq - 40140000 00000000 00000000 00000000 00000000 00000000
+muld z eq - 40140000 00000000 7ff00000 00000000 7ff00000 00000000
+muld z eq - 40140000 00000000 80000000 00000001 80000000 00000005
+muld z eq - 401c0000 00000000 80000000 00000000 80000000 00000000
+muld z eq - 401c0000 00000000 fff00000 00000000 fff00000 00000000
+muld z eq - 40a00000 00000000 41e00000 00000000 42900000 00000000
+muld z eq - 40b00000 00000000 00000000 00000000 00000000 00000000
+muld z eq - 40b00000 00000000 43d00000 00000000 44900000 00000000
+muld z eq - 40b00000 00000000 80000000 00000000 80000000 00000000
+muld z eq - 40b00000 00000000 c0000000 00000000 c0c00000 00000000
+muld z eq - 40b00000 00000000 c3c00000 00000000 c4800000 00000000
+muld z eq - 40c00000 00000000 41d00000 00000000 42a00000 00000000
+muld z eq - 40d00000 00000000 40000000 00000000 40e00000 00000000
+muld z eq - 40d00000 00000000 c1600000 00000000 c2400000 00000000
+muld z eq - 7fcfffff fffffffd 40100000 00000000 7fefffff fffffffd
+muld z eq - 7fcfffff fffffffd c0100000 00000000 ffefffff fffffffd
+muld z eq - 7fd00000 00000000 c0000000 00000000 ffe00000 00000000
+muld z eq - 7fdfffff ffffffff c0000000 00000000 ffefffff ffffffff
+muld z eq - 7fe00000 00000000 00000000 00000000 00000000 00000000
+muld z eq - 7fe00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld z eq - 7fefffff ffffffff 00000000 00000000 00000000 00000000
+muld z eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+muld z eq - 7ff00000 00000000 001fffff ffffffff 7ff00000 00000000
+muld z eq - 7ff00000 00000000 3ff00000 00000000 7ff00000 00000000
+muld z eq - 7ff00000 00000000 7fdfffff ffffffff 7ff00000 00000000
+muld z eq - 7ff00000 00000000 7ff00000 00000000 7ff00000 00000000
+muld z eq - 7ff00000 00000000 80000000 00000002 fff00000 00000000
+muld z eq - 7ff00000 00000000 800fffff ffffffff fff00000 00000000
+muld z eq - 7ff00000 00000000 80100000 00000001 fff00000 00000000
+muld z eq - 7ff00000 00000000 80200000 00000000 fff00000 00000000
+muld z eq - 7ff00000 00000000 c0080000 00000000 fff00000 00000000
+muld z eq - 7ff00000 00000000 ffe00000 00000000 fff00000 00000000
+muld z eq - 7ff00000 00000000 ffefffff ffffffff fff00000 00000000
+muld z eq - 7ff00000 00000000 fff00000 00000000 fff00000 00000000
+muld z eq - 80000000 00000000 00000000 00000000 80000000 00000000
+muld z eq - 80000000 00000000 40180000 00000000 80000000 00000000
+muld z eq - 80000000 00000000 7fefffff ffffffff 80000000 00000000
+muld z eq - 80000000 00000000 80000000 00000000 00000000 00000000
+muld z eq - 80000000 00000000 80000000 00000004 00000000 00000000
+muld z eq - 80000000 00000000 80100000 00000000 00000000 00000000
+muld z eq - 80000000 00000000 c0200000 00000000 00000000 00000000
+muld z eq - 80000000 00000000 ffd00000 00000000 00000000 00000000
+muld z eq - 80000000 00000001 40140000 00000000 80000000 00000005
+muld z eq - 80000000 00000002 3ff00000 00000000 80000000 00000002
+muld z eq - 80000000 00000003 00000000 00000000 80000000 00000000
+muld z eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+muld z eq - 80000000 00000004 bff00000 00000000 00000000 00000004
+muld z eq - 80000000 00000008 3fc00000 00000000 80000000 00000001
+muld z eq - 800fffff fffffffd c0000000 00000000 001fffff fffffffa
+muld z eq - 800fffff ffffffff 00000000 00000000 80000000 00000000
+muld z eq - 800fffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld z eq - 800fffff ffffffff 80000000 00000000 00000000 00000000
+muld z eq - 800fffff ffffffff bff00000 00000000 000fffff ffffffff
+muld z eq - 800fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld z eq - 80100000 00000001 00000000 00000000 80000000 00000000
+muld z eq - 80100000 00000001 7ff00000 00000000 fff00000 00000000
+muld z eq - 80100000 00000001 bff00000 00000000 00100000 00000001
+muld z eq - 801fffff fffffffc 3fe00000 00000000 800fffff fffffffe
+muld z eq - 801fffff fffffffc bfe00000 00000000 000fffff fffffffe
+muld z eq - 801fffff fffffffe 3ff00000 00000000 801fffff fffffffe
+muld z eq - 801fffff ffffffff 80000000 00000000 00000000 00000000
+muld z eq - 801fffff ffffffff fff00000 00000000 7ff00000 00000000
+muld z eq - 80200000 00000000 00000000 00000000 80000000 00000000
+muld z eq - 80200000 00000000 7ff00000 00000000 fff00000 00000000
+muld z eq - bff00000 00000000 00000000 00000009 80000000 00000009
+muld z eq - bff00000 00000000 00100000 00000009 80100000 00000009
+muld z eq - bff00000 00000000 3ff00000 00000000 bff00000 00000000
+muld z eq - bff00000 00000000 40000000 00000000 c0000000 00000000
+muld z eq - bff00000 00000000 80000000 00000004 00000000 00000004
+muld z eq - bff00000 00000000 800fffff ffffffff 000fffff ffffffff
+muld z eq - bff00000 00000000 80100000 00000001 00100000 00000001
+muld z eq - bff00000 00000000 bff00000 00000000 3ff00000 00000000
+muld z eq - bff00000 00000000 c0000000 00000000 40000000 00000000
+muld z eq - c0000000 00000000 00000000 00000000 80000000 00000000
+muld z eq - c0000000 00000000 000fffff fffffffd 801fffff fffffffa
+muld z eq - c0000000 00000000 00100000 00000001 80200000 00000001
+muld z eq - c0000000 00000000 00100000 00000005 80200000 00000005
+muld z eq - c0000000 00000000 00100000 00000009 80200000 00000009
+muld z eq - c0000000 00000000 40080000 00000000 c0180000 00000000
+muld z eq - c0000000 00000000 40d00000 00000000 c0e00000 00000000
+muld z eq - c0000000 00000000 43c00000 00000000 c3d00000 00000000
+muld z eq - c0000000 00000000 7fcfffff ffffffff ffdfffff ffffffff
+muld z eq - c0000000 00000000 7fd00000 00000001 ffe00000 00000001
+muld z eq - c0000000 00000000 7ff00000 00000000 fff00000 00000000
+muld z eq - c0000000 00000000 800fffff fffffffd 001fffff fffffffa
+muld z eq - c0000000 00000000 bff00000 00000000 40000000 00000000
+muld z eq - c0000000 00000000 c0080000 00000000 40180000 00000000
+muld z eq - c0000000 00000000 c3c00000 00000000 43d00000 00000000
+muld z eq - c0080000 00000000 40080000 00000000 c0220000 00000000
+muld z eq - c0080000 00000000 c0000000 00000000 40180000 00000000
+muld z eq - c0080000 00000000 c0080000 00000000 40220000 00000000
+muld z eq - c0100000 00000000 43c00000 00000000 c3e00000 00000000
+muld z eq - c0100000 00000000 7fcfffff ffffffff ffefffff ffffffff
+muld z eq - c0100000 00000000 80000000 00000000 00000000 00000000
+muld z eq - c0100000 00000000 ffcfffff ffffffff 7fefffff ffffffff
+muld z eq - c0100000 00000000 fff00000 00000000 7ff00000 00000000
+muld z eq - c0a00000 00000000 41e00000 00000000 c2900000 00000000
+muld z eq - c0a00000 00000000 c0000000 00000000 40b00000 00000000
+muld z eq - c0a00000 00000000 c1e00000 00000000 42900000 00000000
+muld z eq - c0a00000 00000000 c1f00000 00000000 42a00000 00000000
+muld z eq - c0d00000 00000000 42400000 00000000 c3200000 00000000
+muld z eq - c0d00000 00000000 c0000000 00000000 40e00000 00000000
+muld z eq - ffcfffff fffffffd 40100000 00000000 ffefffff fffffffd
+muld z eq - ffcfffff fffffffd c0100000 00000000 7fefffff fffffffd
+muld z eq - ffcfffff ffffffff 00000000 00000000 80000000 00000000
+muld z eq - ffcfffff ffffffff 7ff00000 00000000 fff00000 00000000
+muld z eq - ffd00000 00000000 00000000 00000000 80000000 00000000
+muld z eq - ffd00000 00000000 7ff00000 00000000 fff00000 00000000
+muld z eq - ffdfffff ffffffff 80000000 00000000 00000000 00000000
+muld z eq - ffefffff ffffffff 80000000 00000000 00000000 00000000
+muld z eq - ffefffff ffffffff fff00000 00000000 7ff00000 00000000
+muld z eq - fff00000 00000000 40180000 00000000 fff00000 00000000
+muld z eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+muld z eq - fff00000 00000000 80000000 00000004 7ff00000 00000000
+muld z eq - fff00000 00000000 80100000 00000000 7ff00000 00000000
+muld z eq - fff00000 00000000 c0200000 00000000 7ff00000 00000000
+muld z eq - fff00000 00000000 ffd00000 00000000 7ff00000 00000000
+muld z eq - fff00000 00000000 fff00000 00000000 7ff00000 00000000
+muld z eq x 3ff00000 00000001 3ff00000 00000001 3ff00000 00000002
+muld z eq x 3ff00000 00000001 bff00000 00000001 bff00000 00000002
+muld z eq x 3ff00000 00000001 bff00000 00000002 bff00000 00000003
+muld z eq x 3ff00000 00000002 3ff00000 00000001 3ff00000 00000003
+muld z eq x 4007ffff ffffffff 3fefffff fffffffd 4007ffff fffffffc
+muld z eq x 4007ffff ffffffff 3fefffff fffffffe 4007ffff fffffffd
+muld z eq x 4007ffff ffffffff 3fefffff ffffffff 4007ffff fffffffe
+muld z eq x 4007ffff ffffffff bfefffff fffffffd c007ffff fffffffc
+muld z eq x 40080000 00000001 3ff00000 00000001 40080000 00000002
+muld z eq x 40080000 00000001 3ff00000 00000003 40080000 00000005
+muld z eq x 40080000 00000001 bff00000 00000003 c0080000 00000005
+muld z eq x 4013ffff ffffffff 3fefffff ffffffff 4013ffff fffffffe
+muld z eq x 40140000 00000001 3ff00000 00000001 40140000 00000002
+muld z eq x 401bffff ffffffff 3fefffff fffffffc 401bffff fffffffb
+muld z eq x 401bffff ffffffff 3fefffff ffffffff 401bffff fffffffe
+muld z eq x 401c0000 00000001 3ff00000 00000001 401c0000 00000002
+muld z eq x bfefffff ffffffff ffefffff ffffffff 7fefffff fffffffe
+muld z eq x bff00000 00000001 3ff00000 00000001 bff00000 00000002
+muld z eq x bff00000 00000001 bff00000 00000001 3ff00000 00000002
+muld z eq x bff00000 00000001 bff00000 00000002 3ff00000 00000003
+muld z eq x bff00000 00000002 3ff00000 00000001 bff00000 00000003
+muld z eq x bff00000 00000002 bff00000 00000001 3ff00000 00000003
+muld z eq x c007ffff ffffffff 3fefffff ffffffff c007ffff fffffffe
+muld z eq x c0080000 00000001 3ff00000 00000001 c0080000 00000002
+muld z eq x c013ffff ffffffff bfefffff ffffffff 4013ffff fffffffe
+muld z eq x c0140000 00000001 bff00000 00000001 40140000 00000002
+muld z eq x ffcfffff fffffff9 c00fffff ffffffff 7fefffff fffffff8
+muld z eq x ffcfffff ffffffff 40000000 00000001 ffe00000 00000000
+muld z eq x ffdfffff ffffffff 3ff00000 00000001 ffe00000 00000000
+muld z eq xo 3ff00000 00000002 7fefffff fffffffe 7fefffff ffffffff
+muld z eq xo 40180000 00000000 7fe00000 00000000 7fefffff ffffffff
+muld z eq xo 7fdfffff fffffffd c0000000 00000008 ffefffff ffffffff
+muld z eq xo 7fe00000 00000000 40000000 00000001 7fefffff ffffffff
+muld z eq xo 7fe00000 00000000 40180000 00000000 7fefffff ffffffff
+muld z eq xo 7fe00000 00000000 40180000 00000001 7fefffff ffffffff
+muld z eq xo 7fe00000 00000000 7fd00000 00000006 7fefffff ffffffff
+muld z eq xo 7fe00000 00000000 c0000000 00000000 ffefffff ffffffff
+muld z eq xo 7fe00000 00000000 c0100000 00000005 ffefffff ffffffff
+muld z eq xo 7fe00000 00000000 c0200000 00000003 ffefffff ffffffff
+muld z eq xo 7fe00000 00000009 c0180000 00000002 ffefffff ffffffff
+muld z eq xo 7fe00000 00000009 ffd00000 00000002 ffefffff ffffffff
+muld z eq xo c0000000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld z eq xo c0140000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld z eq xo c01c0000 00000000 ffe00000 00000000 7fefffff ffffffff
+muld z eq xo ffcfffff fffffff9 ffefffff ffffffff 7fefffff ffffffff
+muld z eq xo ffd00000 00000000 7fe00000 00000000 ffefffff ffffffff
+muld z eq xo ffefffff fffffffd c0080000 00000001 7fefffff ffffffff
+muld z eq xu 00000000 00000001 00000000 00000001 00000000 00000000
+muld z eq xu 00000000 00000001 3fe00000 00000000 00000000 00000000
+muld z eq xu 00000000 00000001 3fefffff ffffffff 00000000 00000000
+muld z eq xu 000fffff fffffff7 80200000 00000003 80000000 00000000
+muld z eq xu 000fffff fffffffc 3ff00000 00000001 000fffff fffffffc
+muld z eq xu 000fffff fffffffe 3fefffff fffffffc 000fffff fffffffc
+muld z eq xu 000fffff ffffffff 000fffff fffffffe 00000000 00000000
+muld z eq xu 000fffff ffffffff 3ff00000 00000001 000fffff ffffffff
+muld z eq xu 000fffff ffffffff bff00000 00000001 800fffff ffffffff
+muld z eq xu 00100000 00000000 00100000 00000000 00000000 00000000
+muld z eq xu 00100000 00000000 80100000 00000000 80000000 00000000
+muld z eq xu 00100000 00000001 3fefffff fffffffa 000fffff fffffffd
+muld z eq xu 00100000 00000001 3fefffff fffffffe 000fffff ffffffff
+muld z eq xu 001fffff ffffffff 3fe00000 00000000 000fffff ffffffff
+muld z eq xu 00f70001 00000001 00f00000 00000001 00000000 00000000
+muld z eq xu 20000000 02000000 1fffffff fbffffff 000fffff ffffffff
+muld z eq xu 20000000 02000000 1fffffff fc000000 000fffff ffffffff
+muld z eq xu 3fe00000 00000000 00000000 00000001 00000000 00000000
+muld z eq xu 3fe00000 00000000 80000000 00000001 80000000 00000000
+muld z eq xu 3ff7ffff ffffffff 80000000 00000001 80000000 00000001
+muld z eq xu 40040000 00000000 00000000 00000001 00000000 00000002
+muld z eq xu 400bffff ffffffff 80000000 00000001 80000000 00000003
+muld z eq xu 80000000 00000001 00000000 00000001 80000000 00000000
+muld z eq xu 80000000 00000001 3fefffff ffffffff 80000000 00000000
+muld z eq xu 800fffff fffffff7 00200000 00000003 80000000 00000000
+muld z eq xu 800fffff ffffffff 800fffff fffffffe 00000000 00000000
+muld z eq xu 80100000 00000000 00100000 00000000 80000000 00000000
+muld z eq xu 80100000 00000000 80100000 00000000 00000000 00000000
+muld z eq xu 801fffff ffffffff 3fe00000 00000000 800fffff ffffffff
+muld z eq xu 801fffff ffffffff bfe00000 00000000 000fffff ffffffff
+muld z uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld z uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld z uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld z uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+muld z uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+muld z uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld z uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+muld z uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+muld z uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+muld z uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld z uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+muld z uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+muld z uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld z uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+muld z uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo v 00000000 00000000 7ff00000 00000000 7fffe000 00000000
+muld z uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 00000000 00000000 fff00000 00000000 ffffe000 00000000
+muld z uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld z uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld z uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld z uo v 7ff00000 00000000 00000000 00000000 7fffe000 00000000
+muld z uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 7ff00000 00000000 80000000 00000000 ffffe000 00000000
+muld z uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+muld z uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+muld z uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+muld z uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+muld z uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+muld z uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+muld z uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+muld z uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+muld z uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 80000000 00000000 7ff00000 00000000 ffffe000 00000000
+muld z uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v 80000000 00000000 fff00000 00000000 7fffe000 00000000
+muld z uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+muld z uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld z uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+muld z uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+muld z uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/muls.input b/verrou/unitTest/checkUCB-vecto/inputData/muls.input
new file mode 100644
index 0000000000000000000000000000000000000000..0d503126e30ef28db028888a33fba9cd5e57e001
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/muls.input
@@ -0,0 +1,1339 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+muls p eq - 3f800000 3f800000 3f800000
+muls z eq - 3f800000 3f800000 3f800000
+muls n eq - 3f800000 40000000 40000000
+muls m eq - 3f800000 40000000 40000000
+muls p eq - 3f800000 40000000 40000000
+muls z eq - 3f800000 40000000 40000000
+muls n eq - 40000000 3f800000 40000000
+muls m eq - 40000000 3f800000 40000000
+muls p eq - 40000000 3f800000 40000000
+muls z eq - 40000000 3f800000 40000000
+muls n eq - 40000000 40400000 40c00000
+muls m eq - 40000000 40400000 40c00000
+muls p eq - 40000000 40400000 40c00000
+muls z eq - 40000000 40400000 40c00000
+muls n eq - 40400000 40000000 40c00000
+muls m eq - 40400000 40000000 40c00000
+muls p eq - 40400000 40000000 40c00000
+muls z eq - 40400000 40000000 40c00000
+muls n eq - 40400000 40400000 41100000
+muls m eq - 40400000 40400000 41100000
+muls p eq - 40400000 40400000 41100000
+muls z eq - 40400000 40400000 41100000
+muls n eq - bf800000 3f800000 bf800000
+muls m eq - bf800000 3f800000 bf800000
+muls p eq - bf800000 3f800000 bf800000
+muls z eq - bf800000 3f800000 bf800000
+muls n eq - bf800000 40000000 c0000000
+muls m eq - bf800000 40000000 c0000000
+muls p eq - bf800000 40000000 c0000000
+muls z eq - bf800000 40000000 c0000000
+muls n eq - 40000000 bf800000 c0000000
+muls m eq - 40000000 bf800000 c0000000
+muls p eq - 40000000 bf800000 c0000000
+muls z eq - 40000000 bf800000 c0000000
+muls n eq - c0000000 40400000 c0c00000
+muls m eq - c0000000 40400000 c0c00000
+muls p eq - c0000000 40400000 c0c00000
+muls z eq - c0000000 40400000 c0c00000
+muls n eq - 40400000 c0000000 c0c00000
+muls m eq - 40400000 c0000000 c0c00000
+muls p eq - 40400000 c0000000 c0c00000
+muls z eq - 40400000 c0000000 c0c00000
+muls n eq - c0400000 40400000 c1100000
+muls m eq - c0400000 40400000 c1100000
+muls p eq - c0400000 40400000 c1100000
+muls z eq - c0400000 40400000 c1100000
+muls n eq - bf800000 bf800000 3f800000
+muls m eq - bf800000 bf800000 3f800000
+muls p eq - bf800000 bf800000 3f800000
+muls z eq - bf800000 bf800000 3f800000
+muls n eq - bf800000 c0000000 40000000
+muls m eq - bf800000 c0000000 40000000
+muls p eq - bf800000 c0000000 40000000
+muls z eq - bf800000 c0000000 40000000
+muls n eq - c0000000 bf800000 40000000
+muls m eq - c0000000 bf800000 40000000
+muls p eq - c0000000 bf800000 40000000
+muls z eq - c0000000 bf800000 40000000
+muls n eq - c0000000 c0400000 40c00000
+muls m eq - c0000000 c0400000 40c00000
+muls p eq - c0000000 c0400000 40c00000
+muls z eq - c0000000 c0400000 40c00000
+muls n eq - c0400000 c0000000 40c00000
+muls m eq - c0400000 c0000000 40c00000
+muls p eq - c0400000 c0000000 40c00000
+muls z eq - c0400000 c0000000 40c00000
+muls n eq - c0400000 c0400000 41100000
+muls m eq - c0400000 c0400000 41100000
+muls p eq - c0400000 c0400000 41100000
+muls z eq - c0400000 c0400000 41100000
+muls n eq - 00000000 00000000 00000000
+muls m eq - 00000000 00000000 00000000
+muls p eq - 00000000 00000000 00000000
+muls z eq - 00000000 00000000 00000000
+muls n eq - 00000000 80000000 80000000
+muls m eq - 00000000 80000000 80000000
+muls p eq - 00000000 80000000 80000000
+muls z eq - 00000000 80000000 80000000
+muls n uo v 00000000 7f800000 7fff0000
+muls m uo v 00000000 7f800000 7fff0000
+muls p uo v 00000000 7f800000 7fff0000
+muls z uo v 00000000 7f800000 7fff0000
+muls n uo v 00000000 ff800000 ffff0000
+muls m uo v 00000000 ff800000 ffff0000
+muls p uo v 00000000 ff800000 ffff0000
+muls z uo v 00000000 ff800000 ffff0000
+muls n eq - c0000000 7f800000 ff800000
+muls m eq - c0000000 7f800000 ff800000
+muls p eq - c0000000 7f800000 ff800000
+muls z eq - c0000000 7f800000 ff800000
+muls n eq - c0800000 ff800000 7f800000
+muls m eq - c0800000 ff800000 7f800000
+muls p eq - c0800000 ff800000 7f800000
+muls z eq - c0800000 ff800000 7f800000
+muls n eq - 40a00000 7f800000 7f800000
+muls m eq - 40a00000 7f800000 7f800000
+muls p eq - 40a00000 7f800000 7f800000
+muls z eq - 40a00000 7f800000 7f800000
+muls n eq - 40e00000 ff800000 ff800000
+muls m eq - 40e00000 ff800000 ff800000
+muls p eq - 40e00000 ff800000 ff800000
+muls z eq - 40e00000 ff800000 ff800000
+muls n eq - 00000000 3f800000 00000000
+muls m eq - 00000000 3f800000 00000000
+muls p eq - 00000000 3f800000 00000000
+muls z eq - 00000000 3f800000 00000000
+muls n eq - c0000000 00000000 80000000
+muls m eq - c0000000 00000000 80000000
+muls p eq - c0000000 00000000 80000000
+muls z eq - c0000000 00000000 80000000
+muls n eq - 00000000 c0400000 80000000
+muls m eq - 00000000 c0400000 80000000
+muls p eq - 00000000 c0400000 80000000
+muls z eq - 00000000 c0400000 80000000
+muls n eq - c0800000 80000000 00000000
+muls m eq - c0800000 80000000 00000000
+muls p eq - c0800000 80000000 00000000
+muls z eq - c0800000 80000000 00000000
+muls n eq - 40a00000 00000000 00000000
+muls m eq - 40a00000 00000000 00000000
+muls p eq - 40a00000 00000000 00000000
+muls z eq - 40a00000 00000000 00000000
+muls n eq - 40e00000 80000000 80000000
+muls m eq - 40e00000 80000000 80000000
+muls p eq - 40e00000 80000000 80000000
+muls z eq - 40e00000 80000000 80000000
+muls n eq - 00000000 ff000000 80000000
+muls m eq - 00000000 ff000000 80000000
+muls p eq - 00000000 ff000000 80000000
+muls z eq - 00000000 ff000000 80000000
+muls n eq - 00000000 7effffff 00000000
+muls m eq - 00000000 7effffff 00000000
+muls p eq - 00000000 7effffff 00000000
+muls z eq - 00000000 7effffff 00000000
+muls n eq - 00000000 fe7fffff 80000000
+muls m eq - 00000000 fe7fffff 80000000
+muls p eq - 00000000 fe7fffff 80000000
+muls z eq - 00000000 fe7fffff 80000000
+muls n eq - 00000000 ff7fffff 80000000
+muls m eq - 00000000 ff7fffff 80000000
+muls p eq - 00000000 ff7fffff 80000000
+muls z eq - 00000000 ff7fffff 80000000
+muls n eq - 00000000 81000000 80000000
+muls m eq - 00000000 81000000 80000000
+muls p eq - 00000000 81000000 80000000
+muls z eq - 00000000 81000000 80000000
+muls n eq - 00000000 00ffffff 00000000
+muls m eq - 00000000 00ffffff 00000000
+muls p eq - 00000000 00ffffff 00000000
+muls z eq - 00000000 00ffffff 00000000
+muls n eq - 00000000 80800001 80000000
+muls m eq - 00000000 80800001 80000000
+muls p eq - 00000000 80800001 80000000
+muls z eq - 00000000 80800001 80000000
+muls n eq - 00000000 80000002 80000000
+muls m eq - 00000000 80000002 80000000
+muls p eq - 00000000 80000002 80000000
+muls z eq - 00000000 80000002 80000000
+muls n eq - 00000000 007fffff 00000000
+muls m eq - 00000000 007fffff 00000000
+muls p eq - 00000000 007fffff 00000000
+muls z eq - 00000000 007fffff 00000000
+muls n eq - 00000000 807fffff 80000000
+muls m eq - 00000000 807fffff 80000000
+muls p eq - 00000000 807fffff 80000000
+muls z eq - 00000000 807fffff 80000000
+muls n eq - 40000000 7e800000 7f000000
+muls m eq - 40000000 7e800000 7f000000
+muls p eq - 40000000 7e800000 7f000000
+muls z eq - 40000000 7e800000 7f000000
+muls n eq - c0000000 7e7fffff feffffff
+muls m eq - c0000000 7e7fffff feffffff
+muls p eq - c0000000 7e7fffff feffffff
+muls z eq - c0000000 7e7fffff feffffff
+muls n eq - 40000000 fe7ffffd fefffffd
+muls m eq - 40000000 fe7ffffd fefffffd
+muls p eq - 40000000 fe7ffffd fefffffd
+muls z eq - 40000000 fe7ffffd fefffffd
+muls n eq - 40000000 7effffff 7f7fffff
+muls m eq - 40000000 7effffff 7f7fffff
+muls p eq - 40000000 7effffff 7f7fffff
+muls z eq - 40000000 7effffff 7f7fffff
+muls n eq - c0000000 7e800001 ff000001
+muls m eq - c0000000 7e800001 ff000001
+muls p eq - c0000000 7e800001 ff000001
+muls z eq - c0000000 7e800001 ff000001
+muls n eq - 40000000 fe800003 ff000003
+muls m eq - 40000000 fe800003 ff000003
+muls p eq - 40000000 fe800003 ff000003
+muls z eq - 40000000 fe800003 ff000003
+muls n eq - 40800000 7e7fffff 7f7fffff
+muls m eq - 40800000 7e7fffff 7f7fffff
+muls p eq - 40800000 7e7fffff 7f7fffff
+muls z eq - 40800000 7e7fffff 7f7fffff
+muls n eq - c0800000 7e7fffff ff7fffff
+muls m eq - c0800000 7e7fffff ff7fffff
+muls p eq - c0800000 7e7fffff ff7fffff
+muls z eq - c0800000 7e7fffff ff7fffff
+muls n eq - 40800000 fe7fffff ff7fffff
+muls m eq - 40800000 fe7fffff ff7fffff
+muls p eq - 40800000 fe7fffff ff7fffff
+muls z eq - 40800000 fe7fffff ff7fffff
+muls n eq - c0800000 fe7fffff 7f7fffff
+muls m eq - c0800000 fe7fffff 7f7fffff
+muls p eq - c0800000 fe7fffff 7f7fffff
+muls z eq - c0800000 fe7fffff 7f7fffff
+muls n eq - 40000000 00800000 01000000
+muls m eq - 40000000 00800000 01000000
+muls p eq - 40000000 00800000 01000000
+muls z eq - 40000000 00800000 01000000
+muls n eq - c0000000 00800001 81000001
+muls m eq - c0000000 00800001 81000001
+muls p eq - c0000000 00800001 81000001
+muls z eq - c0000000 00800001 81000001
+muls n eq - 40000000 80800003 81000003
+muls m eq - 40000000 80800003 81000003
+muls p eq - 40000000 80800003 81000003
+muls z eq - 40000000 80800003 81000003
+muls n eq - c0000000 00800009 81000009
+muls m eq - c0000000 00800009 81000009
+muls p eq - c0000000 00800009 81000009
+muls z eq - c0000000 00800009 81000009
+muls n eq - 40000000 80800005 81000005
+muls m eq - 40000000 80800005 81000005
+muls p eq - 40000000 80800005 81000005
+muls z eq - 40000000 80800005 81000005
+muls n eq - 40000000 00800001 01000001
+muls m eq - 40000000 00800001 01000001
+muls p eq - 40000000 00800001 01000001
+muls z eq - 40000000 00800001 01000001
+muls n eq - c0000000 00800005 81000005
+muls m eq - c0000000 00800005 81000005
+muls p eq - c0000000 00800005 81000005
+muls z eq - c0000000 00800005 81000005
+muls n eq - c0000000 007ffffd 80fffffa
+muls m eq - c0000000 007ffffd 80fffffa
+muls p eq - c0000000 007ffffd 80fffffa
+muls z eq - c0000000 007ffffd 80fffffa
+muls n eq - 40000000 807ffffd 80fffffa
+muls m eq - 40000000 807ffffd 80fffffa
+muls p eq - 40000000 807ffffd 80fffffa
+muls z eq - 40000000 807ffffd 80fffffa
+muls n eq - 40000000 007fffff 00fffffe
+muls m eq - 40000000 007fffff 00fffffe
+muls p eq - 40000000 007fffff 00fffffe
+muls z eq - 40000000 007fffff 00fffffe
+muls n eq - c0000000 807ffffd 00fffffa
+muls m eq - c0000000 807ffffd 00fffffa
+muls p eq - c0000000 807ffffd 00fffffa
+muls z eq - c0000000 807ffffd 00fffffa
+muls n eq - 40000000 007ffffc 00fffff8
+muls m eq - 40000000 007ffffc 00fffff8
+muls p eq - 40000000 007ffffc 00fffff8
+muls z eq - 40000000 007ffffc 00fffff8
+muls n eq - 40400000 00000002 00000006
+muls m eq - 40400000 00000002 00000006
+muls p eq - 40400000 00000002 00000006
+muls z eq - 40400000 00000002 00000006
+muls n eq - 3f800000 80000009 80000009
+muls m eq - 3f800000 80000009 80000009
+muls p eq - 3f800000 80000009 80000009
+muls z eq - 3f800000 80000009 80000009
+muls n eq - 40800000 00000002 00000008
+muls m eq - 40800000 00000002 00000008
+muls p eq - 40800000 00000002 00000008
+muls z eq - 40800000 00000002 00000008
+muls n eq - 40a00000 80000001 80000005
+muls m eq - 40a00000 80000001 80000005
+muls p eq - 40a00000 80000001 80000005
+muls z eq - 40a00000 80000001 80000005
+muls n eq - bf800000 80000004 00000004
+muls m eq - bf800000 80000004 00000004
+muls p eq - bf800000 80000004 00000004
+muls z eq - bf800000 80000004 00000004
+muls n eq - 40000000 00000001 00000002
+muls m eq - 40000000 00000001 00000002
+muls p eq - 40000000 00000001 00000002
+muls z eq - 40000000 00000001 00000002
+muls n eq - 3f800000 01000003 01000003
+muls m eq - 3f800000 01000003 01000003
+muls p eq - 3f800000 01000003 01000003
+muls z eq - 3f800000 01000003 01000003
+muls n eq - bf800000 00800009 80800009
+muls m eq - bf800000 00800009 80800009
+muls p eq - bf800000 00800009 80800009
+muls z eq - bf800000 00800009 80800009
+muls n eq - 3f800000 007ffffd 007ffffd
+muls m eq - 3f800000 007ffffd 007ffffd
+muls p eq - 3f800000 007ffffd 007ffffd
+muls z eq - 3f800000 007ffffd 007ffffd
+muls n eq - bf800000 00000009 80000009
+muls m eq - bf800000 00000009 80000009
+muls p eq - bf800000 00000009 80000009
+muls z eq - bf800000 00000009 80000009
+muls n eq - 3f800000 80fffffe 80fffffe
+muls m eq - 3f800000 80fffffe 80fffffe
+muls p eq - 3f800000 80fffffe 80fffffe
+muls z eq - 3f800000 80fffffe 80fffffe
+muls n eq - bf800000 80800001 00800001
+muls m eq - bf800000 80800001 00800001
+muls p eq - bf800000 80800001 00800001
+muls z eq - bf800000 80800001 00800001
+muls n eq - 3f800000 80000002 80000002
+muls m eq - 3f800000 80000002 80000002
+muls p eq - 3f800000 80000002 80000002
+muls z eq - 3f800000 80000002 80000002
+muls n eq - bf800000 807fffff 007fffff
+muls m eq - bf800000 807fffff 007fffff
+muls p eq - bf800000 807fffff 007fffff
+muls z eq - bf800000 807fffff 007fffff
+muls n eq xo c1100000 7f000000 ff800000
+muls m eq xo c1100000 7f000000 ff800000
+muls z eq xo c0e00000 ff000000 7f7fffff
+muls m eq xo c0e00000 ff000000 7f7fffff
+muls z eq xo c0a00000 7f000000 ff7fffff
+muls p eq xo c0a00000 7f000000 ff7fffff
+muls n eq xo c0400000 ff000000 7f800000
+muls p eq xo c0400000 ff000000 7f800000
+muls n eq xo 40000000 7f000000 7f800000
+muls p eq xo 40000000 7f000000 7f800000
+muls z eq xo 40c00000 7f000000 7f7fffff
+muls m eq xo 40c00000 7f000000 7f7fffff
+muls z eq xo c0000000 7f000000 ff7fffff
+muls p eq xo c0000000 7f000000 ff7fffff
+muls n eq - 3f800000 80000001 80000001
+muls m eq - 3f800000 80000001 80000001
+muls p eq - 3f800000 80000001 80000001
+muls z eq - 3f800000 80000001 80000001
+muls n eq - 3f800000 00000001 00000001
+muls m eq - 3f800000 00000001 00000001
+muls p eq - 3f800000 00000001 00000001
+muls z eq - 3f800000 00000001 00000001
+muls n uo - 00000000 7fff0000 7fff0000
+muls m uo - 00000000 7fff0000 7fff0000
+muls p uo - 00000000 7fff0000 7fff0000
+muls z uo - 00000000 7fff0000 7fff0000
+muls n uo - 3f800000 7fff0000 7fff0000
+muls m uo - 3f800000 7fff0000 7fff0000
+muls p uo - 3f800000 7fff0000 7fff0000
+muls z uo - 3f800000 7fff0000 7fff0000
+muls n uo - bf800000 7fff0000 7fff0000
+muls m uo - bf800000 7fff0000 7fff0000
+muls p uo - bf800000 7fff0000 7fff0000
+muls z uo - bf800000 7fff0000 7fff0000
+muls n uo v 00000000 7f810000 7fff0000
+muls m uo v 00000000 7f810000 7fff0000
+muls p uo v 00000000 7f810000 7fff0000
+muls z uo v 00000000 7f810000 7fff0000
+muls n uo v 3f800000 7f810000 7fff0000
+muls m uo v 3f800000 7f810000 7fff0000
+muls p uo v 3f800000 7f810000 7fff0000
+muls z uo v 3f800000 7f810000 7fff0000
+muls n uo v bf800000 7f810000 7fff0000
+muls m uo v bf800000 7f810000 7fff0000
+muls p uo v bf800000 7f810000 7fff0000
+muls z uo v bf800000 7f810000 7fff0000
+muls n eq - 46000000 4e800000 55000000
+muls m eq - 46000000 4e800000 55000000
+muls p eq - 46000000 4e800000 55000000
+muls z eq - 46000000 4e800000 55000000
+muls n eq - 45000000 4f000000 54800000
+muls m eq - 45000000 4f000000 54800000
+muls p eq - 45000000 4f000000 54800000
+muls z eq - 45000000 4f000000 54800000
+muls n eq - 45800000 5e800000 64800000
+muls m eq - 45800000 5e800000 64800000
+muls p eq - 45800000 5e800000 64800000
+muls z eq - 45800000 5e800000 64800000
+muls n eq - 40800000 5d800000 5e800000
+muls m eq - 40800000 5d800000 5e800000
+muls p eq - 40800000 5d800000 5e800000
+muls z eq - 40800000 5d800000 5e800000
+muls n eq - 40000000 5e000000 5e800000
+muls m eq - 40000000 5e000000 5e800000
+muls p eq - 40000000 5e000000 5e800000
+muls z eq - 40000000 5e000000 5e800000
+muls n eq - c5000000 cf800000 55000000
+muls m eq - c5000000 cf800000 55000000
+muls p eq - c5000000 cf800000 55000000
+muls z eq - c5000000 cf800000 55000000
+muls n eq - c5000000 4f000000 d4800000
+muls m eq - c5000000 4f000000 d4800000
+muls p eq - c5000000 4f000000 d4800000
+muls z eq - c5000000 4f000000 d4800000
+muls n eq - c5000000 cf000000 54800000
+muls m eq - c5000000 cf000000 54800000
+muls p eq - c5000000 cf000000 54800000
+muls z eq - c5000000 cf000000 54800000
+muls n eq - 00000000 5e800000 00000000
+muls m eq - 00000000 5e800000 00000000
+muls p eq - 00000000 5e800000 00000000
+muls z eq - 00000000 5e800000 00000000
+muls n eq - 45800000 00000000 00000000
+muls m eq - 45800000 00000000 00000000
+muls p eq - 45800000 00000000 00000000
+muls z eq - 45800000 00000000 00000000
+muls n eq - 45800000 80000000 80000000
+muls m eq - 45800000 80000000 80000000
+muls p eq - 45800000 80000000 80000000
+muls z eq - 45800000 80000000 80000000
+muls n eq - c6800000 52000000 d9000000
+muls m eq - c6800000 52000000 d9000000
+muls p eq - c6800000 52000000 d9000000
+muls z eq - c6800000 52000000 d9000000
+muls n eq - 46800000 cb000000 d2000000
+muls m eq - 46800000 cb000000 d2000000
+muls p eq - 46800000 cb000000 d2000000
+muls z eq - 46800000 cb000000 d2000000
+muls n eq - c0800000 5e000000 df000000
+muls m eq - c0800000 5e000000 df000000
+muls p eq - c0800000 5e000000 df000000
+muls z eq - c0800000 5e000000 df000000
+muls n eq - 45800000 c0000000 c6000000
+muls m eq - 45800000 c0000000 c6000000
+muls p eq - 45800000 c0000000 c6000000
+muls z eq - 45800000 c0000000 c6000000
+muls n eq - 45800000 de000000 e4000000
+muls m eq - 45800000 de000000 e4000000
+muls p eq - 45800000 de000000 e4000000
+muls z eq - 45800000 de000000 e4000000
+muls n eq - c5000000 c0000000 45800000
+muls m eq - c5000000 c0000000 45800000
+muls p eq - c5000000 c0000000 45800000
+muls z eq - c5000000 c0000000 45800000
+muls n eq - c0000000 de000000 5e800000
+muls m eq - c0000000 de000000 5e800000
+muls p eq - c0000000 de000000 5e800000
+muls z eq - c0000000 de000000 5e800000
+muls n eq - c0000000 5e000000 de800000
+muls m eq - c0000000 5e000000 de800000
+muls p eq - c0000000 5e000000 de800000
+muls z eq - c0000000 5e000000 de800000
+muls n eq - 40000000 46800000 47000000
+muls m eq - 40000000 46800000 47000000
+muls p eq - 40000000 46800000 47000000
+muls z eq - 40000000 46800000 47000000
+muls n eq - 46800000 40000000 47000000
+muls m eq - 46800000 40000000 47000000
+muls p eq - 46800000 40000000 47000000
+muls z eq - 46800000 40000000 47000000
+muls n eq - c0000000 46800000 c7000000
+muls m eq - c0000000 46800000 c7000000
+muls p eq - c0000000 46800000 c7000000
+muls z eq - c0000000 46800000 c7000000
+muls n eq - c6800000 c0000000 47000000
+muls m eq - c6800000 c0000000 47000000
+muls p eq - c6800000 c0000000 47000000
+muls z eq - c6800000 c0000000 47000000
+muls n eq xu 197e03f7 26810000 007fffff
+muls m eq xu 197e03f7 26810000 007fffff
+muls z eq xu 197e03f7 26810000 007fffff
+muls p eq x?u 197e03f7 26810000 00800000
+muls m eq xu 197e03f8 26810000 007fffff
+muls z eq xu 197e03f8 26810000 007fffff
+muls n eq x?u 197e03f8 26810000 00800000
+muls p eq x?u 197e03f8 26810000 00800000
+muls n eq - 00000001 4c000000 01800000
+muls m eq - 00000001 4c000000 01800000
+muls p eq - 00000001 4c000000 01800000
+muls z eq - 00000001 4c000000 01800000
+muls n eq xu 3f000000 00000001 00000000
+muls z eq xu 3f000000 00000001 00000000
+muls m eq xu 3f000000 00000001 00000000
+muls n eq xu bf000001 00000001 80000001
+muls m eq xu bf000001 00000001 80000001
+muls n eq xu 3fbfffff 80000001 80000001
+muls z eq xu 3fbfffff 80000001 80000001
+muls p eq xu 3fbfffff 80000001 80000001
+muls n eq xu bfc00000 80000001 00000002
+muls p eq xu bfc00000 80000001 00000002
+muls n eq xu 40200000 00000001 00000002
+muls z eq xu 40200000 00000001 00000002
+muls m eq xu 40200000 00000001 00000002
+muls n eq xu c0200001 00000001 80000003
+muls m eq xu c0200001 00000001 80000003
+muls n eq xu 405fffff 80000001 80000003
+muls z eq xu 405fffff 80000001 80000003
+muls p eq xu 405fffff 80000001 80000003
+muls n eq xu c0600000 80000001 00000004
+muls p eq xu c0600000 80000001 00000004
+muls n eq - 80000000 00000000 80000000
+muls m eq - 80000000 00000000 80000000
+muls p eq - 80000000 00000000 80000000
+muls z eq - 80000000 00000000 80000000
+muls n eq - 80000000 80000000 00000000
+muls m eq - 80000000 80000000 00000000
+muls p eq - 80000000 80000000 00000000
+muls z eq - 80000000 80000000 00000000
+muls n eq - 7f800000 7f800000 7f800000
+muls m eq - 7f800000 7f800000 7f800000
+muls p eq - 7f800000 7f800000 7f800000
+muls z eq - 7f800000 7f800000 7f800000
+muls n eq - ff800000 7f800000 ff800000
+muls m eq - ff800000 7f800000 ff800000
+muls p eq - ff800000 7f800000 ff800000
+muls z eq - ff800000 7f800000 ff800000
+muls n eq - 7f800000 ff800000 ff800000
+muls m eq - 7f800000 ff800000 ff800000
+muls p eq - 7f800000 ff800000 ff800000
+muls z eq - 7f800000 ff800000 ff800000
+muls n eq - ff800000 ff800000 7f800000
+muls m eq - ff800000 ff800000 7f800000
+muls p eq - ff800000 ff800000 7f800000
+muls z eq - ff800000 ff800000 7f800000
+muls n uo v 7f800000 00000000 7fff0000
+muls m uo v 7f800000 00000000 7fff0000
+muls p uo v 7f800000 00000000 7fff0000
+muls z uo v 7f800000 00000000 7fff0000
+muls n uo v 80000000 7f800000 ffff0000
+muls m uo v 80000000 7f800000 ffff0000
+muls p uo v 80000000 7f800000 ffff0000
+muls z uo v 80000000 7f800000 ffff0000
+muls n uo v 7f800000 80000000 ffff0000
+muls m uo v 7f800000 80000000 ffff0000
+muls p uo v 7f800000 80000000 ffff0000
+muls z uo v 7f800000 80000000 ffff0000
+muls n uo v 80000000 ff800000 7fff0000
+muls m uo v 80000000 ff800000 7fff0000
+muls p uo v 80000000 ff800000 7fff0000
+muls z uo v 80000000 ff800000 7fff0000
+muls n eq - 7f800000 3f800000 7f800000
+muls m eq - 7f800000 3f800000 7f800000
+muls p eq - 7f800000 3f800000 7f800000
+muls z eq - 7f800000 3f800000 7f800000
+muls n eq - 7f800000 c0400000 ff800000
+muls m eq - 7f800000 c0400000 ff800000
+muls p eq - 7f800000 c0400000 ff800000
+muls z eq - 7f800000 c0400000 ff800000
+muls n eq - ff800000 40c00000 ff800000
+muls m eq - ff800000 40c00000 ff800000
+muls p eq - ff800000 40c00000 ff800000
+muls z eq - ff800000 40c00000 ff800000
+muls n eq - ff800000 c1000000 7f800000
+muls m eq - ff800000 c1000000 7f800000
+muls p eq - ff800000 c1000000 7f800000
+muls z eq - ff800000 c1000000 7f800000
+muls n eq - 7f000000 7f800000 7f800000
+muls m eq - 7f000000 7f800000 7f800000
+muls p eq - 7f000000 7f800000 7f800000
+muls z eq - 7f000000 7f800000 7f800000
+muls n eq - fe800000 7f800000 ff800000
+muls m eq - fe800000 7f800000 ff800000
+muls p eq - fe800000 7f800000 ff800000
+muls z eq - fe800000 7f800000 ff800000
+muls n eq - 7f800000 ff000000 ff800000
+muls m eq - 7f800000 ff000000 ff800000
+muls p eq - 7f800000 ff000000 ff800000
+muls z eq - 7f800000 ff000000 ff800000
+muls n eq - ff800000 fe800000 7f800000
+muls m eq - ff800000 fe800000 7f800000
+muls p eq - ff800000 fe800000 7f800000
+muls z eq - ff800000 fe800000 7f800000
+muls n eq - 7f800000 7effffff 7f800000
+muls m eq - 7f800000 7effffff 7f800000
+muls p eq - 7f800000 7effffff 7f800000
+muls z eq - 7f800000 7effffff 7f800000
+muls n eq - fe7fffff 7f800000 ff800000
+muls m eq - fe7fffff 7f800000 ff800000
+muls p eq - fe7fffff 7f800000 ff800000
+muls z eq - fe7fffff 7f800000 ff800000
+muls n eq - 7f800000 ff7fffff ff800000
+muls m eq - 7f800000 ff7fffff ff800000
+muls p eq - 7f800000 ff7fffff ff800000
+muls z eq - 7f800000 ff7fffff ff800000
+muls n eq - ff7fffff ff800000 7f800000
+muls m eq - ff7fffff ff800000 7f800000
+muls p eq - ff7fffff ff800000 7f800000
+muls z eq - ff7fffff ff800000 7f800000
+muls n eq - 00800000 7f800000 7f800000
+muls m eq - 00800000 7f800000 7f800000
+muls p eq - 00800000 7f800000 7f800000
+muls z eq - 00800000 7f800000 7f800000
+muls n eq - 81000000 7f800000 ff800000
+muls m eq - 81000000 7f800000 ff800000
+muls p eq - 81000000 7f800000 ff800000
+muls z eq - 81000000 7f800000 ff800000
+muls n eq - 7f800000 81000000 ff800000
+muls m eq - 7f800000 81000000 ff800000
+muls p eq - 7f800000 81000000 ff800000
+muls z eq - 7f800000 81000000 ff800000
+muls n eq - ff800000 80800000 7f800000
+muls m eq - ff800000 80800000 7f800000
+muls p eq - ff800000 80800000 7f800000
+muls z eq - ff800000 80800000 7f800000
+muls n eq - 7f800000 00ffffff 7f800000
+muls m eq - 7f800000 00ffffff 7f800000
+muls p eq - 7f800000 00ffffff 7f800000
+muls z eq - 7f800000 00ffffff 7f800000
+muls n eq - 80800001 7f800000 ff800000
+muls m eq - 80800001 7f800000 ff800000
+muls p eq - 80800001 7f800000 ff800000
+muls z eq - 80800001 7f800000 ff800000
+muls n eq - 7f800000 80800001 ff800000
+muls m eq - 7f800000 80800001 ff800000
+muls p eq - 7f800000 80800001 ff800000
+muls z eq - 7f800000 80800001 ff800000
+muls n eq - 80ffffff ff800000 7f800000
+muls m eq - 80ffffff ff800000 7f800000
+muls p eq - 80ffffff ff800000 7f800000
+muls z eq - 80ffffff ff800000 7f800000
+muls n eq - 00000001 7f800000 7f800000
+muls m eq - 00000001 7f800000 7f800000
+muls p eq - 00000001 7f800000 7f800000
+muls z eq - 00000001 7f800000 7f800000
+muls n eq - 80000003 7f800000 ff800000
+muls m eq - 80000003 7f800000 ff800000
+muls p eq - 80000003 7f800000 ff800000
+muls z eq - 80000003 7f800000 ff800000
+muls n eq - 7f800000 80000002 ff800000
+muls m eq - 7f800000 80000002 ff800000
+muls p eq - 7f800000 80000002 ff800000
+muls z eq - 7f800000 80000002 ff800000
+muls n eq - ff800000 80000004 7f800000
+muls m eq - ff800000 80000004 7f800000
+muls p eq - ff800000 80000004 7f800000
+muls z eq - ff800000 80000004 7f800000
+muls n eq - 7f800000 007fffff 7f800000
+muls m eq - 7f800000 007fffff 7f800000
+muls p eq - 7f800000 007fffff 7f800000
+muls z eq - 7f800000 007fffff 7f800000
+muls n eq - 807fffff 7f800000 ff800000
+muls m eq - 807fffff 7f800000 ff800000
+muls p eq - 807fffff 7f800000 ff800000
+muls z eq - 807fffff 7f800000 ff800000
+muls n eq - 7f800000 807fffff ff800000
+muls m eq - 7f800000 807fffff ff800000
+muls p eq - 7f800000 807fffff ff800000
+muls z eq - 7f800000 807fffff ff800000
+muls n eq - 807fffff ff800000 7f800000
+muls m eq - 807fffff ff800000 7f800000
+muls p eq - 807fffff ff800000 7f800000
+muls z eq - 807fffff ff800000 7f800000
+muls n eq - 80000000 40c00000 80000000
+muls m eq - 80000000 40c00000 80000000
+muls p eq - 80000000 40c00000 80000000
+muls z eq - 80000000 40c00000 80000000
+muls n eq - 80000000 c1000000 00000000
+muls m eq - 80000000 c1000000 00000000
+muls p eq - 80000000 c1000000 00000000
+muls z eq - 80000000 c1000000 00000000
+muls n eq - 7f000000 00000000 00000000
+muls m eq - 7f000000 00000000 00000000
+muls p eq - 7f000000 00000000 00000000
+muls z eq - 7f000000 00000000 00000000
+muls n eq - fe800000 00000000 80000000
+muls m eq - fe800000 00000000 80000000
+muls p eq - fe800000 00000000 80000000
+muls z eq - fe800000 00000000 80000000
+muls n eq - 80000000 fe800000 00000000
+muls m eq - 80000000 fe800000 00000000
+muls p eq - 80000000 fe800000 00000000
+muls z eq - 80000000 fe800000 00000000
+muls n eq - fe7fffff 00000000 80000000
+muls m eq - fe7fffff 00000000 80000000
+muls p eq - fe7fffff 00000000 80000000
+muls z eq - fe7fffff 00000000 80000000
+muls n eq - feffffff 80000000 00000000
+muls m eq - feffffff 80000000 00000000
+muls p eq - feffffff 80000000 00000000
+muls z eq - feffffff 80000000 00000000
+muls n eq - 7f7fffff 00000000 00000000
+muls m eq - 7f7fffff 00000000 00000000
+muls p eq - 7f7fffff 00000000 00000000
+muls z eq - 7f7fffff 00000000 00000000
+muls n eq - ff7fffff 80000000 00000000
+muls m eq - ff7fffff 80000000 00000000
+muls p eq - ff7fffff 80000000 00000000
+muls z eq - ff7fffff 80000000 00000000
+muls n eq - 80000000 7f7fffff 80000000
+muls m eq - 80000000 7f7fffff 80000000
+muls p eq - 80000000 7f7fffff 80000000
+muls z eq - 80000000 7f7fffff 80000000
+muls n eq - 00800000 00000000 00000000
+muls m eq - 00800000 00000000 00000000
+muls p eq - 00800000 00000000 00000000
+muls z eq - 00800000 00000000 00000000
+muls n eq - 81000000 00000000 80000000
+muls m eq - 81000000 00000000 80000000
+muls p eq - 81000000 00000000 80000000
+muls z eq - 81000000 00000000 80000000
+muls n eq - 80000000 80800000 00000000
+muls m eq - 80000000 80800000 00000000
+muls p eq - 80000000 80800000 00000000
+muls z eq - 80000000 80800000 00000000
+muls n eq - 80800001 00000000 80000000
+muls m eq - 80800001 00000000 80000000
+muls p eq - 80800001 00000000 80000000
+muls z eq - 80800001 00000000 80000000
+muls n eq - 80ffffff 80000000 00000000
+muls m eq - 80ffffff 80000000 00000000
+muls p eq - 80ffffff 80000000 00000000
+muls z eq - 80ffffff 80000000 00000000
+muls n eq - 00000001 00000000 00000000
+muls m eq - 00000001 00000000 00000000
+muls p eq - 00000001 00000000 00000000
+muls z eq - 00000001 00000000 00000000
+muls n eq - 80000003 00000000 80000000
+muls m eq - 80000003 00000000 80000000
+muls p eq - 80000003 00000000 80000000
+muls z eq - 80000003 00000000 80000000
+muls n eq - 80000000 80000004 00000000
+muls m eq - 80000000 80000004 00000000
+muls p eq - 80000000 80000004 00000000
+muls z eq - 80000000 80000004 00000000
+muls n eq - 807fffff 00000000 80000000
+muls m eq - 807fffff 00000000 80000000
+muls p eq - 807fffff 00000000 80000000
+muls z eq - 807fffff 00000000 80000000
+muls n eq - 807fffff 80000000 00000000
+muls m eq - 807fffff 80000000 00000000
+muls p eq - 807fffff 80000000 00000000
+muls z eq - 807fffff 80000000 00000000
+muls n eq - 7e800000 c0000000 ff000000
+muls m eq - 7e800000 c0000000 ff000000
+muls p eq - 7e800000 c0000000 ff000000
+muls z eq - 7e800000 c0000000 ff000000
+muls n eq - 7effffff c0000000 ff7fffff
+muls m eq - 7effffff c0000000 ff7fffff
+muls p eq - 7effffff c0000000 ff7fffff
+muls z eq - 7effffff c0000000 ff7fffff
+muls n eq - 7e7ffffd 40800000 7f7ffffd
+muls m eq - 7e7ffffd 40800000 7f7ffffd
+muls p eq - 7e7ffffd 40800000 7f7ffffd
+muls z eq - 7e7ffffd 40800000 7f7ffffd
+muls n eq - 7e7ffffd c0800000 ff7ffffd
+muls m eq - 7e7ffffd c0800000 ff7ffffd
+muls p eq - 7e7ffffd c0800000 ff7ffffd
+muls z eq - 7e7ffffd c0800000 ff7ffffd
+muls n eq - fe7ffffd 40800000 ff7ffffd
+muls m eq - fe7ffffd 40800000 ff7ffffd
+muls p eq - fe7ffffd 40800000 ff7ffffd
+muls z eq - fe7ffffd 40800000 ff7ffffd
+muls n eq - fe7ffffd c0800000 7f7ffffd
+muls m eq - fe7ffffd c0800000 7f7ffffd
+muls p eq - fe7ffffd c0800000 7f7ffffd
+muls z eq - fe7ffffd c0800000 7f7ffffd
+muls n eq - 00800000 c0000000 81000000
+muls m eq - 00800000 c0000000 81000000
+muls p eq - 00800000 c0000000 81000000
+muls z eq - 00800000 c0000000 81000000
+muls n eq - 00800001 c0000000 81000001
+muls m eq - 00800001 c0000000 81000001
+muls p eq - 00800001 c0000000 81000001
+muls z eq - 00800001 c0000000 81000001
+muls n eq - 007fffff 40000000 00fffffe
+muls m eq - 007fffff 40000000 00fffffe
+muls p eq - 007fffff 40000000 00fffffe
+muls z eq - 007fffff 40000000 00fffffe
+muls n eq - 807ffffd c0000000 00fffffa
+muls m eq - 807ffffd c0000000 00fffffa
+muls p eq - 807ffffd c0000000 00fffffa
+muls z eq - 807ffffd c0000000 00fffffa
+muls n eq - 007ffffc 40000000 00fffff8
+muls m eq - 007ffffc 40000000 00fffff8
+muls p eq - 007ffffc 40000000 00fffff8
+muls z eq - 007ffffc 40000000 00fffff8
+muls n eq - 00000001 40000000 00000002
+muls m eq - 00000001 40000000 00000002
+muls p eq - 00000001 40000000 00000002
+muls z eq - 00000001 40000000 00000002
+muls n eq - 80000001 40a00000 80000005
+muls m eq - 80000001 40a00000 80000005
+muls p eq - 80000001 40a00000 80000005
+muls z eq - 80000001 40a00000 80000005
+muls n eq - 80000004 bf800000 00000004
+muls m eq - 80000004 bf800000 00000004
+muls p eq - 80000004 bf800000 00000004
+muls z eq - 80000004 bf800000 00000004
+muls n eq - 80fffffe 3f800000 80fffffe
+muls m eq - 80fffffe 3f800000 80fffffe
+muls p eq - 80fffffe 3f800000 80fffffe
+muls z eq - 80fffffe 3f800000 80fffffe
+muls n eq - 80800001 bf800000 00800001
+muls m eq - 80800001 bf800000 00800001
+muls p eq - 80800001 bf800000 00800001
+muls z eq - 80800001 bf800000 00800001
+muls n eq - 80000002 3f800000 80000002
+muls m eq - 80000002 3f800000 80000002
+muls p eq - 80000002 3f800000 80000002
+muls z eq - 80000002 3f800000 80000002
+muls n eq - 807fffff bf800000 007fffff
+muls m eq - 807fffff bf800000 007fffff
+muls p eq - 807fffff bf800000 007fffff
+muls z eq - 807fffff bf800000 007fffff
+muls n eq x 3f800001 3f800001 3f800002
+muls z eq x 3f800001 3f800001 3f800002
+muls m eq x 3f800001 3f800001 3f800002
+muls p eq x 3f800001 3f800001 3f800003
+muls n eq x bf800001 3f800001 bf800002
+muls z eq x bf800001 3f800001 bf800002
+muls p eq x bf800001 3f800001 bf800002
+muls m eq x bf800001 3f800001 bf800003
+muls n eq x 3f800001 bf800001 bf800002
+muls z eq x 3f800001 bf800001 bf800002
+muls p eq x 3f800001 bf800001 bf800002
+muls m eq x 3f800001 bf800001 bf800003
+muls n eq x bf800001 bf800001 3f800002
+muls z eq x bf800001 bf800001 3f800002
+muls m eq x bf800001 bf800001 3f800002
+muls p eq x bf800001 bf800001 3f800003
+muls n eq x 3f800002 3f800001 3f800003
+muls z eq x 3f800002 3f800001 3f800003
+muls m eq x 3f800002 3f800001 3f800003
+muls p eq x 3f800002 3f800001 3f800004
+muls n eq x bf800002 3f800001 bf800003
+muls z eq x bf800002 3f800001 bf800003
+muls p eq x bf800002 3f800001 bf800003
+muls m eq x bf800002 3f800001 bf800004
+muls n eq x 3f800001 bf800002 bf800003
+muls z eq x 3f800001 bf800002 bf800003
+muls p eq x 3f800001 bf800002 bf800003
+muls m eq x 3f800001 bf800002 bf800004
+muls n eq x bf800002 bf800001 3f800003
+muls z eq x bf800002 bf800001 3f800003
+muls m eq x bf800002 bf800001 3f800003
+muls p eq x bf800001 bf800002 3f800004
+muls p eq x bf800002 bf800001 3f800004
+muls n eq x bf800001 bf800002 3f800003
+muls z eq x bf800001 bf800002 3f800003
+muls m eq x bf800001 bf800002 3f800003
+muls n eq x 40400001 3f800001 40400003
+muls p eq x 40400001 3f800001 40400003
+muls z eq x 40400001 3f800001 40400002
+muls m eq x 40400001 3f800001 40400002
+muls p eq x 40400001 3f800003 40400006
+muls n eq x 40400001 3f800003 40400006
+muls z eq x 40400001 3f800003 40400005
+muls m eq x 40400001 3f800003 40400005
+muls n eq x c0400001 3f800001 c0400003
+muls m eq x c0400001 3f800001 c0400003
+muls z eq x c0400001 3f800001 c0400002
+muls p eq x c0400001 3f800001 c0400002
+muls m eq x 40400001 bf800003 c0400006
+muls n eq x 40400001 bf800003 c0400006
+muls z eq x 40400001 bf800003 c0400005
+muls p eq x 40400001 bf800003 c0400005
+muls p eq x 40a00001 3f800001 40a00003
+muls n eq x 40a00001 3f800001 40a00002
+muls z eq x 40a00001 3f800001 40a00002
+muls m eq x 40a00001 3f800001 40a00002
+muls p eq x c0a00001 bf800001 40a00003
+muls n eq x c0a00001 bf800001 40a00002
+muls z eq x c0a00001 bf800001 40a00002
+muls m eq x c0a00001 bf800001 40a00002
+muls p eq x 40e00001 3f800001 40e00003
+muls n eq x 40e00001 3f800001 40e00003
+muls m eq x 40e00001 3f800001 40e00002
+muls z eq x 40e00001 3f800001 40e00002
+muls z eq x 403fffff 3f7fffff 403ffffe
+muls m eq x 403fffff 3f7fffff 403ffffe
+muls n eq x 403fffff 3f7fffff 403ffffe
+muls p eq x 403fffff 3f7fffff 403fffff
+muls z eq x 403fffff 3f7ffffd 403ffffc
+muls m eq x 403fffff 3f7ffffd 403ffffc
+muls n eq x 403fffff 3f7ffffd 403ffffd
+muls p eq x 403fffff 3f7ffffd 403ffffd
+muls z eq x c03fffff 3f7fffff c03ffffe
+muls p eq x c03fffff 3f7fffff c03ffffe
+muls n eq x c03fffff 3f7fffff c03ffffe
+muls m eq x c03fffff 3f7fffff c03fffff
+muls z eq x 403fffff bf7ffffd c03ffffc
+muls p eq x 403fffff bf7ffffd c03ffffc
+muls n eq x 403fffff bf7ffffd c03ffffd
+muls m eq x 403fffff bf7ffffd c03ffffd
+muls n eq x 403fffff 3f7ffffe 403ffffe
+muls p eq x 403fffff 3f7ffffe 403ffffe
+muls z eq x 403fffff 3f7ffffe 403ffffd
+muls m eq x 403fffff 3f7ffffe 403ffffd
+muls z eq x 409fffff 3f7fffff 409ffffe
+muls m eq x 409fffff 3f7fffff 409ffffe
+muls n eq x 409fffff 3f7fffff 409ffffe
+muls p eq x 409fffff 3f7fffff 409fffff
+muls z eq x c09fffff bf7fffff 409ffffe
+muls m eq x c09fffff bf7fffff 409ffffe
+muls n eq x c09fffff bf7fffff 409ffffe
+muls p eq x c09fffff bf7fffff 409fffff
+muls m eq x 40dfffff 3f7fffff 40dffffe
+muls n eq x 40dfffff 3f7fffff 40dffffe
+muls z eq x 40dfffff 3f7fffff 40dffffe
+muls p eq x 40dfffff 3f7fffff 40dfffff
+muls n eq x 40dfffff 3f7ffffc 40dffffc
+muls p eq x 40dfffff 3f7ffffc 40dffffc
+muls z eq x 40dfffff 3f7ffffc 40dffffb
+muls m eq x 40dfffff 3f7ffffc 40dffffb
+muls n eq xo 7f000000 40000000 7f800000
+muls p eq xo 7f000000 40000000 7f800000
+muls z eq xo 7f000000 40000001 7f7fffff
+muls m eq xo 7f000000 40000001 7f7fffff
+muls n eq xo c03ffffe 7f000000 ff800000
+muls m eq xo c03ffffe 7f000000 ff800000
+muls z eq xo 7f000000 c0800005 ff7fffff
+muls p eq xo 7f000000 c0800005 ff7fffff
+muls n eq xo c09ffffe ff000000 7f800000
+muls p eq xo c09ffffe ff000000 7f800000
+muls z eq xo 7f000000 40c00001 7f7fffff
+muls m eq xo 7f000000 40c00001 7f7fffff
+muls n eq xo c0dffff9 7f000000 ff800000
+muls m eq xo c0dffff9 7f000000 ff800000
+muls z eq xo 7f000000 c1000003 ff7fffff
+muls p eq xo 7f000000 c1000003 ff7fffff
+muls n eq xo c1100001 ff000000 7f800000
+muls p eq xo c1100001 ff000000 7f800000
+muls z eq xo 7f000000 40c00000 7f7fffff
+muls m eq xo 7f000000 40c00000 7f7fffff
+muls z eq xo 7f000000 c0000000 ff7fffff
+muls p eq xo 7f000000 c0000000 ff7fffff
+muls n eq xo 7f000000 7f000000 7f800000
+muls p eq xo 7f000000 7f000000 7f800000
+muls n eq xo ff7ffffd 7f000000 ff800000
+muls m eq xo ff7ffffd 7f000000 ff800000
+muls n eq xo 7f000000 fe800004 ff800000
+muls m eq xo 7f000000 fe800004 ff800000
+muls n eq xo ff000005 ff000001 7f800000
+muls p eq xo ff000005 ff000001 7f800000
+muls n eq xo 7f000009 7f7ffffa 7f800000
+muls p eq xo 7f000009 7f7ffffa 7f800000
+muls n eq xo fe7ffff9 7f000000 ff800000
+muls m eq xo fe7ffff9 7f000000 ff800000
+muls n eq xo 7f000000 fe800000 ff800000
+muls m eq xo 7f000000 fe800000 ff800000
+muls n eq xo ff7fffff ff7fffff 7f800000
+muls p eq xo ff7fffff ff7fffff 7f800000
+muls z eq xo 7f000000 7e800006 7f7fffff
+muls m eq xo 7f000000 7e800006 7f7fffff
+muls n eq xo fefffff7 7e800001 ff800000
+muls m eq xo fefffff7 7e800001 ff800000
+muls n eq xo 7f000000 ff000000 ff800000
+muls m eq xo 7f000000 ff000000 ff800000
+muls z eq xo fe7ffff9 ff7fffff 7f7fffff
+muls m eq xo fe7ffff9 ff7fffff 7f7fffff
+muls n eq xo 7f000000 7f7ffffe 7f800000
+muls p eq xo 7f000000 7f7ffffe 7f800000
+muls z eq xo fe800000 7f000000 ff7fffff
+muls p eq xo fe800000 7f000000 ff7fffff
+muls z eq xo 7f000009 fe800002 ff7fffff
+muls p eq xo 7f000009 fe800002 ff7fffff
+muls n eq xo ff7ffffd ff000001 7f800000
+muls p eq xo ff7ffffd ff000001 7f800000
+muls n eq x feffffff 3f800001 ff000000
+muls z eq x feffffff 3f800001 ff000000
+muls p eq x feffffff 3f800001 ff000000
+muls m eq x feffffff 3f800001 ff000001
+muls n eq x bf7fffff ff7fffff 7f7ffffe
+muls z eq x bf7fffff ff7fffff 7f7ffffe
+muls m eq x bf7fffff ff7fffff 7f7ffffe
+muls p eq x bf7fffff ff7fffff 7f7fffff
+muls m eq x fe7fffff 40000001 ff000001
+muls n eq x fe7fffff 40000001 ff000000
+muls z eq x fe7fffff 40000001 ff000000
+muls p eq x fe7fffff 40000001 ff000000
+muls m eq xo 7efffffd c0000008 ff800000
+muls n eq xo 7efffffd c0000008 ff800000
+muls p eq xo 7efffffd c0000008 ff7fffff
+muls z eq xo 7efffffd c0000008 ff7fffff
+muls n eq x fe7ffff9 c07fffff 7f7ffff8
+muls z eq x fe7ffff9 c07fffff 7f7ffff8
+muls m eq x fe7ffff9 c07fffff 7f7ffff8
+muls p eq x fe7ffff9 c07fffff 7f7ffff9
+muls n eq xo 3f800002 7f7ffffe 7f800000
+muls p eq xo 3f800002 7f7ffffe 7f800000
+muls z eq xo 3f800002 7f7ffffe 7f7fffff
+muls m eq xo 3f800002 7f7ffffe 7f7fffff
+muls n eq xo 7f000009 c0c00002 ff800000
+muls m eq xo 7f000009 c0c00002 ff800000
+muls z eq xo 7f000009 c0c00002 ff7fffff
+muls p eq xo 7f000009 c0c00002 ff7fffff
+muls n eq xo ff7ffffd c0400001 7f800000
+muls p eq xo ff7ffffd c0400001 7f800000
+muls z eq xo ff7ffffd c0400001 7f7fffff
+muls m eq xo ff7ffffd c0400001 7f7fffff
+muls n eq - 00800000 3f7ffffe 007fffff
+muls m eq - 00800000 3f7ffffe 007fffff
+muls p eq - 00800000 3f7ffffe 007fffff
+muls z eq - 00800000 3f7ffffe 007fffff
+muls n eq - 00000001 3f800000 00000001
+muls m eq - 00000001 3f800000 00000001
+muls p eq - 00000001 3f800000 00000001
+muls z eq - 00000001 3f800000 00000001
+muls n eq - 00fffffe 3f000000 007fffff
+muls m eq - 00fffffe 3f000000 007fffff
+muls p eq - 00fffffe 3f000000 007fffff
+muls z eq - 00fffffe 3f000000 007fffff
+muls n eq - 80fffffc bf000000 007ffffe
+muls m eq - 80fffffc bf000000 007ffffe
+muls p eq - 80fffffc bf000000 007ffffe
+muls z eq - 80fffffc bf000000 007ffffe
+muls n eq - 00fffffe bf000000 807fffff
+muls m eq - 00fffffe bf000000 807fffff
+muls p eq - 00fffffe bf000000 807fffff
+muls z eq - 00fffffe bf000000 807fffff
+muls n eq - 80fffffc 3f000000 807ffffe
+muls m eq - 80fffffc 3f000000 807ffffe
+muls p eq - 80fffffc 3f000000 807ffffe
+muls z eq - 80fffffc 3f000000 807ffffe
+muls n eq - 00fffff8 3f000000 007ffffc
+muls m eq - 00fffff8 3f000000 007ffffc
+muls p eq - 00fffff8 3f000000 007ffffc
+muls z eq - 00fffff8 3f000000 007ffffc
+muls n eq - 00000008 3e000000 00000001
+muls m eq - 00000008 3e000000 00000001
+muls p eq - 00000008 3e000000 00000001
+muls z eq - 00000008 3e000000 00000001
+muls n eq - 00000006 3f000000 00000003
+muls m eq - 00000006 3f000000 00000003
+muls p eq - 00000006 3f000000 00000003
+muls z eq - 00000006 3f000000 00000003
+muls n eq - 80000008 3e000000 80000001
+muls m eq - 80000008 3e000000 80000001
+muls p eq - 80000008 3e000000 80000001
+muls z eq - 80000008 3e000000 80000001
+muls n eq - 00000006 bf000000 80000003
+muls m eq - 00000006 bf000000 80000003
+muls p eq - 00000006 bf000000 80000003
+muls z eq - 00000006 bf000000 80000003
+muls n eq xu 00800000 00800000 00000000
+muls z eq xu 00800000 00800000 00000000
+muls m eq xu 00800000 00800000 00000000
+muls n eq xu 80800000 80800000 00000000
+muls z eq xu 80800000 80800000 00000000
+muls m eq xu 80800000 80800000 00000000
+muls p eq xu 00800000 01000000 00000001
+muls p eq xu 81000000 81000000 00000001
+muls n eq xu 80800000 00800000 80000000
+muls z eq xu 80800000 00800000 80000000
+muls p eq xu 80800000 00800000 80000000
+muls n eq xu 00800000 80800000 80000000
+muls z eq xu 00800000 80800000 80000000
+muls p eq xu 00800000 80800000 80000000
+muls m eq xu 80800000 01000000 80000001
+muls m eq xu 00800000 80800000 80000001
+muls n eq xu 007fffff 007ffffe 00000000
+muls z eq xu 007fffff 007ffffe 00000000
+muls m eq xu 007fffff 007ffffe 00000000
+muls n eq xu 807fffff 807ffffe 00000000
+muls z eq xu 807fffff 807ffffe 00000000
+muls m eq xu 807fffff 807ffffe 00000000
+muls p eq xu 007fffff 007ffffe 00000001
+muls p eq xu 807fffff 807ffffe 00000001
+muls n eq xu 807ffff7 01000003 80000000
+muls z eq xu 807ffff7 01000003 80000000
+muls p eq xu 807ffff7 01000003 80000000
+muls n eq xu 007ffff7 81000003 80000000
+muls z eq xu 007ffff7 81000003 80000000
+muls p eq xu 007ffff7 81000003 80000000
+muls m eq xu 807ffff7 01000003 80000001
+muls m eq xu 007ffff7 81000003 80000001
+muls p eq xu 00000001 3f000000 00000001
+muls n eq xu 00000001 3f000000 00000000
+muls z eq xu 00000001 3f000000 00000000
+muls m eq xu 00000001 3f000000 00000000
+muls m eq xu 3f000000 80000001 80000001
+muls n eq xu 3f000000 80000001 80000000
+muls z eq xu 3f000000 80000001 80000000
+muls p eq xu 3f000000 80000001 80000000
+muls m eq xu 00000001 3f7fffff 00000000
+muls z eq xu 00000001 3f7fffff 00000000
+muls n eq xu 00000001 3f7fffff 00000001
+muls p eq xu 00000001 3f7fffff 00000001
+muls p eq xu 00000001 00000001 00000001
+muls n eq xu 00000001 00000001 00000000
+muls z eq xu 00000001 00000001 00000000
+muls m eq xu 00000001 00000001 00000000
+muls p eq xu 80000001 3f7fffff 80000000
+muls z eq xu 80000001 3f7fffff 80000000
+muls n eq xu 00000001 bf7fffff 80000001
+muls m eq xu 00000001 bf7fffff 80000001
+muls m eq xu 00000001 80000001 80000001
+muls n eq xu 80000001 00000001 80000000
+muls z eq xu 80000001 00000001 80000000
+muls p eq xu 80000001 00000001 80000000
+muls z eq xu 00ffffff 3f000000 007fffff
+muls m eq xu 00ffffff 3f000000 007fffff
+muls z eq xu 80ffffff bf000000 007fffff
+muls m eq xu 80ffffff bf000000 007fffff
+muls z eq xu 80ffffff 3f000000 807fffff
+muls p eq xu 80ffffff 3f000000 807fffff
+muls p eq xu 00ffffff 3f000000 00800000
+muls n eq xu 00ffffff 3f000000 00800000
+muls m eq xu 00ffffff bf000000 80800000
+muls n eq xu 00ffffff bf000000 80800000
+muls z eq xu 007fffff 3f800001 007fffff
+muls m eq xu 007fffff 3f800001 007fffff
+muls z eq xu 007fffff bf800001 807fffff
+muls p eq xu 007fffff bf800001 807fffff
+muls z eq xu 00800001 3f7ffffa 007ffffd
+muls m eq xu 00800001 3f7ffffa 007ffffd
+muls p eq xu 007ffffe 3f7ffffc 007ffffd
+muls z eq xu 007ffffc 3f800001 007ffffc
+muls m eq xu 007ffffc 3f800001 007ffffc
+muls z eq xu 00800001 3f7ffffe 007fffff
+muls m eq xu 00800001 3f7ffffe 007fffff
+muls p eq xu 007ffffe 3f800001 007fffff
+muls n eq xu 007ffffe 3f800001 007fffff
+muls m eq xu 007ffffe bf800001 807fffff
+muls n eq xu 007ffffe bf800001 807fffff
+muls p eq xu 007ffff8 3f800001 007ffff9
+muls n eq xu 007ffff8 3f800001 007ffff9
+muls m eq xu 807ffff7 3f800001 807ffff8
+muls n eq xu 807ffff7 3f800001 807ffff8
+muls m eq xu 007ffff8 bf800001 807ffff9
+muls n eq xu 007ffff8 bf800001 807ffff9
+muls n eq xu 00800001 3f7ffffa 007ffffe
+muls p eq xu 00800001 3f7ffffa 007ffffe
+muls m eq xu 007ffffe 3f7ffffc 007ffffc
+muls n eq xu 007ffffe 3f7ffffc 007ffffc
+muls z eq xu 007ffffe 3f7ffffc 007ffffc
+muls p eq x?u 007fffff 3f800001 00800000
+muls n eq x?u 007fffff 3f800001 00800000
+muls m eq x?u 807fffff 3f800001 80800000
+muls n eq x?u 807fffff 3f800001 80800000
+muls p eq x?u 007ffff8 3f800008 00800000
+muls n eq x?u 007ffff8 3f800008 00800000
+muls m eq x?u 007ffff8 bf800008 80800000
+muls n eq x?u 007ffff8 bf800008 80800000
+muls p eq x?u 00800001 3f7ffffe 00800000
+muls n eq x?u 00800001 3f7ffffe 00800000
+muls p eq x?u 00800002 3f7ffffc 00800000
+muls n eq x?u 00800002 3f7ffffc 00800000
+muls n uo - 7fff0000 00000000 7fff0000
+muls m uo - 7fff0000 00000000 7fff0000
+muls p uo - 7fff0000 00000000 7fff0000
+muls z uo - 7fff0000 00000000 7fff0000
+muls n uo - 7fff0000 80000000 7fff0000
+muls m uo - 7fff0000 80000000 7fff0000
+muls p uo - 7fff0000 80000000 7fff0000
+muls z uo - 7fff0000 80000000 7fff0000
+muls n uo - 80000000 7fff0000 7fff0000
+muls m uo - 80000000 7fff0000 7fff0000
+muls p uo - 80000000 7fff0000 7fff0000
+muls z uo - 80000000 7fff0000 7fff0000
+muls n uo - 7fff0000 3f800000 7fff0000
+muls m uo - 7fff0000 3f800000 7fff0000
+muls p uo - 7fff0000 3f800000 7fff0000
+muls z uo - 7fff0000 3f800000 7fff0000
+muls n uo - 7fff0000 bf800000 7fff0000
+muls m uo - 7fff0000 bf800000 7fff0000
+muls p uo - 7fff0000 bf800000 7fff0000
+muls z uo - 7fff0000 bf800000 7fff0000
+muls n uo - 007fffff 7fff0000 7fff0000
+muls m uo - 007fffff 7fff0000 7fff0000
+muls p uo - 007fffff 7fff0000 7fff0000
+muls z uo - 007fffff 7fff0000 7fff0000
+muls n uo - 807fffff 7fff0000 7fff0000
+muls m uo - 807fffff 7fff0000 7fff0000
+muls p uo - 807fffff 7fff0000 7fff0000
+muls z uo - 807fffff 7fff0000 7fff0000
+muls n uo - 7fff0000 007fffff 7fff0000
+muls m uo - 7fff0000 007fffff 7fff0000
+muls p uo - 7fff0000 007fffff 7fff0000
+muls z uo - 7fff0000 007fffff 7fff0000
+muls n uo - 7fff0000 807fffff 7fff0000
+muls m uo - 7fff0000 807fffff 7fff0000
+muls p uo - 7fff0000 807fffff 7fff0000
+muls z uo - 7fff0000 807fffff 7fff0000
+muls n uo - 7fff0000 00000001 7fff0000
+muls m uo - 7fff0000 00000001 7fff0000
+muls p uo - 7fff0000 00000001 7fff0000
+muls z uo - 7fff0000 00000001 7fff0000
+muls n uo - 7fff0000 80000001 7fff0000
+muls m uo - 7fff0000 80000001 7fff0000
+muls p uo - 7fff0000 80000001 7fff0000
+muls z uo - 7fff0000 80000001 7fff0000
+muls n uo - 00000001 7fff0000 7fff0000
+muls m uo - 00000001 7fff0000 7fff0000
+muls p uo - 00000001 7fff0000 7fff0000
+muls z uo - 00000001 7fff0000 7fff0000
+muls n uo - 80000001 7fff0000 7fff0000
+muls m uo - 80000001 7fff0000 7fff0000
+muls p uo - 80000001 7fff0000 7fff0000
+muls z uo - 80000001 7fff0000 7fff0000
+muls n uo - 7fff0000 7f7fffff 7fff0000
+muls m uo - 7fff0000 7f7fffff 7fff0000
+muls p uo - 7fff0000 7f7fffff 7fff0000
+muls z uo - 7fff0000 7f7fffff 7fff0000
+muls n uo - 7fff0000 ff7fffff 7fff0000
+muls m uo - 7fff0000 ff7fffff 7fff0000
+muls p uo - 7fff0000 ff7fffff 7fff0000
+muls z uo - 7fff0000 ff7fffff 7fff0000
+muls n uo - 7f7fffff 7fff0000 7fff0000
+muls m uo - 7f7fffff 7fff0000 7fff0000
+muls p uo - 7f7fffff 7fff0000 7fff0000
+muls z uo - 7f7fffff 7fff0000 7fff0000
+muls n uo - ff7fffff 7fff0000 7fff0000
+muls m uo - ff7fffff 7fff0000 7fff0000
+muls p uo - ff7fffff 7fff0000 7fff0000
+muls z uo - ff7fffff 7fff0000 7fff0000
+muls n uo - 7fff0000 7f800000 7fff0000
+muls m uo - 7fff0000 7f800000 7fff0000
+muls p uo - 7fff0000 7f800000 7fff0000
+muls z uo - 7fff0000 7f800000 7fff0000
+muls n uo - 7fff0000 ff800000 7fff0000
+muls m uo - 7fff0000 ff800000 7fff0000
+muls p uo - 7fff0000 ff800000 7fff0000
+muls z uo - 7fff0000 ff800000 7fff0000
+muls n uo - 7f800000 7fff0000 7fff0000
+muls m uo - 7f800000 7fff0000 7fff0000
+muls p uo - 7f800000 7fff0000 7fff0000
+muls z uo - 7f800000 7fff0000 7fff0000
+muls n uo - ff800000 7fff0000 7fff0000
+muls m uo - ff800000 7fff0000 7fff0000
+muls p uo - ff800000 7fff0000 7fff0000
+muls z uo - ff800000 7fff0000 7fff0000
+muls n uo - 7fff0000 7fff0000 7fff0000
+muls m uo - 7fff0000 7fff0000 7fff0000
+muls p uo - 7fff0000 7fff0000 7fff0000
+muls z uo - 7fff0000 7fff0000 7fff0000
+muls n uo v 7f810000 00000000 7fff0000
+muls m uo v 7f810000 00000000 7fff0000
+muls p uo v 7f810000 00000000 7fff0000
+muls z uo v 7f810000 00000000 7fff0000
+muls n uo v 7f810000 80000000 7fff0000
+muls m uo v 7f810000 80000000 7fff0000
+muls p uo v 7f810000 80000000 7fff0000
+muls z uo v 7f810000 80000000 7fff0000
+muls n uo v 80000000 7f810000 7fff0000
+muls m uo v 80000000 7f810000 7fff0000
+muls p uo v 80000000 7f810000 7fff0000
+muls z uo v 80000000 7f810000 7fff0000
+muls n uo v 7f810000 3f800000 7fff0000
+muls m uo v 7f810000 3f800000 7fff0000
+muls p uo v 7f810000 3f800000 7fff0000
+muls z uo v 7f810000 3f800000 7fff0000
+muls n uo v 7f810000 bf800000 7fff0000
+muls m uo v 7f810000 bf800000 7fff0000
+muls p uo v 7f810000 bf800000 7fff0000
+muls z uo v 7f810000 bf800000 7fff0000
+muls n uo v 007fffff 7f810000 7fff0000
+muls m uo v 007fffff 7f810000 7fff0000
+muls p uo v 007fffff 7f810000 7fff0000
+muls z uo v 007fffff 7f810000 7fff0000
+muls n uo v 807fffff 7f810000 7fff0000
+muls m uo v 807fffff 7f810000 7fff0000
+muls p uo v 807fffff 7f810000 7fff0000
+muls z uo v 807fffff 7f810000 7fff0000
+muls n uo v 7f810000 007fffff 7fff0000
+muls m uo v 7f810000 007fffff 7fff0000
+muls p uo v 7f810000 007fffff 7fff0000
+muls z uo v 7f810000 007fffff 7fff0000
+muls n uo v 7f810000 807fffff 7fff0000
+muls m uo v 7f810000 807fffff 7fff0000
+muls p uo v 7f810000 807fffff 7fff0000
+muls z uo v 7f810000 807fffff 7fff0000
+muls n uo v 7f810000 00000001 7fff0000
+muls m uo v 7f810000 00000001 7fff0000
+muls p uo v 7f810000 00000001 7fff0000
+muls z uo v 7f810000 00000001 7fff0000
+muls n uo v 7f810000 80000001 7fff0000
+muls m uo v 7f810000 80000001 7fff0000
+muls p uo v 7f810000 80000001 7fff0000
+muls z uo v 7f810000 80000001 7fff0000
+muls n uo v 00000001 7f810000 7fff0000
+muls m uo v 00000001 7f810000 7fff0000
+muls p uo v 00000001 7f810000 7fff0000
+muls z uo v 00000001 7f810000 7fff0000
+muls n uo v 80000001 7f810000 7fff0000
+muls m uo v 80000001 7f810000 7fff0000
+muls p uo v 80000001 7f810000 7fff0000
+muls z uo v 80000001 7f810000 7fff0000
+muls n uo v 7f810000 7f7fffff 7fff0000
+muls m uo v 7f810000 7f7fffff 7fff0000
+muls p uo v 7f810000 7f7fffff 7fff0000
+muls z uo v 7f810000 7f7fffff 7fff0000
+muls n uo v 7f810000 ff7fffff 7fff0000
+muls m uo v 7f810000 ff7fffff 7fff0000
+muls p uo v 7f810000 ff7fffff 7fff0000
+muls z uo v 7f810000 ff7fffff 7fff0000
+muls n uo v 7f7fffff 7f810000 7fff0000
+muls m uo v 7f7fffff 7f810000 7fff0000
+muls p uo v 7f7fffff 7f810000 7fff0000
+muls z uo v 7f7fffff 7f810000 7fff0000
+muls n uo v ff7fffff 7f810000 7fff0000
+muls m uo v ff7fffff 7f810000 7fff0000
+muls p uo v ff7fffff 7f810000 7fff0000
+muls z uo v ff7fffff 7f810000 7fff0000
+muls n uo v 7f810000 7f800000 7fff0000
+muls m uo v 7f810000 7f800000 7fff0000
+muls p uo v 7f810000 7f800000 7fff0000
+muls z uo v 7f810000 7f800000 7fff0000
+muls n uo v 7f810000 ff800000 7fff0000
+muls m uo v 7f810000 ff800000 7fff0000
+muls p uo v 7f810000 ff800000 7fff0000
+muls z uo v 7f810000 ff800000 7fff0000
+muls n uo v 7f800000 7f810000 7fff0000
+muls m uo v 7f800000 7f810000 7fff0000
+muls p uo v 7f800000 7f810000 7fff0000
+muls z uo v 7f800000 7f810000 7fff0000
+muls n uo v ff800000 7f810000 7fff0000
+muls m uo v ff800000 7f810000 7fff0000
+muls p uo v ff800000 7f810000 7fff0000
+muls z uo v ff800000 7f810000 7fff0000
+muls n uo v 7fff0000 7f810000 7fff0000
+muls m uo v 7fff0000 7f810000 7fff0000
+muls p uo v 7fff0000 7f810000 7fff0000
+muls z uo v 7fff0000 7f810000 7fff0000
+muls n uo v 7f810000 7fff0000 7fff0000
+muls m uo v 7f810000 7fff0000 7fff0000
+muls p uo v 7f810000 7fff0000 7fff0000
+muls z uo v 7f810000 7fff0000 7fff0000
+muls n uo v 7f810000 7f810000 7fff0000
+muls m uo v 7f810000 7f810000 7fff0000
+muls p uo v 7f810000 7f810000 7fff0000
+muls z uo v 7f810000 7f810000 7fff0000
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/subd.input b/verrou/unitTest/checkUCB-vecto/inputData/subd.input
new file mode 100644
index 0000000000000000000000000000000000000000..766c31e015765e660f49f7e0ea003df03e1d275a
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/subd.input
@@ -0,0 +1,1314 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+subd p eq - 3ff00000 00000000 bff00000 00000000 40000000 00000000
+subd z eq - 3ff00000 00000000 bff00000 00000000 40000000 00000000
+subd n eq - 3ff00000 00000000 c0000000 00000000 40080000 00000000
+subd m eq - 3ff00000 00000000 c0000000 00000000 40080000 00000000
+subd p eq - 3ff00000 00000000 c0000000 00000000 40080000 00000000
+subd z eq - 3ff00000 00000000 c0000000 00000000 40080000 00000000
+subd n eq - 40000000 00000000 bff00000 00000000 40080000 00000000
+subd m eq - 40000000 00000000 bff00000 00000000 40080000 00000000
+subd p eq - 40000000 00000000 bff00000 00000000 40080000 00000000
+subd z eq - 40000000 00000000 bff00000 00000000 40080000 00000000
+subd n eq - 40000000 00000000 c0000000 00000000 40100000 00000000
+subd m eq - 40000000 00000000 c0000000 00000000 40100000 00000000
+subd p eq - 40000000 00000000 c0000000 00000000 40100000 00000000
+subd z eq - 40000000 00000000 c0000000 00000000 40100000 00000000
+subd n eq - 40000000 00000000 40000000 00000000 00000000 00000000
+subd z eq - 40000000 00000000 40000000 00000000 00000000 00000000
+subd p eq - 40000000 00000000 40000000 00000000 00000000 00000000
+subd m eq - 40000000 00000000 40000000 00000000 80000000 00000000
+subd n eq - 40140000 00000000 40140000 00000000 00000000 00000000
+subd z eq - 40140000 00000000 40140000 00000000 00000000 00000000
+subd p eq - 40140000 00000000 40140000 00000000 00000000 00000000
+subd m eq - 40140000 00000000 40140000 00000000 80000000 00000000
+subd n eq - 3ff00000 00000000 c01c0000 00000000 40200000 00000000
+subd m eq - 3ff00000 00000000 c01c0000 00000000 40200000 00000000
+subd p eq - 3ff00000 00000000 c01c0000 00000000 40200000 00000000
+subd z eq - 3ff00000 00000000 c01c0000 00000000 40200000 00000000
+subd n eq - 40140000 00000000 3ff00000 00000000 40100000 00000000
+subd m eq - 40140000 00000000 3ff00000 00000000 40100000 00000000
+subd p eq - 40140000 00000000 3ff00000 00000000 40100000 00000000
+subd z eq - 40140000 00000000 3ff00000 00000000 40100000 00000000
+subd n eq - 40000000 00000000 40140000 00000000 c0080000 00000000
+subd m eq - 40000000 00000000 40140000 00000000 c0080000 00000000
+subd p eq - 40000000 00000000 40140000 00000000 c0080000 00000000
+subd z eq - 40000000 00000000 40140000 00000000 c0080000 00000000
+subd n eq - 40140000 00000000 00000000 00000000 40140000 00000000
+subd m eq - 40140000 00000000 00000000 00000000 40140000 00000000
+subd p eq - 40140000 00000000 00000000 00000000 40140000 00000000
+subd z eq - 40140000 00000000 00000000 00000000 40140000 00000000
+subd n eq - 3ff00000 00000000 00000000 00000000 3ff00000 00000000
+subd m eq - 3ff00000 00000000 00000000 00000000 3ff00000 00000000
+subd p eq - 3ff00000 00000000 00000000 00000000 3ff00000 00000000
+subd z eq - 3ff00000 00000000 00000000 00000000 3ff00000 00000000
+subd n eq - bff00000 00000000 00000000 00000000 bff00000 00000000
+subd m eq - bff00000 00000000 00000000 00000000 bff00000 00000000
+subd p eq - bff00000 00000000 00000000 00000000 bff00000 00000000
+subd z eq - bff00000 00000000 00000000 00000000 bff00000 00000000
+subd n eq - 00000000 00000000 bff00000 00000000 3ff00000 00000000
+subd m eq - 00000000 00000000 bff00000 00000000 3ff00000 00000000
+subd p eq - 00000000 00000000 bff00000 00000000 3ff00000 00000000
+subd z eq - 00000000 00000000 bff00000 00000000 3ff00000 00000000
+subd n eq - 80000000 00000000 3ff00000 00000000 bff00000 00000000
+subd m eq - 80000000 00000000 3ff00000 00000000 bff00000 00000000
+subd p eq - 80000000 00000000 3ff00000 00000000 bff00000 00000000
+subd z eq - 80000000 00000000 3ff00000 00000000 bff00000 00000000
+subd n eq - 00000000 00000000 00000000 00000000 00000000 00000000
+subd z eq - 00000000 00000000 00000000 00000000 00000000 00000000
+subd p eq - 00000000 00000000 00000000 00000000 00000000 00000000
+subd m eq - 00000000 00000000 00000000 00000000 80000000 00000000
+subd n eq - 80000000 00000000 00000000 00000000 80000000 00000000
+subd m eq - 80000000 00000000 00000000 00000000 80000000 00000000
+subd p eq - 80000000 00000000 00000000 00000000 80000000 00000000
+subd z eq - 80000000 00000000 00000000 00000000 80000000 00000000
+subd n eq - 40080000 00000000 c0080000 00000000 40180000 00000000
+subd m eq - 40080000 00000000 c0080000 00000000 40180000 00000000
+subd p eq - 40080000 00000000 c0080000 00000000 40180000 00000000
+subd z eq - 40080000 00000000 c0080000 00000000 40180000 00000000
+subd n eq - 3ff00000 00000000 3ff00000 00000000 00000000 00000000
+subd z eq - 3ff00000 00000000 3ff00000 00000000 00000000 00000000
+subd p eq - 3ff00000 00000000 3ff00000 00000000 00000000 00000000
+subd n eq - c0080000 00000000 c0080000 00000000 00000000 00000000
+subd z eq - c0080000 00000000 c0080000 00000000 00000000 00000000
+subd p eq - c0080000 00000000 c0080000 00000000 00000000 00000000
+subd m eq - 3ff00000 00000000 3ff00000 00000000 80000000 00000000
+subd m eq - c0080000 00000000 c0080000 00000000 80000000 00000000
+subd n eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+subd m eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+subd p eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+subd z eq - 7ff00000 00000000 00000000 00000000 7ff00000 00000000
+subd n eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+subd m eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+subd p eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+subd z eq - fff00000 00000000 00000000 00000000 fff00000 00000000
+subd n eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+subd m eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+subd p eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+subd z eq - ffe00000 00000000 00000000 00000000 ffe00000 00000000
+subd n eq - 00000000 00000003 00000000 00000000 00000000 00000003
+subd m eq - 00000000 00000003 00000000 00000000 00000000 00000003
+subd p eq - 00000000 00000003 00000000 00000000 00000000 00000003
+subd z eq - 00000000 00000003 00000000 00000000 00000000 00000003
+subd n eq - 80000000 00000003 00000000 00000000 80000000 00000003
+subd m eq - 80000000 00000003 00000000 00000000 80000000 00000003
+subd p eq - 80000000 00000003 00000000 00000000 80000000 00000003
+subd z eq - 80000000 00000003 00000000 00000000 80000000 00000003
+subd n eq - 3ff00000 00000001 3ff00000 00000000 3cb00000 00000000
+subd m eq - 3ff00000 00000001 3ff00000 00000000 3cb00000 00000000
+subd p eq - 3ff00000 00000001 3ff00000 00000000 3cb00000 00000000
+subd z eq - 3ff00000 00000001 3ff00000 00000000 3cb00000 00000000
+subd n eq - bff00000 00000001 bff00000 00000000 bcb00000 00000000
+subd m eq - bff00000 00000001 bff00000 00000000 bcb00000 00000000
+subd p eq - bff00000 00000001 bff00000 00000000 bcb00000 00000000
+subd z eq - bff00000 00000001 bff00000 00000000 bcb00000 00000000
+subd n eq - bfffffff ffffffff c0000000 00000000 3cb00000 00000000
+subd m eq - bfffffff ffffffff c0000000 00000000 3cb00000 00000000
+subd p eq - bfffffff ffffffff c0000000 00000000 3cb00000 00000000
+subd z eq - bfffffff ffffffff c0000000 00000000 3cb00000 00000000
+subd n eq - 3fffffff ffffffff 40000000 00000000 bcb00000 00000000
+subd m eq - 3fffffff ffffffff 40000000 00000000 bcb00000 00000000
+subd p eq - 3fffffff ffffffff 40000000 00000000 bcb00000 00000000
+subd z eq - 3fffffff ffffffff 40000000 00000000 bcb00000 00000000
+subd n eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000000
+subd z eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000000
+subd m eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000000
+subd p eq x 7fe00000 00000000 bff00000 00000000 7fe00000 00000001
+subd n eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000000
+subd z eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000000
+subd p eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000000
+subd m eq x ffe00000 00000000 3ff00000 00000000 ffe00000 00000001
+subd n eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff ffffffff
+subd z eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff ffffffff
+subd m eq x 7fdfffff ffffffff bff00000 00000000 7fdfffff ffffffff
+subd p eq x 7fdfffff ffffffff bff00000 00000000 7fe00000 00000000
+subd n eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff ffffffff
+subd z eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff ffffffff
+subd p eq x ffdfffff ffffffff 3ff00000 00000000 ffdfffff ffffffff
+subd m eq x ffdfffff ffffffff 3ff00000 00000000 ffe00000 00000000
+subd n eq x 7fefffff ffffffff bff00000 00000000 7fefffff ffffffff
+subd z eq x 7fefffff ffffffff bff00000 00000000 7fefffff ffffffff
+subd m eq x 7fefffff ffffffff bff00000 00000000 7fefffff ffffffff
+subd p eq xo 7fefffff ffffffff bff00000 00000000 7ff00000 00000000
+subd n eq x ffefffff ffffffff 3ff00000 00000000 ffefffff ffffffff
+subd z eq x ffefffff ffffffff 3ff00000 00000000 ffefffff ffffffff
+subd p eq x ffefffff ffffffff 3ff00000 00000000 ffefffff ffffffff
+subd m eq xo ffefffff ffffffff 3ff00000 00000000 fff00000 00000000
+subd n eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffe
+subd z eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffe
+subd m eq x 7fefffff fffffffe bff00000 00000000 7fefffff fffffffe
+subd p eq x 7fefffff fffffffe bff00000 00000000 7fefffff ffffffff
+subd n eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffe
+subd z eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffe
+subd p eq x ffefffff fffffffe 3ff00000 00000000 ffefffff fffffffe
+subd m eq x ffefffff fffffffe 3ff00000 00000000 ffefffff ffffffff
+subd n eq x 00000000 00000001 bff00000 00000000 3ff00000 00000000
+subd z eq x 00000000 00000001 bff00000 00000000 3ff00000 00000000
+subd m eq x 00000000 00000001 bff00000 00000000 3ff00000 00000000
+subd p eq x 00000000 00000001 bff00000 00000000 3ff00000 00000001
+subd n eq x 80000000 00000001 3ff00000 00000000 bff00000 00000000
+subd z eq x 80000000 00000001 3ff00000 00000000 bff00000 00000000
+subd p eq x 80000000 00000001 3ff00000 00000000 bff00000 00000000
+subd m eq x 80000000 00000001 3ff00000 00000000 bff00000 00000001
+subd n eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000000
+subd p eq x 7fe00000 00000000 3ff00000 00000000 7fe00000 00000000
+subd z eq x 7fe00000 00000000 3ff00000 00000000 7fdfffff ffffffff
+subd m eq x 7fe00000 00000000 3ff00000 00000000 7fdfffff ffffffff
+subd n eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000000
+subd m eq x ffe00000 00000000 bff00000 00000000 ffe00000 00000000
+subd z eq x ffe00000 00000000 bff00000 00000000 ffdfffff ffffffff
+subd p eq x ffe00000 00000000 bff00000 00000000 ffdfffff ffffffff
+subd n eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff ffffffff
+subd p eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff ffffffff
+subd z eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff fffffffe
+subd m eq x 7fdfffff ffffffff 3ff00000 00000000 7fdfffff fffffffe
+subd n eq x ffdfffff ffffffff bff00000 00000000 ffdfffff ffffffff
+subd m eq x ffdfffff ffffffff bff00000 00000000 ffdfffff ffffffff
+subd z eq x ffdfffff ffffffff bff00000 00000000 ffdfffff fffffffe
+subd p eq x ffdfffff ffffffff bff00000 00000000 ffdfffff fffffffe
+subd n eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff ffffffff
+subd p eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff ffffffff
+subd z eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff fffffffe
+subd m eq x 7fefffff ffffffff 3ff00000 00000000 7fefffff fffffffe
+subd n eq x ffefffff ffffffff bff00000 00000000 ffefffff ffffffff
+subd m eq x ffefffff ffffffff bff00000 00000000 ffefffff ffffffff
+subd z eq x ffefffff ffffffff bff00000 00000000 ffefffff fffffffe
+subd p eq x ffefffff ffffffff bff00000 00000000 ffefffff fffffffe
+subd n eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffe
+subd p eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffe
+subd z eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffd
+subd m eq x 7fefffff fffffffe 3ff00000 00000000 7fefffff fffffffd
+subd n eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffe
+subd m eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffe
+subd z eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffd
+subd p eq x ffefffff fffffffe bff00000 00000000 ffefffff fffffffd
+subd n eq x 80000000 00000003 c0080000 00000000 40080000 00000000
+subd p eq x 80000000 00000003 c0080000 00000000 40080000 00000000
+subd z eq x 80000000 00000003 c0080000 00000000 4007ffff ffffffff
+subd m eq x 80000000 00000003 c0080000 00000000 4007ffff ffffffff
+subd n eq x 00000000 00000003 40140000 00000000 c0140000 00000000
+subd m eq x 00000000 00000003 40140000 00000000 c0140000 00000000
+subd z eq x 00000000 00000003 40140000 00000000 c013ffff ffffffff
+subd p eq x 00000000 00000003 40140000 00000000 c013ffff ffffffff
+subd n eq x 3ff00000 00000001 bff00000 00000000 40000000 00000000
+subd z eq x 3ff00000 00000001 bff00000 00000000 40000000 00000000
+subd m eq x 3ff00000 00000001 bff00000 00000000 40000000 00000000
+subd p eq x 3ff00000 00000001 bff00000 00000000 40000000 00000001
+subd n eq x bff00000 00000001 3ff00000 00000000 c0000000 00000000
+subd z eq x bff00000 00000001 3ff00000 00000000 c0000000 00000000
+subd p eq x bff00000 00000001 3ff00000 00000000 c0000000 00000000
+subd m eq x bff00000 00000001 3ff00000 00000000 c0000000 00000001
+subd n uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 00000000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 3ff00000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 bff00000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 00000000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 3ff00000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 bff00000 00000000 7fffe000 00000000
+subd p eq - 40b00000 00000000 40b00000 00000000 00000000 00000000
+subd n eq - 40b00000 00000000 40b00000 00000000 00000000 00000000
+subd z eq - 40b00000 00000000 40b00000 00000000 00000000 00000000
+subd p eq - 40a00000 00000000 40a00000 00000000 00000000 00000000
+subd n eq - 40a00000 00000000 40a00000 00000000 00000000 00000000
+subd z eq - 40a00000 00000000 40a00000 00000000 00000000 00000000
+subd m eq - 40b00000 00000000 40b00000 00000000 80000000 00000000
+subd m eq - 40a00000 00000000 40a00000 00000000 80000000 00000000
+subd n eq - 40a00000 00000000 40b00000 00000000 c0a00000 00000000
+subd m eq - 40a00000 00000000 40b00000 00000000 c0a00000 00000000
+subd p eq - 40a00000 00000000 40b00000 00000000 c0a00000 00000000
+subd z eq - 40a00000 00000000 40b00000 00000000 c0a00000 00000000
+subd p eq - c0b00000 00000000 c0b00000 00000000 00000000 00000000
+subd n eq - c0b00000 00000000 c0b00000 00000000 00000000 00000000
+subd z eq - c0b00000 00000000 c0b00000 00000000 00000000 00000000
+subd p eq - c0a00000 00000000 c0a00000 00000000 00000000 00000000
+subd n eq - c0a00000 00000000 c0a00000 00000000 00000000 00000000
+subd z eq - c0a00000 00000000 c0a00000 00000000 00000000 00000000
+subd m eq - c0b00000 00000000 c0b00000 00000000 80000000 00000000
+subd m eq - c0a00000 00000000 c0a00000 00000000 80000000 00000000
+subd n eq - c0a00000 00000000 c0b00000 00000000 40a00000 00000000
+subd m eq - c0a00000 00000000 c0b00000 00000000 40a00000 00000000
+subd p eq - c0a00000 00000000 c0b00000 00000000 40a00000 00000000
+subd z eq - c0a00000 00000000 c0b00000 00000000 40a00000 00000000
+subd n eq - 00000000 00000000 40b00000 00000000 c0b00000 00000000
+subd m eq - 00000000 00000000 40b00000 00000000 c0b00000 00000000
+subd p eq - 00000000 00000000 40b00000 00000000 c0b00000 00000000
+subd z eq - 00000000 00000000 40b00000 00000000 c0b00000 00000000
+subd n eq - 40b00000 00000000 00000000 00000000 40b00000 00000000
+subd m eq - 40b00000 00000000 00000000 00000000 40b00000 00000000
+subd p eq - 40b00000 00000000 00000000 00000000 40b00000 00000000
+subd z eq - 40b00000 00000000 00000000 00000000 40b00000 00000000
+subd n eq - c0b00000 00000000 40b00000 00000000 c0c00000 00000000
+subd m eq - c0b00000 00000000 40b00000 00000000 c0c00000 00000000
+subd p eq - c0b00000 00000000 40b00000 00000000 c0c00000 00000000
+subd z eq - c0b00000 00000000 40b00000 00000000 c0c00000 00000000
+subd n eq - 40b00000 00000000 c0b00000 00000000 40c00000 00000000
+subd m eq - 40b00000 00000000 c0b00000 00000000 40c00000 00000000
+subd p eq - 40b00000 00000000 c0b00000 00000000 40c00000 00000000
+subd z eq - 40b00000 00000000 c0b00000 00000000 40c00000 00000000
+subd n eq - 40b00000 00000000 c0a00000 00000000 40b80000 00000000
+subd m eq - 40b00000 00000000 c0a00000 00000000 40b80000 00000000
+subd p eq - 40b00000 00000000 c0a00000 00000000 40b80000 00000000
+subd z eq - 40b00000 00000000 c0a00000 00000000 40b80000 00000000
+subd n eq - c0a00000 00000000 40b00000 00000000 c0b80000 00000000
+subd m eq - c0a00000 00000000 40b00000 00000000 c0b80000 00000000
+subd p eq - c0a00000 00000000 40b00000 00000000 c0b80000 00000000
+subd z eq - c0a00000 00000000 40b00000 00000000 c0b80000 00000000
+subd n eq - c0b00000 00000000 40a00000 00000000 c0b80000 00000000
+subd m eq - c0b00000 00000000 40a00000 00000000 c0b80000 00000000
+subd p eq - c0b00000 00000000 40a00000 00000000 c0b80000 00000000
+subd z eq - c0b00000 00000000 40a00000 00000000 c0b80000 00000000
+subd n eq - 40a00000 00000000 c0b00000 00000000 40b80000 00000000
+subd m eq - 40a00000 00000000 c0b00000 00000000 40b80000 00000000
+subd p eq - 40a00000 00000000 c0b00000 00000000 40b80000 00000000
+subd z eq - 40a00000 00000000 c0b00000 00000000 40b80000 00000000
+subd n eq - 40dfffc0 00000000 3ff00000 00000000 40dfff80 00000000
+subd m eq - 40dfffc0 00000000 3ff00000 00000000 40dfff80 00000000
+subd p eq - 40dfffc0 00000000 3ff00000 00000000 40dfff80 00000000
+subd z eq - 40dfffc0 00000000 3ff00000 00000000 40dfff80 00000000
+subd n eq - 3ff00000 00000000 40dfffc0 00000000 c0dfff80 00000000
+subd m eq - 3ff00000 00000000 40dfffc0 00000000 c0dfff80 00000000
+subd p eq - 3ff00000 00000000 40dfffc0 00000000 c0dfff80 00000000
+subd z eq - 3ff00000 00000000 40dfffc0 00000000 c0dfff80 00000000
+subd n eq - 40dfffc0 00000000 40dfff40 00000000 40000000 00000000
+subd m eq - 40dfffc0 00000000 40dfff40 00000000 40000000 00000000
+subd p eq - 40dfffc0 00000000 40dfff40 00000000 40000000 00000000
+subd z eq - 40dfffc0 00000000 40dfff40 00000000 40000000 00000000
+subd n eq - 40e00000 00000000 40dfffc0 00000000 3ff00000 00000000
+subd m eq - 40e00000 00000000 40dfffc0 00000000 3ff00000 00000000
+subd p eq - 40e00000 00000000 40dfffc0 00000000 3ff00000 00000000
+subd z eq - 40e00000 00000000 40dfffc0 00000000 3ff00000 00000000
+subd n eq - c0e00000 00000000 c0dfffc0 00000000 bff00000 00000000
+subd m eq - c0e00000 00000000 c0dfffc0 00000000 bff00000 00000000
+subd p eq - c0e00000 00000000 c0dfffc0 00000000 bff00000 00000000
+subd z eq - c0e00000 00000000 c0dfffc0 00000000 bff00000 00000000
+subd n eq - 40e00020 00000000 c0dfffc0 00000000 40f00000 00000000
+subd m eq - 40e00020 00000000 c0dfffc0 00000000 40f00000 00000000
+subd p eq - 40e00020 00000000 c0dfffc0 00000000 40f00000 00000000
+subd z eq - 40e00020 00000000 c0dfffc0 00000000 40f00000 00000000
+subd n eq - 40cfff80 00000000 c0cfff80 00000000 40dfff80 00000000
+subd m eq - 40cfff80 00000000 c0cfff80 00000000 40dfff80 00000000
+subd p eq - 40cfff80 00000000 c0cfff80 00000000 40dfff80 00000000
+subd z eq - 40cfff80 00000000 c0cfff80 00000000 40dfff80 00000000
+subd n eq - c0dfffc0 00000000 3ff00000 00000000 c0e00000 00000000
+subd m eq - c0dfffc0 00000000 3ff00000 00000000 c0e00000 00000000
+subd p eq - c0dfffc0 00000000 3ff00000 00000000 c0e00000 00000000
+subd z eq - c0dfffc0 00000000 3ff00000 00000000 c0e00000 00000000
+subd n eq - 80000000 00000000 80000000 00000000 00000000 00000000
+subd n eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000000
+subd z eq x 3ff00000 00000000 00000000 00000001 3fefffff ffffffff
+subd p eq x 3ff00000 00000000 00000000 00000001 3ff00000 00000000
+subd m eq x 3ff00000 00000000 00000000 00000001 3fefffff ffffffff
+subd n eq x 3ff00000 00000000 3c900000 00000000 3ff00000 00000000
+subd n eq x 3ff00000 00000000 3c900000 00000001 3fefffff ffffffff
+subd n eq - 3ff00000 00000000 3ca00000 00000000 3fefffff ffffffff
+subd n eq xo ffe00000 00000001 7fe70000 00000001 fff00000 00000000
+subd z eq xo ffe00000 00000001 7fe70000 00000001 ffefffff ffffffff
+subd p eq xo ffe00000 00000001 7fe70000 00000001 ffefffff ffffffff
+subd m eq xo ffe00000 00000001 7fe70000 00000001 fff00000 00000000
+subd n eq - 40140000 00000000 80000000 00000000 40140000 00000000
+subd m eq - 40140000 00000000 80000000 00000000 40140000 00000000
+subd p eq - 40140000 00000000 80000000 00000000 40140000 00000000
+subd z eq - 40140000 00000000 80000000 00000000 40140000 00000000
+subd n eq - 7ff00000 00000000 fff00000 00000000 7ff00000 00000000
+subd m eq - 7ff00000 00000000 fff00000 00000000 7ff00000 00000000
+subd p eq - 7ff00000 00000000 fff00000 00000000 7ff00000 00000000
+subd z eq - 7ff00000 00000000 fff00000 00000000 7ff00000 00000000
+subd n eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+subd m eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+subd p eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+subd z eq - fff00000 00000000 7ff00000 00000000 fff00000 00000000
+subd n uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+subd m uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+subd p uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+subd z uo v fff00000 00000000 fff00000 00000000 7fffe000 00000000
+subd n uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+subd m uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+subd p uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+subd z uo v 7ff00000 00000000 7ff00000 00000000 7fffe000 00000000
+subd n eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd m eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd p eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd z eq - 7ff00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd n eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+subd m eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+subd p eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+subd z eq - 7ff00000 00000000 7fe00000 00000000 7ff00000 00000000
+subd n eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+subd m eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+subd p eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+subd z eq - fff00000 00000000 ffe00000 00000000 fff00000 00000000
+subd n eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+subd m eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+subd p eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+subd z eq - fff00000 00000000 7fe00000 00000000 fff00000 00000000
+subd n eq - 7fe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd m eq - 7fe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd p eq - 7fe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd z eq - 7fe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd n eq - 7fe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd m eq - 7fe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd p eq - 7fe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd z eq - 7fe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd n eq - ffe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd m eq - ffe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd p eq - ffe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd z eq - ffe00000 00000000 fff00000 00000000 7ff00000 00000000
+subd n eq - ffe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd m eq - ffe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd p eq - ffe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd z eq - ffe00000 00000000 7ff00000 00000000 fff00000 00000000
+subd n eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+subd m eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+subd p eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+subd z eq - 7ff00000 00000000 80000000 00000000 7ff00000 00000000
+subd n eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+subd m eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+subd p eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+subd z eq - fff00000 00000000 80000000 00000000 fff00000 00000000
+subd n eq - 00000000 00000000 fff00000 00000000 7ff00000 00000000
+subd m eq - 00000000 00000000 fff00000 00000000 7ff00000 00000000
+subd p eq - 00000000 00000000 fff00000 00000000 7ff00000 00000000
+subd z eq - 00000000 00000000 fff00000 00000000 7ff00000 00000000
+subd n eq - 80000000 00000000 fff00000 00000000 7ff00000 00000000
+subd m eq - 80000000 00000000 fff00000 00000000 7ff00000 00000000
+subd p eq - 80000000 00000000 fff00000 00000000 7ff00000 00000000
+subd z eq - 80000000 00000000 fff00000 00000000 7ff00000 00000000
+subd n eq - 00000000 00000000 7ff00000 00000000 fff00000 00000000
+subd m eq - 00000000 00000000 7ff00000 00000000 fff00000 00000000
+subd p eq - 00000000 00000000 7ff00000 00000000 fff00000 00000000
+subd z eq - 00000000 00000000 7ff00000 00000000 fff00000 00000000
+subd n eq - 80000000 00000000 7ff00000 00000000 fff00000 00000000
+subd m eq - 80000000 00000000 7ff00000 00000000 fff00000 00000000
+subd p eq - 80000000 00000000 7ff00000 00000000 fff00000 00000000
+subd z eq - 80000000 00000000 7ff00000 00000000 fff00000 00000000
+subd n eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+subd m eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+subd p eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+subd z eq - 7ff00000 00000000 800fffff ffffffff 7ff00000 00000000
+subd n eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+subd m eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+subd p eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+subd z eq - fff00000 00000000 800fffff ffffffff fff00000 00000000
+subd n eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+subd m eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+subd p eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+subd z eq - 7ff00000 00000000 000fffff ffffffff 7ff00000 00000000
+subd n eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+subd m eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+subd p eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+subd z eq - fff00000 00000000 000fffff ffffffff fff00000 00000000
+subd n eq - 00000000 00000003 fff00000 00000000 7ff00000 00000000
+subd m eq - 00000000 00000003 fff00000 00000000 7ff00000 00000000
+subd p eq - 00000000 00000003 fff00000 00000000 7ff00000 00000000
+subd z eq - 00000000 00000003 fff00000 00000000 7ff00000 00000000
+subd n eq - 00000000 00000003 7ff00000 00000000 fff00000 00000000
+subd m eq - 00000000 00000003 7ff00000 00000000 fff00000 00000000
+subd p eq - 00000000 00000003 7ff00000 00000000 fff00000 00000000
+subd z eq - 00000000 00000003 7ff00000 00000000 fff00000 00000000
+subd n eq - 80000000 00000003 fff00000 00000000 7ff00000 00000000
+subd m eq - 80000000 00000003 fff00000 00000000 7ff00000 00000000
+subd p eq - 80000000 00000003 fff00000 00000000 7ff00000 00000000
+subd z eq - 80000000 00000003 fff00000 00000000 7ff00000 00000000
+subd n eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+subd m eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+subd p eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+subd z eq - 80000000 00000003 7ff00000 00000000 fff00000 00000000
+subd n eq - 00000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd m eq - 00000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd p eq - 00000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd z eq - 00000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd n eq - 80000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd m eq - 80000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd p eq - 80000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd z eq - 80000000 00000000 ffe00000 00000000 7fe00000 00000000
+subd n eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+subd m eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+subd p eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+subd z eq - ffe00000 00000000 80000000 00000000 ffe00000 00000000
+subd n eq - 00000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd m eq - 00000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd p eq - 00000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd z eq - 00000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd n eq - 80000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd m eq - 80000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd p eq - 80000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd z eq - 80000000 00000000 800fffff ffffffff 000fffff ffffffff
+subd n eq - 00000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd m eq - 00000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd p eq - 00000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd z eq - 00000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd n eq - 80000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd m eq - 80000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd p eq - 80000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd z eq - 80000000 00000000 000fffff ffffffff 800fffff ffffffff
+subd n eq - 00000000 00000003 80000000 00000000 00000000 00000003
+subd m eq - 00000000 00000003 80000000 00000000 00000000 00000003
+subd p eq - 00000000 00000003 80000000 00000000 00000000 00000003
+subd z eq - 00000000 00000003 80000000 00000000 00000000 00000003
+subd n eq - 80000000 00000003 80000000 00000000 80000000 00000003
+subd m eq - 80000000 00000003 80000000 00000000 80000000 00000003
+subd p eq - 80000000 00000003 80000000 00000000 80000000 00000003
+subd z eq - 80000000 00000003 80000000 00000000 80000000 00000003
+subd n eq - 80000000 00000000 00100000 00000000 80100000 00000000
+subd m eq - 80000000 00000000 00100000 00000000 80100000 00000000
+subd p eq - 80000000 00000000 00100000 00000000 80100000 00000000
+subd z eq - 80000000 00000000 00100000 00000000 80100000 00000000
+subd n eq - 00100000 00000000 80000000 00000000 00100000 00000000
+subd m eq - 00100000 00000000 80000000 00000000 00100000 00000000
+subd p eq - 00100000 00000000 80000000 00000000 00100000 00000000
+subd z eq - 00100000 00000000 80000000 00000000 00100000 00000000
+subd n eq - 00000000 00000000 00100000 00000000 80100000 00000000
+subd m eq - 00000000 00000000 00100000 00000000 80100000 00000000
+subd p eq - 00000000 00000000 00100000 00000000 80100000 00000000
+subd z eq - 00000000 00000000 00100000 00000000 80100000 00000000
+subd n eq - 80100000 00000000 80000000 00000000 80100000 00000000
+subd m eq - 80100000 00000000 80000000 00000000 80100000 00000000
+subd p eq - 80100000 00000000 80000000 00000000 80100000 00000000
+subd z eq - 80100000 00000000 80000000 00000000 80100000 00000000
+subd z eq - 80000000 00000000 80000000 00000000 00000000 00000000
+subd p eq - 80000000 00000000 80000000 00000000 00000000 00000000
+subd m eq - 80000000 00000000 80000000 00000000 80000000 00000000
+subd n eq - 00000000 00000000 80000000 00000000 00000000 00000000
+subd m eq - 00000000 00000000 80000000 00000000 00000000 00000000
+subd p eq - 00000000 00000000 80000000 00000000 00000000 00000000
+subd z eq - 00000000 00000000 80000000 00000000 00000000 00000000
+subd n eq xo 7fe00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd p eq xo 7fe00000 00000000 ffe00000 00000000 7ff00000 00000000
+subd z eq xo 7fe00000 00000000 ffe00000 00000000 7fefffff ffffffff
+subd m eq xo 7fe00000 00000000 ffe00000 00000000 7fefffff ffffffff
+subd n eq xo ffe00000 00000000 7fe00000 00000000 fff00000 00000000
+subd m eq xo ffe00000 00000000 7fe00000 00000000 fff00000 00000000
+subd z eq xo ffe00000 00000000 7fe00000 00000000 ffefffff ffffffff
+subd p eq xo ffe00000 00000000 7fe00000 00000000 ffefffff ffffffff
+subd n eq - 7fdfffff fffffffe ffdfffff fffffffe 7fefffff fffffffe
+subd m eq - 7fdfffff fffffffe ffdfffff fffffffe 7fefffff fffffffe
+subd p eq - 7fdfffff fffffffe ffdfffff fffffffe 7fefffff fffffffe
+subd z eq - 7fdfffff fffffffe ffdfffff fffffffe 7fefffff fffffffe
+subd n eq - ffdfffff fffffffe 7fdfffff fffffffe ffefffff fffffffe
+subd m eq - ffdfffff fffffffe 7fdfffff fffffffe ffefffff fffffffe
+subd p eq - ffdfffff fffffffe 7fdfffff fffffffe ffefffff fffffffe
+subd z eq - ffdfffff fffffffe 7fdfffff fffffffe ffefffff fffffffe
+subd n eq xo 7fefffff fffffffe ffefffff fffffffe 7ff00000 00000000
+subd p eq xo 7fefffff fffffffe ffefffff fffffffe 7ff00000 00000000
+subd z eq xo 7fefffff fffffffe ffefffff fffffffe 7fefffff ffffffff
+subd m eq xo 7fefffff fffffffe ffefffff fffffffe 7fefffff ffffffff
+subd n eq xo ffefffff fffffffe 7fefffff fffffffe fff00000 00000000
+subd m eq xo ffefffff fffffffe 7fefffff fffffffe fff00000 00000000
+subd z eq xo ffefffff fffffffe 7fefffff fffffffe ffefffff ffffffff
+subd p eq xo ffefffff fffffffe 7fefffff fffffffe ffefffff ffffffff
+subd n eq - 00100000 00000000 80100000 00000000 00200000 00000000
+subd m eq - 00100000 00000000 80100000 00000000 00200000 00000000
+subd p eq - 00100000 00000000 80100000 00000000 00200000 00000000
+subd z eq - 00100000 00000000 80100000 00000000 00200000 00000000
+subd n eq - 7fd00000 00000000 ffd00000 00000000 7fe00000 00000000
+subd m eq - 7fd00000 00000000 ffd00000 00000000 7fe00000 00000000
+subd p eq - 7fd00000 00000000 ffd00000 00000000 7fe00000 00000000
+subd z eq - 7fd00000 00000000 ffd00000 00000000 7fe00000 00000000
+subd n eq - 000fffff ffffffff 800fffff ffffffff 001fffff fffffffe
+subd m eq - 000fffff ffffffff 800fffff ffffffff 001fffff fffffffe
+subd p eq - 000fffff ffffffff 800fffff ffffffff 001fffff fffffffe
+subd z eq - 000fffff ffffffff 800fffff ffffffff 001fffff fffffffe
+subd n eq - 800fffff ffffffff 000fffff ffffffff 801fffff fffffffe
+subd m eq - 800fffff ffffffff 000fffff ffffffff 801fffff fffffffe
+subd p eq - 800fffff ffffffff 000fffff ffffffff 801fffff fffffffe
+subd z eq - 800fffff ffffffff 000fffff ffffffff 801fffff fffffffe
+subd n eq - 00000000 00000004 80000000 00000004 00000000 00000008
+subd m eq - 00000000 00000004 80000000 00000004 00000000 00000008
+subd p eq - 00000000 00000004 80000000 00000004 00000000 00000008
+subd z eq - 00000000 00000004 80000000 00000004 00000000 00000008
+subd n eq - 80000000 00000004 00000000 00000004 80000000 00000008
+subd m eq - 80000000 00000004 00000000 00000004 80000000 00000008
+subd p eq - 80000000 00000004 00000000 00000004 80000000 00000008
+subd z eq - 80000000 00000004 00000000 00000004 80000000 00000008
+subd n eq - 00000000 00000001 80000000 00000001 00000000 00000002
+subd m eq - 00000000 00000001 80000000 00000001 00000000 00000002
+subd p eq - 00000000 00000001 80000000 00000001 00000000 00000002
+subd z eq - 00000000 00000001 80000000 00000001 00000000 00000002
+subd n eq - 80000000 00000001 00000000 00000001 80000000 00000002
+subd m eq - 80000000 00000001 00000000 00000001 80000000 00000002
+subd p eq - 80000000 00000001 00000000 00000001 80000000 00000002
+subd z eq - 80000000 00000001 00000000 00000001 80000000 00000002
+subd n eq - 7fe00000 00000000 7fe00000 00000000 00000000 00000000
+subd z eq - 7fe00000 00000000 7fe00000 00000000 00000000 00000000
+subd p eq - 7fe00000 00000000 7fe00000 00000000 00000000 00000000
+subd n eq - ffdfffff fffffffe ffdfffff fffffffe 00000000 00000000
+subd z eq - ffdfffff fffffffe ffdfffff fffffffe 00000000 00000000
+subd p eq - ffdfffff fffffffe ffdfffff fffffffe 00000000 00000000
+subd n eq - 00100000 00000000 00100000 00000000 00000000 00000000
+subd z eq - 00100000 00000000 00100000 00000000 00000000 00000000
+subd p eq - 00100000 00000000 00100000 00000000 00000000 00000000
+subd n eq - 80100000 00000000 80100000 00000000 00000000 00000000
+subd z eq - 80100000 00000000 80100000 00000000 00000000 00000000
+subd p eq - 80100000 00000000 80100000 00000000 00000000 00000000
+subd n eq - 000fffff fffffffc 000fffff fffffffc 00000000 00000000
+subd z eq - 000fffff fffffffc 000fffff fffffffc 00000000 00000000
+subd p eq - 000fffff fffffffc 000fffff fffffffc 00000000 00000000
+subd n eq - 800fffff ffffffff 800fffff ffffffff 00000000 00000000
+subd z eq - 800fffff ffffffff 800fffff ffffffff 00000000 00000000
+subd p eq - 800fffff ffffffff 800fffff ffffffff 00000000 00000000
+subd n eq - 00000000 00000001 00000000 00000001 00000000 00000000
+subd z eq - 00000000 00000001 00000000 00000001 00000000 00000000
+subd p eq - 00000000 00000001 00000000 00000001 00000000 00000000
+subd n eq - 80000000 00000001 80000000 00000001 00000000 00000000
+subd z eq - 80000000 00000001 80000000 00000001 00000000 00000000
+subd p eq - 80000000 00000001 80000000 00000001 00000000 00000000
+subd n eq - 7fefffff ffffffff 7fefffff ffffffff 00000000 00000000
+subd z eq - 7fefffff ffffffff 7fefffff ffffffff 00000000 00000000
+subd p eq - 7fefffff ffffffff 7fefffff ffffffff 00000000 00000000
+subd m eq - 7fe00000 00000000 7fe00000 00000000 80000000 00000000
+subd m eq - ffdfffff fffffffe ffdfffff fffffffe 80000000 00000000
+subd m eq - 00100000 00000000 00100000 00000000 80000000 00000000
+subd m eq - 80100000 00000000 80100000 00000000 80000000 00000000
+subd m eq - 000fffff fffffffc 000fffff fffffffc 80000000 00000000
+subd m eq - 800fffff ffffffff 800fffff ffffffff 80000000 00000000
+subd m eq - 00000000 00000001 00000000 00000001 80000000 00000000
+subd m eq - 80000000 00000001 80000000 00000001 80000000 00000000
+subd m eq - 7fefffff ffffffff 7fefffff ffffffff 80000000 00000000
+subd n eq - 3ff00000 00000001 3ff00000 00000002 bcb00000 00000000
+subd m eq - 3ff00000 00000001 3ff00000 00000002 bcb00000 00000000
+subd p eq - 3ff00000 00000001 3ff00000 00000002 bcb00000 00000000
+subd z eq - 3ff00000 00000001 3ff00000 00000002 bcb00000 00000000
+subd n eq - bff00000 00000001 bff00000 00000002 3cb00000 00000000
+subd m eq - bff00000 00000001 bff00000 00000002 3cb00000 00000000
+subd p eq - bff00000 00000001 bff00000 00000002 3cb00000 00000000
+subd z eq - bff00000 00000001 bff00000 00000002 3cb00000 00000000
+subd n eq - 40000000 00000000 40000000 00000001 bcc00000 00000000
+subd m eq - 40000000 00000000 40000000 00000001 bcc00000 00000000
+subd p eq - 40000000 00000000 40000000 00000001 bcc00000 00000000
+subd z eq - 40000000 00000000 40000000 00000001 bcc00000 00000000
+subd n eq - c0000000 00000000 c0000000 00000001 3cc00000 00000000
+subd m eq - c0000000 00000000 c0000000 00000001 3cc00000 00000000
+subd p eq - c0000000 00000000 c0000000 00000001 3cc00000 00000000
+subd z eq - c0000000 00000000 c0000000 00000001 3cc00000 00000000
+subd n eq - 40000000 00000004 40000000 00000003 3cc00000 00000000
+subd m eq - 40000000 00000004 40000000 00000003 3cc00000 00000000
+subd p eq - 40000000 00000004 40000000 00000003 3cc00000 00000000
+subd z eq - 40000000 00000004 40000000 00000003 3cc00000 00000000
+subd n eq - c0000000 00000004 c0000000 00000003 bcc00000 00000000
+subd m eq - c0000000 00000004 c0000000 00000003 bcc00000 00000000
+subd p eq - c0000000 00000004 c0000000 00000003 bcc00000 00000000
+subd z eq - c0000000 00000004 c0000000 00000003 bcc00000 00000000
+subd n eq - 400fffff ffffffff 400fffff fffffffe 3cc00000 00000000
+subd m eq - 400fffff ffffffff 400fffff fffffffe 3cc00000 00000000
+subd p eq - 400fffff ffffffff 400fffff fffffffe 3cc00000 00000000
+subd z eq - 400fffff ffffffff 400fffff fffffffe 3cc00000 00000000
+subd n eq - c00fffff ffffffff c00fffff fffffffe bcc00000 00000000
+subd m eq - c00fffff ffffffff c00fffff fffffffe bcc00000 00000000
+subd p eq - c00fffff ffffffff c00fffff fffffffe bcc00000 00000000
+subd z eq - c00fffff ffffffff c00fffff fffffffe bcc00000 00000000
+subd n eq - 3fffffff fffffffc 3fffffff fffffffd bcb00000 00000000
+subd m eq - 3fffffff fffffffc 3fffffff fffffffd bcb00000 00000000
+subd p eq - 3fffffff fffffffc 3fffffff fffffffd bcb00000 00000000
+subd z eq - 3fffffff fffffffc 3fffffff fffffffd bcb00000 00000000
+subd n eq - bfffffff fffffffc bfffffff fffffffd 3cb00000 00000000
+subd m eq - bfffffff fffffffc bfffffff fffffffd 3cb00000 00000000
+subd p eq - bfffffff fffffffc bfffffff fffffffd 3cb00000 00000000
+subd z eq - bfffffff fffffffc bfffffff fffffffd 3cb00000 00000000
+subd n eq - 7fe00000 00000001 7fe00000 00000000 7ca00000 00000000
+subd m eq - 7fe00000 00000001 7fe00000 00000000 7ca00000 00000000
+subd p eq - 7fe00000 00000001 7fe00000 00000000 7ca00000 00000000
+subd z eq - 7fe00000 00000001 7fe00000 00000000 7ca00000 00000000
+subd n eq - ffe00000 00000001 ffe00000 00000000 fca00000 00000000
+subd m eq - ffe00000 00000001 ffe00000 00000000 fca00000 00000000
+subd p eq - ffe00000 00000001 ffe00000 00000000 fca00000 00000000
+subd z eq - ffe00000 00000001 ffe00000 00000000 fca00000 00000000
+subd n eq - 7fe00000 00000001 7fe00000 00000002 fca00000 00000000
+subd m eq - 7fe00000 00000001 7fe00000 00000002 fca00000 00000000
+subd p eq - 7fe00000 00000001 7fe00000 00000002 fca00000 00000000
+subd z eq - 7fe00000 00000001 7fe00000 00000002 fca00000 00000000
+subd n eq - ffe00000 00000001 ffe00000 00000002 7ca00000 00000000
+subd m eq - ffe00000 00000001 ffe00000 00000002 7ca00000 00000000
+subd p eq - ffe00000 00000001 ffe00000 00000002 7ca00000 00000000
+subd z eq - ffe00000 00000001 ffe00000 00000002 7ca00000 00000000
+subd n eq - 7fd00000 00000000 7fd00000 00000001 fc900000 00000000
+subd m eq - 7fd00000 00000000 7fd00000 00000001 fc900000 00000000
+subd p eq - 7fd00000 00000000 7fd00000 00000001 fc900000 00000000
+subd z eq - 7fd00000 00000000 7fd00000 00000001 fc900000 00000000
+subd n eq - ffd00000 00000000 ffd00000 00000001 7c900000 00000000
+subd m eq - ffd00000 00000000 ffd00000 00000001 7c900000 00000000
+subd p eq - ffd00000 00000000 ffd00000 00000001 7c900000 00000000
+subd z eq - ffd00000 00000000 ffd00000 00000001 7c900000 00000000
+subd n eq - 7fd00000 00000004 7fd00000 00000003 7c900000 00000000
+subd m eq - 7fd00000 00000004 7fd00000 00000003 7c900000 00000000
+subd p eq - 7fd00000 00000004 7fd00000 00000003 7c900000 00000000
+subd z eq - 7fd00000 00000004 7fd00000 00000003 7c900000 00000000
+subd n eq - ffd00000 00000004 ffd00000 00000003 fc900000 00000000
+subd m eq - ffd00000 00000004 ffd00000 00000003 fc900000 00000000
+subd p eq - ffd00000 00000004 ffd00000 00000003 fc900000 00000000
+subd z eq - ffd00000 00000004 ffd00000 00000003 fc900000 00000000
+subd n eq - 7fcfffff ffffffff 7fcfffff fffffffe 7c800000 00000000
+subd m eq - 7fcfffff ffffffff 7fcfffff fffffffe 7c800000 00000000
+subd p eq - 7fcfffff ffffffff 7fcfffff fffffffe 7c800000 00000000
+subd z eq - 7fcfffff ffffffff 7fcfffff fffffffe 7c800000 00000000
+subd n eq - ffcfffff ffffffff ffcfffff fffffffe fc800000 00000000
+subd m eq - ffcfffff ffffffff ffcfffff fffffffe fc800000 00000000
+subd p eq - ffcfffff ffffffff ffcfffff fffffffe fc800000 00000000
+subd z eq - ffcfffff ffffffff ffcfffff fffffffe fc800000 00000000
+subd n eq - ffefffff fffffffe ffefffff ffffffff 7ca00000 00000000
+subd m eq - ffefffff fffffffe ffefffff ffffffff 7ca00000 00000000
+subd p eq - ffefffff fffffffe ffefffff ffffffff 7ca00000 00000000
+subd z eq - ffefffff fffffffe ffefffff ffffffff 7ca00000 00000000
+subd n eq - 7fefffff fffffffe 7fefffff ffffffff fca00000 00000000
+subd m eq - 7fefffff fffffffe 7fefffff ffffffff fca00000 00000000
+subd p eq - 7fefffff fffffffe 7fefffff ffffffff fca00000 00000000
+subd z eq - 7fefffff fffffffe 7fefffff ffffffff fca00000 00000000
+subd n eq - 80100000 00000001 80100000 00000000 80000000 00000001
+subd m eq - 80100000 00000001 80100000 00000000 80000000 00000001
+subd p eq - 80100000 00000001 80100000 00000000 80000000 00000001
+subd z eq - 80100000 00000001 80100000 00000000 80000000 00000001
+subd n eq - 00100000 00000001 00100000 00000000 00000000 00000001
+subd m eq - 00100000 00000001 00100000 00000000 00000000 00000001
+subd p eq - 00100000 00000001 00100000 00000000 00000000 00000001
+subd z eq - 00100000 00000001 00100000 00000000 00000000 00000001
+subd n eq - 800fffff ffffffff 80100000 00000000 00000000 00000001
+subd m eq - 800fffff ffffffff 80100000 00000000 00000000 00000001
+subd p eq - 800fffff ffffffff 80100000 00000000 00000000 00000001
+subd z eq - 800fffff ffffffff 80100000 00000000 00000000 00000001
+subd n eq - 000fffff ffffffff 00100000 00000000 80000000 00000001
+subd m eq - 000fffff ffffffff 00100000 00000000 80000000 00000001
+subd p eq - 000fffff ffffffff 00100000 00000000 80000000 00000001
+subd z eq - 000fffff ffffffff 00100000 00000000 80000000 00000001
+subd n eq - 00100000 00000001 00100000 00000002 80000000 00000001
+subd m eq - 00100000 00000001 00100000 00000002 80000000 00000001
+subd p eq - 00100000 00000001 00100000 00000002 80000000 00000001
+subd z eq - 00100000 00000001 00100000 00000002 80000000 00000001
+subd n eq - 80100000 00000001 80100000 00000002 00000000 00000001
+subd m eq - 80100000 00000001 80100000 00000002 00000000 00000001
+subd p eq - 80100000 00000001 80100000 00000002 00000000 00000001
+subd z eq - 80100000 00000001 80100000 00000002 00000000 00000001
+subd n eq - 000fffff ffffffff 000fffff fffffffe 00000000 00000001
+subd m eq - 000fffff ffffffff 000fffff fffffffe 00000000 00000001
+subd p eq - 000fffff ffffffff 000fffff fffffffe 00000000 00000001
+subd z eq - 000fffff ffffffff 000fffff fffffffe 00000000 00000001
+subd n eq - 800fffff ffffffff 800fffff fffffffe 80000000 00000001
+subd m eq - 800fffff ffffffff 800fffff fffffffe 80000000 00000001
+subd p eq - 800fffff ffffffff 800fffff fffffffe 80000000 00000001
+subd z eq - 800fffff ffffffff 800fffff fffffffe 80000000 00000001
+subd n eq - 000fffff fffffffd 000fffff fffffffe 80000000 00000001
+subd m eq - 000fffff fffffffd 000fffff fffffffe 80000000 00000001
+subd p eq - 000fffff fffffffd 000fffff fffffffe 80000000 00000001
+subd z eq - 000fffff fffffffd 000fffff fffffffe 80000000 00000001
+subd n eq - 800fffff fffffffd 800fffff fffffffe 00000000 00000001
+subd m eq - 800fffff fffffffd 800fffff fffffffe 00000000 00000001
+subd p eq - 800fffff fffffffd 800fffff fffffffe 00000000 00000001
+subd z eq - 800fffff fffffffd 800fffff fffffffe 00000000 00000001
+subd n eq - 00000000 00000002 00000000 00000001 00000000 00000001
+subd m eq - 00000000 00000002 00000000 00000001 00000000 00000001
+subd p eq - 00000000 00000002 00000000 00000001 00000000 00000001
+subd z eq - 00000000 00000002 00000000 00000001 00000000 00000001
+subd n eq - 80000000 00000002 80000000 00000001 80000000 00000001
+subd m eq - 80000000 00000002 80000000 00000001 80000000 00000001
+subd p eq - 80000000 00000002 80000000 00000001 80000000 00000001
+subd z eq - 80000000 00000002 80000000 00000001 80000000 00000001
+subd n eq - 00000000 00000003 00000000 00000002 00000000 00000001
+subd m eq - 00000000 00000003 00000000 00000002 00000000 00000001
+subd p eq - 00000000 00000003 00000000 00000002 00000000 00000001
+subd z eq - 00000000 00000003 00000000 00000002 00000000 00000001
+subd n eq - 80000000 00000003 80000000 00000002 80000000 00000001
+subd m eq - 80000000 00000003 80000000 00000002 80000000 00000001
+subd p eq - 80000000 00000003 80000000 00000002 80000000 00000001
+subd z eq - 80000000 00000003 80000000 00000002 80000000 00000001
+subd n eq - 40000000 00000000 3fffffff ffffffff 3cb00000 00000000
+subd m eq - 40000000 00000000 3fffffff ffffffff 3cb00000 00000000
+subd p eq - 40000000 00000000 3fffffff ffffffff 3cb00000 00000000
+subd z eq - 40000000 00000000 3fffffff ffffffff 3cb00000 00000000
+subd n eq - c0000000 00000000 bfffffff ffffffff bcb00000 00000000
+subd m eq - c0000000 00000000 bfffffff ffffffff bcb00000 00000000
+subd p eq - c0000000 00000000 bfffffff ffffffff bcb00000 00000000
+subd z eq - c0000000 00000000 bfffffff ffffffff bcb00000 00000000
+subd n eq - 40100000 00000001 400fffff ffffffff 3cd80000 00000000
+subd m eq - 40100000 00000001 400fffff ffffffff 3cd80000 00000000
+subd p eq - 40100000 00000001 400fffff ffffffff 3cd80000 00000000
+subd z eq - 40100000 00000001 400fffff ffffffff 3cd80000 00000000
+subd n eq - c0100000 00000001 c00fffff ffffffff bcd80000 00000000
+subd m eq - c0100000 00000001 c00fffff ffffffff bcd80000 00000000
+subd p eq - c0100000 00000001 c00fffff ffffffff bcd80000 00000000
+subd z eq - c0100000 00000001 c00fffff ffffffff bcd80000 00000000
+subd n eq - 400fffff ffffffff 40100000 00000002 bce40000 00000000
+subd m eq - 400fffff ffffffff 40100000 00000002 bce40000 00000000
+subd p eq - 400fffff ffffffff 40100000 00000002 bce40000 00000000
+subd z eq - 400fffff ffffffff 40100000 00000002 bce40000 00000000
+subd n eq - c00fffff ffffffff c0100000 00000002 3ce40000 00000000
+subd m eq - c00fffff ffffffff c0100000 00000002 3ce40000 00000000
+subd p eq - c00fffff ffffffff c0100000 00000002 3ce40000 00000000
+subd z eq - c00fffff ffffffff c0100000 00000002 3ce40000 00000000
+subd n eq - 40000000 00000001 3ff00000 00000001 3ff00000 00000001
+subd m eq - 40000000 00000001 3ff00000 00000001 3ff00000 00000001
+subd p eq - 40000000 00000001 3ff00000 00000001 3ff00000 00000001
+subd z eq - 40000000 00000001 3ff00000 00000001 3ff00000 00000001
+subd n eq - c0000000 00000001 bff00000 00000001 bff00000 00000001
+subd m eq - c0000000 00000001 bff00000 00000001 bff00000 00000001
+subd p eq - c0000000 00000001 bff00000 00000001 bff00000 00000001
+subd z eq - c0000000 00000001 bff00000 00000001 bff00000 00000001
+subd n eq - 40000000 00000002 3ff00000 00000001 3ff00000 00000003
+subd m eq - 40000000 00000002 3ff00000 00000001 3ff00000 00000003
+subd p eq - 40000000 00000002 3ff00000 00000001 3ff00000 00000003
+subd z eq - 40000000 00000002 3ff00000 00000001 3ff00000 00000003
+subd n eq - c0000000 00000002 bff00000 00000001 bff00000 00000003
+subd m eq - c0000000 00000002 bff00000 00000001 bff00000 00000003
+subd p eq - c0000000 00000002 bff00000 00000001 bff00000 00000003
+subd z eq - c0000000 00000002 bff00000 00000001 bff00000 00000003
+subd n eq - 40000000 00000002 3ff00000 00000003 3ff00000 00000001
+subd m eq - 40000000 00000002 3ff00000 00000003 3ff00000 00000001
+subd p eq - 40000000 00000002 3ff00000 00000003 3ff00000 00000001
+subd z eq - 40000000 00000002 3ff00000 00000003 3ff00000 00000001
+subd n eq - c0000000 00000002 bff00000 00000003 bff00000 00000001
+subd m eq - c0000000 00000002 bff00000 00000003 bff00000 00000001
+subd p eq - c0000000 00000002 bff00000 00000003 bff00000 00000001
+subd z eq - c0000000 00000002 bff00000 00000003 bff00000 00000001
+subd n eq - 7fd00000 00000000 7fcfffff ffffffff 7c800000 00000000
+subd m eq - 7fd00000 00000000 7fcfffff ffffffff 7c800000 00000000
+subd p eq - 7fd00000 00000000 7fcfffff ffffffff 7c800000 00000000
+subd z eq - 7fd00000 00000000 7fcfffff ffffffff 7c800000 00000000
+subd n eq - ffd00000 00000000 ffcfffff ffffffff fc800000 00000000
+subd m eq - ffd00000 00000000 ffcfffff ffffffff fc800000 00000000
+subd p eq - ffd00000 00000000 ffcfffff ffffffff fc800000 00000000
+subd z eq - ffd00000 00000000 ffcfffff ffffffff fc800000 00000000
+subd n eq - ffdfffff ffffffff ffe00000 00000000 7c900000 00000000
+subd m eq - ffdfffff ffffffff ffe00000 00000000 7c900000 00000000
+subd p eq - ffdfffff ffffffff ffe00000 00000000 7c900000 00000000
+subd z eq - ffdfffff ffffffff ffe00000 00000000 7c900000 00000000
+subd n eq - 7fdfffff ffffffff 7fe00000 00000000 fc900000 00000000
+subd m eq - 7fdfffff ffffffff 7fe00000 00000000 fc900000 00000000
+subd p eq - 7fdfffff ffffffff 7fe00000 00000000 fc900000 00000000
+subd z eq - 7fdfffff ffffffff 7fe00000 00000000 fc900000 00000000
+subd n eq - 7fb00000 00000001 7fafffff ffffffff 7c780000 00000000
+subd m eq - 7fb00000 00000001 7fafffff ffffffff 7c780000 00000000
+subd p eq - 7fb00000 00000001 7fafffff ffffffff 7c780000 00000000
+subd z eq - 7fb00000 00000001 7fafffff ffffffff 7c780000 00000000
+subd n eq - ffb00000 00000001 ffafffff ffffffff fc780000 00000000
+subd m eq - ffb00000 00000001 ffafffff ffffffff fc780000 00000000
+subd p eq - ffb00000 00000001 ffafffff ffffffff fc780000 00000000
+subd z eq - ffb00000 00000001 ffafffff ffffffff fc780000 00000000
+subd n eq - 7fcfffff ffffffff 7fd00000 00000002 fca40000 00000000
+subd m eq - 7fcfffff ffffffff 7fd00000 00000002 fca40000 00000000
+subd p eq - 7fcfffff ffffffff 7fd00000 00000002 fca40000 00000000
+subd z eq - 7fcfffff ffffffff 7fd00000 00000002 fca40000 00000000
+subd n eq - ffcfffff ffffffff ffd00000 00000002 7ca40000 00000000
+subd m eq - ffcfffff ffffffff ffd00000 00000002 7ca40000 00000000
+subd p eq - ffcfffff ffffffff ffd00000 00000002 7ca40000 00000000
+subd z eq - ffcfffff ffffffff ffd00000 00000002 7ca40000 00000000
+subd n eq - 7fd00000 00000001 7fe00000 00000001 ffd00000 00000001
+subd m eq - 7fd00000 00000001 7fe00000 00000001 ffd00000 00000001
+subd p eq - 7fd00000 00000001 7fe00000 00000001 ffd00000 00000001
+subd z eq - 7fd00000 00000001 7fe00000 00000001 ffd00000 00000001
+subd n eq - ffd00000 00000001 ffe00000 00000001 7fd00000 00000001
+subd m eq - ffd00000 00000001 ffe00000 00000001 7fd00000 00000001
+subd p eq - ffd00000 00000001 ffe00000 00000001 7fd00000 00000001
+subd z eq - ffd00000 00000001 ffe00000 00000001 7fd00000 00000001
+subd n eq - 7fe00000 00000002 7fd00000 00000001 7fd00000 00000003
+subd m eq - 7fe00000 00000002 7fd00000 00000001 7fd00000 00000003
+subd p eq - 7fe00000 00000002 7fd00000 00000001 7fd00000 00000003
+subd z eq - 7fe00000 00000002 7fd00000 00000001 7fd00000 00000003
+subd n eq - ffe00000 00000002 ffd00000 00000001 ffd00000 00000003
+subd m eq - ffe00000 00000002 ffd00000 00000001 ffd00000 00000003
+subd p eq - ffe00000 00000002 ffd00000 00000001 ffd00000 00000003
+subd z eq - ffe00000 00000002 ffd00000 00000001 ffd00000 00000003
+subd n eq - 7fd00000 00000002 7fc00000 00000003 7fc00000 00000001
+subd m eq - 7fd00000 00000002 7fc00000 00000003 7fc00000 00000001
+subd p eq - 7fd00000 00000002 7fc00000 00000003 7fc00000 00000001
+subd z eq - 7fd00000 00000002 7fc00000 00000003 7fc00000 00000001
+subd n eq - ffd00000 00000002 ffc00000 00000003 ffc00000 00000001
+subd m eq - ffd00000 00000002 ffc00000 00000003 ffc00000 00000001
+subd p eq - ffd00000 00000002 ffc00000 00000003 ffc00000 00000001
+subd z eq - ffd00000 00000002 ffc00000 00000003 ffc00000 00000001
+subd n eq - 00200000 00000000 001fffff ffffffff 00000000 00000001
+subd m eq - 00200000 00000000 001fffff ffffffff 00000000 00000001
+subd p eq - 00200000 00000000 001fffff ffffffff 00000000 00000001
+subd z eq - 00200000 00000000 001fffff ffffffff 00000000 00000001
+subd n eq - 80200000 00000000 801fffff ffffffff 80000000 00000001
+subd m eq - 80200000 00000000 801fffff ffffffff 80000000 00000001
+subd p eq - 80200000 00000000 801fffff ffffffff 80000000 00000001
+subd z eq - 80200000 00000000 801fffff ffffffff 80000000 00000001
+subd n eq - 801fffff ffffffff 80200000 00000000 00000000 00000001
+subd m eq - 801fffff ffffffff 80200000 00000000 00000000 00000001
+subd p eq - 801fffff ffffffff 80200000 00000000 00000000 00000001
+subd z eq - 801fffff ffffffff 80200000 00000000 00000000 00000001
+subd n eq - 001fffff ffffffff 00200000 00000000 80000000 00000001
+subd m eq - 001fffff ffffffff 00200000 00000000 80000000 00000001
+subd p eq - 001fffff ffffffff 00200000 00000000 80000000 00000001
+subd z eq - 001fffff ffffffff 00200000 00000000 80000000 00000001
+subd n eq - 00200000 00000001 001fffff ffffffff 00000000 00000003
+subd m eq - 00200000 00000001 001fffff ffffffff 00000000 00000003
+subd p eq - 00200000 00000001 001fffff ffffffff 00000000 00000003
+subd z eq - 00200000 00000001 001fffff ffffffff 00000000 00000003
+subd n eq - 80200000 00000001 801fffff ffffffff 80000000 00000003
+subd m eq - 80200000 00000001 801fffff ffffffff 80000000 00000003
+subd p eq - 80200000 00000001 801fffff ffffffff 80000000 00000003
+subd z eq - 80200000 00000001 801fffff ffffffff 80000000 00000003
+subd n eq - 00300000 00000000 002fffff ffffffff 00000000 00000002
+subd m eq - 00300000 00000000 002fffff ffffffff 00000000 00000002
+subd p eq - 00300000 00000000 002fffff ffffffff 00000000 00000002
+subd z eq - 00300000 00000000 002fffff ffffffff 00000000 00000002
+subd n eq - 80300000 00000000 802fffff ffffffff 80000000 00000002
+subd m eq - 80300000 00000000 802fffff ffffffff 80000000 00000002
+subd p eq - 80300000 00000000 802fffff ffffffff 80000000 00000002
+subd z eq - 80300000 00000000 802fffff ffffffff 80000000 00000002
+subd n eq - 802fffff ffffffff 80300000 00000000 00000000 00000002
+subd m eq - 802fffff ffffffff 80300000 00000000 00000000 00000002
+subd p eq - 802fffff ffffffff 80300000 00000000 00000000 00000002
+subd z eq - 802fffff ffffffff 80300000 00000000 00000000 00000002
+subd n eq - 002fffff ffffffff 00300000 00000000 80000000 00000002
+subd m eq - 002fffff ffffffff 00300000 00000000 80000000 00000002
+subd p eq - 002fffff ffffffff 00300000 00000000 80000000 00000002
+subd z eq - 002fffff ffffffff 00300000 00000000 80000000 00000002
+subd n eq - 00300000 00000001 002fffff ffffffff 00000000 00000006
+subd m eq - 00300000 00000001 002fffff ffffffff 00000000 00000006
+subd p eq - 00300000 00000001 002fffff ffffffff 00000000 00000006
+subd z eq - 00300000 00000001 002fffff ffffffff 00000000 00000006
+subd n eq - 80300000 00000001 802fffff ffffffff 80000000 00000006
+subd m eq - 80300000 00000001 802fffff ffffffff 80000000 00000006
+subd p eq - 80300000 00000001 802fffff ffffffff 80000000 00000006
+subd z eq - 80300000 00000001 802fffff ffffffff 80000000 00000006
+subd n eq - 001fffff ffffffff 00200000 00000002 80000000 00000005
+subd m eq - 001fffff ffffffff 00200000 00000002 80000000 00000005
+subd p eq - 001fffff ffffffff 00200000 00000002 80000000 00000005
+subd z eq - 001fffff ffffffff 00200000 00000002 80000000 00000005
+subd n eq - 801fffff ffffffff 80200000 00000002 00000000 00000005
+subd m eq - 801fffff ffffffff 80200000 00000002 00000000 00000005
+subd p eq - 801fffff ffffffff 80200000 00000002 00000000 00000005
+subd z eq - 801fffff ffffffff 80200000 00000002 00000000 00000005
+subd n eq - 001fffff ffffffff 00200000 00000004 80000000 00000009
+subd m eq - 001fffff ffffffff 00200000 00000004 80000000 00000009
+subd p eq - 001fffff ffffffff 00200000 00000004 80000000 00000009
+subd z eq - 001fffff ffffffff 00200000 00000004 80000000 00000009
+subd n eq - 801fffff ffffffff 80200000 00000004 00000000 00000009
+subd m eq - 801fffff ffffffff 80200000 00000004 00000000 00000009
+subd p eq - 801fffff ffffffff 80200000 00000004 00000000 00000009
+subd z eq - 801fffff ffffffff 80200000 00000004 00000000 00000009
+subd n eq - 00200000 00000001 00100000 00000001 00100000 00000001
+subd m eq - 00200000 00000001 00100000 00000001 00100000 00000001
+subd p eq - 00200000 00000001 00100000 00000001 00100000 00000001
+subd z eq - 00200000 00000001 00100000 00000001 00100000 00000001
+subd n eq - 80200000 00000001 80100000 00000001 80100000 00000001
+subd m eq - 80200000 00000001 80100000 00000001 80100000 00000001
+subd p eq - 80200000 00000001 80100000 00000001 80100000 00000001
+subd z eq - 80200000 00000001 80100000 00000001 80100000 00000001
+subd n eq - 00200000 00000002 00100000 00000001 00100000 00000003
+subd m eq - 00200000 00000002 00100000 00000001 00100000 00000003
+subd p eq - 00200000 00000002 00100000 00000001 00100000 00000003
+subd z eq - 00200000 00000002 00100000 00000001 00100000 00000003
+subd n eq - 80200000 00000002 80100000 00000001 80100000 00000003
+subd m eq - 80200000 00000002 80100000 00000001 80100000 00000003
+subd p eq - 80200000 00000002 80100000 00000001 80100000 00000003
+subd z eq - 80200000 00000002 80100000 00000001 80100000 00000003
+subd n eq - 00300000 00000002 00200000 00000003 00200000 00000001
+subd m eq - 00300000 00000002 00200000 00000003 00200000 00000001
+subd p eq - 00300000 00000002 00200000 00000003 00200000 00000001
+subd z eq - 00300000 00000002 00200000 00000003 00200000 00000001
+subd n eq - 80300000 00000002 80200000 00000003 80200000 00000001
+subd m eq - 80300000 00000002 80200000 00000003 80200000 00000001
+subd p eq - 80300000 00000002 80200000 00000003 80200000 00000001
+subd z eq - 80300000 00000002 80200000 00000003 80200000 00000001
+subd n eq x 00000000 00000001 ffe00000 00000000 7fe00000 00000000
+subd z eq x 00000000 00000001 ffe00000 00000000 7fe00000 00000000
+subd m eq x 00000000 00000001 ffe00000 00000000 7fe00000 00000000
+subd p eq x 00000000 00000001 ffe00000 00000000 7fe00000 00000001
+subd n eq x 80000000 00000001 7fe00000 00000000 ffe00000 00000000
+subd z eq x 80000000 00000001 7fe00000 00000000 ffe00000 00000000
+subd p eq x 80000000 00000001 7fe00000 00000000 ffe00000 00000000
+subd m eq x 80000000 00000001 7fe00000 00000000 ffe00000 00000001
+subd n eq x 00000000 00000001 ffdfffff ffffffff 7fdfffff ffffffff
+subd z eq x 00000000 00000001 ffdfffff ffffffff 7fdfffff ffffffff
+subd m eq x 00000000 00000001 ffdfffff ffffffff 7fdfffff ffffffff
+subd p eq x 00000000 00000001 ffdfffff ffffffff 7fe00000 00000000
+subd n eq x 80000000 00000001 7fdfffff ffffffff ffdfffff ffffffff
+subd z eq x 80000000 00000001 7fdfffff ffffffff ffdfffff ffffffff
+subd p eq x 80000000 00000001 7fdfffff ffffffff ffdfffff ffffffff
+subd m eq x 80000000 00000001 7fdfffff ffffffff ffe00000 00000000
+subd n eq x 00000000 00000001 ffefffff ffffffff 7fefffff ffffffff
+subd z eq x 00000000 00000001 ffefffff ffffffff 7fefffff ffffffff
+subd m eq x 00000000 00000001 ffefffff ffffffff 7fefffff ffffffff
+subd p eq xo 00000000 00000001 ffefffff ffffffff 7ff00000 00000000
+subd n eq x 80000000 00000001 7fefffff ffffffff ffefffff ffffffff
+subd z eq x 80000000 00000001 7fefffff ffffffff ffefffff ffffffff
+subd p eq x 80000000 00000001 7fefffff ffffffff ffefffff ffffffff
+subd m eq xo 80000000 00000001 7fefffff ffffffff fff00000 00000000
+subd n eq x 00000000 00000001 ffefffff fffffffe 7fefffff fffffffe
+subd z eq x 00000000 00000001 ffefffff fffffffe 7fefffff fffffffe
+subd m eq x 00000000 00000001 ffefffff fffffffe 7fefffff fffffffe
+subd p eq x 00000000 00000001 ffefffff fffffffe 7fefffff ffffffff
+subd n eq x 80000000 00000001 7fefffff fffffffe ffefffff fffffffe
+subd z eq x 80000000 00000001 7fefffff fffffffe ffefffff fffffffe
+subd p eq x 80000000 00000001 7fefffff fffffffe ffefffff fffffffe
+subd m eq x 80000000 00000001 7fefffff fffffffe ffefffff ffffffff
+subd n eq x 00000000 00000001 bfefffff ffffffff 3fefffff ffffffff
+subd z eq x 00000000 00000001 bfefffff ffffffff 3fefffff ffffffff
+subd m eq x 00000000 00000001 bfefffff ffffffff 3fefffff ffffffff
+subd p eq x 00000000 00000001 bfefffff ffffffff 3ff00000 00000000
+subd n eq x 80000000 00000001 3fefffff ffffffff bfefffff ffffffff
+subd z eq x 80000000 00000001 3fefffff ffffffff bfefffff ffffffff
+subd p eq x 80000000 00000001 3fefffff ffffffff bfefffff ffffffff
+subd m eq x 80000000 00000001 3fefffff ffffffff bff00000 00000000
+subd n eq x 00000000 00000001 bfffffff ffffffff 3fffffff ffffffff
+subd z eq x 00000000 00000001 bfffffff ffffffff 3fffffff ffffffff
+subd m eq x 00000000 00000001 bfffffff ffffffff 3fffffff ffffffff
+subd p eq x 00000000 00000001 bfffffff ffffffff 40000000 00000000
+subd n eq x 80000000 00000001 3fffffff ffffffff bfffffff ffffffff
+subd z eq x 80000000 00000001 3fffffff ffffffff bfffffff ffffffff
+subd p eq x 80000000 00000001 3fffffff ffffffff bfffffff ffffffff
+subd m eq x 80000000 00000001 3fffffff ffffffff c0000000 00000000
+subd n eq x 00000000 00000001 bfffffff fffffffe 3fffffff fffffffe
+subd z eq x 00000000 00000001 bfffffff fffffffe 3fffffff fffffffe
+subd m eq x 00000000 00000001 bfffffff fffffffe 3fffffff fffffffe
+subd p eq x 00000000 00000001 bfffffff fffffffe 3fffffff ffffffff
+subd n eq x 80000000 00000001 3fffffff fffffffe bfffffff fffffffe
+subd z eq x 80000000 00000001 3fffffff fffffffe bfffffff fffffffe
+subd p eq x 80000000 00000001 3fffffff fffffffe bfffffff fffffffe
+subd m eq x 80000000 00000001 3fffffff fffffffe bfffffff ffffffff
+subd n eq x 7fefffff ffffffff 00000000 00000001 7fefffff ffffffff
+subd p eq x 7fefffff ffffffff 00000000 00000001 7fefffff ffffffff
+subd z eq x 7fefffff ffffffff 00000000 00000001 7fefffff fffffffe
+subd m eq x 7fefffff ffffffff 00000000 00000001 7fefffff fffffffe
+subd n eq x ffefffff ffffffff 80000000 00000001 ffefffff ffffffff
+subd m eq x ffefffff ffffffff 80000000 00000001 ffefffff ffffffff
+subd z eq x ffefffff ffffffff 80000000 00000001 ffefffff fffffffe
+subd p eq x ffefffff ffffffff 80000000 00000001 ffefffff fffffffe
+subd n eq x 80000000 00000003 ffe00000 00000000 7fe00000 00000000
+subd p eq x 80000000 00000003 ffe00000 00000000 7fe00000 00000000
+subd z eq x 80000000 00000003 ffe00000 00000000 7fdfffff ffffffff
+subd m eq x 80000000 00000003 ffe00000 00000000 7fdfffff ffffffff
+subd n eq x 00000000 00000003 7fe00000 00000000 ffe00000 00000000
+subd m eq x 00000000 00000003 7fe00000 00000000 ffe00000 00000000
+subd z eq x 00000000 00000003 7fe00000 00000000 ffdfffff ffffffff
+subd p eq x 00000000 00000003 7fe00000 00000000 ffdfffff ffffffff
+subd n eq x 3fefffff ffffffff 00000000 00000001 3fefffff ffffffff
+subd p eq x 3fefffff ffffffff 00000000 00000001 3fefffff ffffffff
+subd z eq x 3fefffff ffffffff 00000000 00000001 3fefffff fffffffe
+subd m eq x 3fefffff ffffffff 00000000 00000001 3fefffff fffffffe
+subd n eq x bfffffff ffffffff 80000000 00000001 bfffffff ffffffff
+subd m eq x bfffffff ffffffff 80000000 00000001 bfffffff ffffffff
+subd z eq x bfffffff ffffffff 80000000 00000001 bfffffff fffffffe
+subd p eq x bfffffff ffffffff 80000000 00000001 bfffffff fffffffe
+subd n eq x c0000000 00000000 40000000 00000001 c0100000 00000000
+subd z eq x c0000000 00000000 40000000 00000001 c0100000 00000000
+subd p eq x c0000000 00000000 40000000 00000001 c0100000 00000000
+subd m eq x c0000000 00000000 40000000 00000001 c0100000 00000001
+subd n eq x 40000000 00000000 c0000000 00000001 40100000 00000000
+subd z eq x 40000000 00000000 c0000000 00000001 40100000 00000000
+subd m eq x 40000000 00000000 c0000000 00000001 40100000 00000000
+subd p eq x 40000000 00000000 c0000000 00000001 40100000 00000001
+subd n eq x 3ff00000 00000000 bff00000 00000003 40000000 00000002
+subd p eq x 3ff00000 00000000 bff00000 00000003 40000000 00000002
+subd z eq x 3ff00000 00000000 bff00000 00000003 40000000 00000001
+subd m eq x 3ff00000 00000000 bff00000 00000003 40000000 00000001
+subd n eq x bff00000 00000000 3ff00000 00000003 c0000000 00000002
+subd m eq x bff00000 00000000 3ff00000 00000003 c0000000 00000002
+subd z eq x bff00000 00000000 3ff00000 00000003 c0000000 00000001
+subd p eq x bff00000 00000000 3ff00000 00000003 c0000000 00000001
+subd n eq x c0000000 00000001 40000000 00000002 c0100000 00000002
+subd m eq x c0000000 00000001 40000000 00000002 c0100000 00000002
+subd z eq x c0000000 00000001 40000000 00000002 c0100000 00000001
+subd p eq x c0000000 00000001 40000000 00000002 c0100000 00000001
+subd n eq x 40000000 00000001 c0000000 00000002 40100000 00000002
+subd p eq x 40000000 00000001 c0000000 00000002 40100000 00000002
+subd z eq x 40000000 00000001 c0000000 00000002 40100000 00000001
+subd m eq x 40000000 00000001 c0000000 00000002 40100000 00000001
+subd n eq xo 7fefffff fffffffe ffefffff ffffffff 7ff00000 00000000
+subd p eq xo 7fefffff fffffffe ffefffff ffffffff 7ff00000 00000000
+subd z eq xo 7fefffff fffffffe ffefffff ffffffff 7fefffff ffffffff
+subd m eq xo 7fefffff fffffffe ffefffff ffffffff 7fefffff ffffffff
+subd n eq xo ffefffff fffffffe 7fefffff ffffffff fff00000 00000000
+subd m eq xo ffefffff fffffffe 7fefffff ffffffff fff00000 00000000
+subd z eq xo ffefffff fffffffe 7fefffff ffffffff ffefffff ffffffff
+subd p eq xo ffefffff fffffffe 7fefffff ffffffff ffefffff ffffffff
+subd n eq xo 7fdfffff ffffffff ffe00000 00000000 7ff00000 00000000
+subd p eq xo 7fdfffff ffffffff ffe00000 00000000 7ff00000 00000000
+subd z eq x 7fdfffff ffffffff ffe00000 00000000 7fefffff ffffffff
+subd m eq x 7fdfffff ffffffff ffe00000 00000000 7fefffff ffffffff
+subd n eq xo ffdfffff ffffffff 7fe00000 00000000 fff00000 00000000
+subd m eq xo ffdfffff ffffffff 7fe00000 00000000 fff00000 00000000
+subd z eq x ffdfffff ffffffff 7fe00000 00000000 ffefffff ffffffff
+subd p eq x ffdfffff ffffffff 7fe00000 00000000 ffefffff ffffffff
+subd n eq xo 7fe00000 00000001 ffe00000 00000000 7ff00000 00000000
+subd p eq xo 7fe00000 00000001 ffe00000 00000000 7ff00000 00000000
+subd z eq xo 7fe00000 00000001 ffe00000 00000000 7fefffff ffffffff
+subd m eq xo 7fe00000 00000001 ffe00000 00000000 7fefffff ffffffff
+subd n eq xo ffe00000 00000001 7fe00000 00000000 fff00000 00000000
+subd m eq xo ffe00000 00000001 7fe00000 00000000 fff00000 00000000
+subd z eq xo ffe00000 00000001 7fe00000 00000000 ffefffff ffffffff
+subd p eq xo ffe00000 00000001 7fe00000 00000000 ffefffff ffffffff
+subd n eq x 7fd00000 00000001 ffd00000 00000000 7fe00000 00000000
+subd z eq x 7fd00000 00000001 ffd00000 00000000 7fe00000 00000000
+subd m eq x 7fd00000 00000001 ffd00000 00000000 7fe00000 00000000
+subd p eq x 7fd00000 00000001 ffd00000 00000000 7fe00000 00000001
+subd n eq x ffd00000 00000001 7fd00000 00000000 ffe00000 00000000
+subd z eq x ffd00000 00000001 7fd00000 00000000 ffe00000 00000000
+subd p eq x ffd00000 00000001 7fd00000 00000000 ffe00000 00000000
+subd m eq x ffd00000 00000001 7fd00000 00000000 ffe00000 00000001
+subd n eq x 7fdfffff fffffffe ffdfffff ffffffff 7fefffff fffffffe
+subd z eq x 7fdfffff fffffffe ffdfffff ffffffff 7fefffff fffffffe
+subd m eq x 7fdfffff fffffffe ffdfffff ffffffff 7fefffff fffffffe
+subd p eq x 7fdfffff fffffffe ffdfffff ffffffff 7fefffff ffffffff
+subd n eq x ffdfffff fffffffe 7fdfffff ffffffff ffefffff fffffffe
+subd z eq x ffdfffff fffffffe 7fdfffff ffffffff ffefffff fffffffe
+subd p eq x ffdfffff fffffffe 7fdfffff ffffffff ffefffff fffffffe
+subd m eq x ffdfffff fffffffe 7fdfffff ffffffff ffefffff ffffffff
+subd p eq x 40000000 00000000 bcb00000 00000000 40000000 00000001
+subd n eq x 40000000 00000000 bcb00000 00000000 40000000 00000000
+subd z eq x 40000000 00000000 bcb00000 00000000 40000000 00000000
+subd m eq x 40000000 00000000 bcb00000 00000000 40000000 00000000
+subd n eq x 40000000 00000001 bcb00000 00000000 40000000 00000002
+subd p eq x 40000000 00000001 bcb00000 00000000 40000000 00000002
+subd z eq x 40000000 00000001 bcb00000 00000000 40000000 00000001
+subd m eq x 40000000 00000001 bcb00000 00000000 40000000 00000001
+subd n eq x 400fffff ffffffff bcb00000 00000000 40100000 00000000
+subd p eq x 400fffff ffffffff bcb00000 00000000 40100000 00000000
+subd z eq x 400fffff ffffffff bcb00000 00000000 400fffff ffffffff
+subd m eq x 400fffff ffffffff bcb00000 00000000 400fffff ffffffff
+subd p eq x 400fffff ffffffff bcafffff ffffffff 40100000 00000000
+subd z eq x 400fffff ffffffff bcafffff ffffffff 400fffff ffffffff
+subd n eq x 400fffff ffffffff bcafffff ffffffff 400fffff ffffffff
+subd m eq x 400fffff ffffffff bcafffff ffffffff 400fffff ffffffff
+subd n eq x c00fffff ffffffff 3cb00000 00000000 c0100000 00000000
+subd m eq x c00fffff ffffffff 3cb00000 00000000 c0100000 00000000
+subd z eq x c00fffff ffffffff 3cb00000 00000000 c00fffff ffffffff
+subd p eq x c00fffff ffffffff 3cb00000 00000000 c00fffff ffffffff
+subd m eq x c00fffff ffffffff 3cafffff ffffffff c0100000 00000000
+subd z eq x c00fffff ffffffff 3cafffff ffffffff c00fffff ffffffff
+subd n eq x c00fffff ffffffff 3cafffff ffffffff c00fffff ffffffff
+subd p eq x c00fffff ffffffff 3cafffff ffffffff c00fffff ffffffff
+subd n uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 80000000 00000000 7fffe000 00000000
+subd n uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - 00000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - 80000000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - 3ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - bff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd m uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd p uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd z uo - 000fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd n uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd m uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd p uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd z uo - 800fffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+subd m uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+subd p uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+subd z uo - 7fffe000 00000000 000fffff ffffffff 7fffe000 00000000
+subd n uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+subd m uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+subd p uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+subd z uo - 7fffe000 00000000 800fffff ffffffff 7fffe000 00000000
+subd n uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+subd m uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+subd p uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+subd z uo - 7fffe000 00000000 00000000 00000001 7fffe000 00000000
+subd n uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+subd m uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+subd p uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+subd z uo - 7fffe000 00000000 80000000 00000001 7fffe000 00000000
+subd n uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd m uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd p uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd z uo - 00000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd n uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd m uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd p uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd z uo - 80000000 00000001 7fffe000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd m uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd p uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd z uo - 7fffe000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd n uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd m uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd p uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd z uo - 7fffe000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd n uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd m uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd p uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd z uo - 7fefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd n uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd m uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd p uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd z uo - ffefffff ffffffff 7fffe000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 7ff00000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 fff00000 00000000 7fffe000 00000000
+subd n uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - 7ff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - fff00000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo - 7fffe000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 80000000 00000000 7fffe000 00000000
+subd n uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 00000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 80000000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 3ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v bff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd m uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd p uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd z uo v 000fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd n uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd m uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd p uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd z uo v 800fffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+subd m uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+subd p uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+subd z uo v 7ff02000 00000000 000fffff ffffffff 7fffe000 00000000
+subd n uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+subd m uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+subd p uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+subd z uo v 7ff02000 00000000 800fffff ffffffff 7fffe000 00000000
+subd n uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+subd m uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+subd p uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+subd z uo v 7ff02000 00000000 00000000 00000001 7fffe000 00000000
+subd n uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+subd m uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+subd p uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+subd z uo v 7ff02000 00000000 80000000 00000001 7fffe000 00000000
+subd n uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd m uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd p uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd z uo v 00000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd n uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd m uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd p uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd z uo v 80000000 00000001 7ff02000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd m uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd p uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd z uo v 7ff02000 00000000 7fefffff ffffffff 7fffe000 00000000
+subd n uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd m uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd p uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd z uo v 7ff02000 00000000 ffefffff ffffffff 7fffe000 00000000
+subd n uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd m uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd p uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd z uo v 7fefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd n uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd m uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd p uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd z uo v ffefffff ffffffff 7ff02000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 7ff00000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 fff00000 00000000 7fffe000 00000000
+subd n uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 7ff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v fff00000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 7fffe000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 7fffe000 00000000 7fffe000 00000000
+subd n uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+subd m uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+subd p uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+subd z uo v 7ff02000 00000000 7ff02000 00000000 7fffe000 00000000
+subd n eq - 41f00000 00004000 41f00000 00000000 3f900000 00000000
+subd m eq - 41f00000 00004000 41f00000 00000000 3f900000 00000000
+subd p eq - 41f00000 00004000 41f00000 00000000 3f900000 00000000
+subd z eq - 41f00000 00004000 41f00000 00000000 3f900000 00000000
+subd n eq x 43300000 00000000 3fe7ff00 00000000 432fffff ffffffff
+subd p eq x 43300000 00000000 3fe7ff00 00000000 432fffff ffffffff
+subd m eq x 43300000 00000000 3fe7ff00 00000000 432fffff fffffffe
+subd z eq x 43300000 00000000 3fe7ff00 00000000 432fffff fffffffe
diff --git a/verrou/unitTest/checkUCB-vecto/inputData/subs.input b/verrou/unitTest/checkUCB-vecto/inputData/subs.input
new file mode 100644
index 0000000000000000000000000000000000000000..c75f600d7028a7263c45eddc3852a6042c8712eb
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/inputData/subs.input
@@ -0,0 +1,1303 @@
+/* Copyright (C) 1988-1994 Sun Microsystems, Inc. 2550 Garcia Avenue */
+/* Mountain View, California  94043 All rights reserved. */
+/*  */
+/* Any person is hereby authorized to download, copy, use, create bug fixes,  */
+/* and distribute, subject to the following conditions: */
+/*  */
+/* 	1.  the software may not be redistributed for a fee except as */
+/* 	    reasonable to cover media costs; */
+/* 	2.  any copy of the software must include this notice, as well as  */
+/* 	    any other embedded copyright notices; and  */
+/* 	3.  any distribution of this software or derivative works thereof  */
+/* 	    must comply with all applicable U.S. export control laws. */
+/*  */
+/* THE SOFTWARE IS MADE AVAILABLE "AS IS" AND WITHOUT EXPRESS OR IMPLIED */
+/* WARRANTY OF ANY KIND, INCLUDING BUT NOT LIMITED TO THE IMPLIED */
+/* WARRANTIES OF DESIGN, MERCHANTIBILITY, FITNESS FOR A PARTICULAR */
+/* PURPOSE, NON-INFRINGEMENT, PERFORMANCE OR CONFORMANCE TO */
+/* SPECIFICATIONS.   */
+/*  */
+/* BY DOWNLOADING AND/OR USING THIS SOFTWARE, THE USER WAIVES ALL CLAIMS */
+/* AGAINST SUN MICROSYSTEMS, INC. AND ITS AFFILIATED COMPANIES IN ANY */
+/* JURISDICTION, INCLUDING BUT NOT LIMITED TO CLAIMS FOR DAMAGES OR */
+/* EQUITABLE RELIEF BASED ON LOSS OF DATA, AND SPECIFICALLY WAIVES EVEN */
+/* UNKNOWN OR UNANTICIPATED CLAIMS OR LOSSES, PRESENT AND FUTURE. */
+/*  */
+/* IN NO EVENT WILL SUN MICROSYSTEMS, INC. OR ANY OF ITS AFFILIATED */
+/* COMPANIES BE LIABLE FOR ANY LOST REVENUE OR PROFITS OR OTHER SPECIAL, */
+/* INDIRECT AND CONSEQUENTIAL DAMAGES, EVEN IF IT HAS BEEN ADVISED OF THE */
+/* POSSIBILITY OF SUCH DAMAGES. */
+/*  */
+/* This file is provided with no support and without any obligation on the */
+/* part of Sun Microsystems, Inc. ("Sun") or any of its affiliated */
+/* companies to assist in its use, correction, modification or */
+/* enhancement.  Nevertheless, and without creating any obligation on its */
+/* part, Sun welcomes your comments concerning the software and requests */
+/* that they be sent to fdlibm-comments@sunpro.sun.com. */
+
+subs p eq - 3f800000 bf800000 40000000
+subs z eq - 3f800000 bf800000 40000000
+subs n eq - 3f800000 c0000000 40400000
+subs m eq - 3f800000 c0000000 40400000
+subs p eq - 3f800000 c0000000 40400000
+subs z eq - 3f800000 c0000000 40400000
+subs n eq - 40000000 bf800000 40400000
+subs m eq - 40000000 bf800000 40400000
+subs p eq - 40000000 bf800000 40400000
+subs z eq - 40000000 bf800000 40400000
+subs n eq - 40000000 c0000000 40800000
+subs m eq - 40000000 c0000000 40800000
+subs p eq - 40000000 c0000000 40800000
+subs z eq - 40000000 c0000000 40800000
+subs n eq - 40000000 40000000 00000000
+subs z eq - 40000000 40000000 00000000
+subs p eq - 40000000 40000000 00000000
+subs m eq - 40000000 40000000 80000000
+subs n eq - 40a00000 40a00000 00000000
+subs z eq - 40a00000 40a00000 00000000
+subs p eq - 40a00000 40a00000 00000000
+subs m eq - 40a00000 40a00000 80000000
+subs n eq - 3f800000 c0e00000 41000000
+subs m eq - 3f800000 c0e00000 41000000
+subs p eq - 3f800000 c0e00000 41000000
+subs z eq - 3f800000 c0e00000 41000000
+subs n eq - 40a00000 3f800000 40800000
+subs m eq - 40a00000 3f800000 40800000
+subs p eq - 40a00000 3f800000 40800000
+subs z eq - 40a00000 3f800000 40800000
+subs n eq - 40000000 40a00000 c0400000
+subs m eq - 40000000 40a00000 c0400000
+subs p eq - 40000000 40a00000 c0400000
+subs z eq - 40000000 40a00000 c0400000
+subs n eq - 40a00000 00000000 40a00000
+subs m eq - 40a00000 00000000 40a00000
+subs p eq - 40a00000 00000000 40a00000
+subs z eq - 40a00000 00000000 40a00000
+subs n eq - 3f800000 00000000 3f800000
+subs m eq - 3f800000 00000000 3f800000
+subs p eq - 3f800000 00000000 3f800000
+subs z eq - 3f800000 00000000 3f800000
+subs n eq - bf800000 00000000 bf800000
+subs m eq - bf800000 00000000 bf800000
+subs p eq - bf800000 00000000 bf800000
+subs z eq - bf800000 00000000 bf800000
+subs n eq - 00000000 bf800000 3f800000
+subs m eq - 00000000 bf800000 3f800000
+subs p eq - 00000000 bf800000 3f800000
+subs z eq - 00000000 bf800000 3f800000
+subs n eq - 80000000 3f800000 bf800000
+subs m eq - 80000000 3f800000 bf800000
+subs p eq - 80000000 3f800000 bf800000
+subs z eq - 80000000 3f800000 bf800000
+subs n eq - 00000000 00000000 00000000
+subs z eq - 00000000 00000000 00000000
+subs p eq - 00000000 00000000 00000000
+subs m eq - 00000000 00000000 80000000
+subs n eq - 80000000 00000000 80000000
+subs m eq - 80000000 00000000 80000000
+subs p eq - 80000000 00000000 80000000
+subs z eq - 80000000 00000000 80000000
+subs n eq - 40400000 c0400000 40c00000
+subs m eq - 40400000 c0400000 40c00000
+subs p eq - 40400000 c0400000 40c00000
+subs z eq - 40400000 c0400000 40c00000
+subs n eq - 3f800000 3f800000 00000000
+subs z eq - 3f800000 3f800000 00000000
+subs p eq - 3f800000 3f800000 00000000
+subs n eq - c0400000 c0400000 00000000
+subs z eq - c0400000 c0400000 00000000
+subs p eq - c0400000 c0400000 00000000
+subs m eq - 3f800000 3f800000 80000000
+subs m eq - c0400000 c0400000 80000000
+subs n eq - 7f800000 00000000 7f800000
+subs m eq - 7f800000 00000000 7f800000
+subs p eq - 7f800000 00000000 7f800000
+subs z eq - 7f800000 00000000 7f800000
+subs n eq - ff800000 00000000 ff800000
+subs m eq - ff800000 00000000 ff800000
+subs p eq - ff800000 00000000 ff800000
+subs z eq - ff800000 00000000 ff800000
+subs n eq - ff000000 00000000 ff000000
+subs m eq - ff000000 00000000 ff000000
+subs p eq - ff000000 00000000 ff000000
+subs z eq - ff000000 00000000 ff000000
+subs n eq - 00000003 00000000 00000003
+subs m eq - 00000003 00000000 00000003
+subs p eq - 00000003 00000000 00000003
+subs z eq - 00000003 00000000 00000003
+subs n eq - 80000003 00000000 80000003
+subs m eq - 80000003 00000000 80000003
+subs p eq - 80000003 00000000 80000003
+subs z eq - 80000003 00000000 80000003
+subs n eq - 3f800001 3f800000 34000000
+subs m eq - 3f800001 3f800000 34000000
+subs p eq - 3f800001 3f800000 34000000
+subs z eq - 3f800001 3f800000 34000000
+subs n eq - bf800001 bf800000 b4000000
+subs m eq - bf800001 bf800000 b4000000
+subs p eq - bf800001 bf800000 b4000000
+subs z eq - bf800001 bf800000 b4000000
+subs n eq - bfffffff c0000000 34000000
+subs m eq - bfffffff c0000000 34000000
+subs p eq - bfffffff c0000000 34000000
+subs z eq - bfffffff c0000000 34000000
+subs n eq - 3fffffff 40000000 b4000000
+subs m eq - 3fffffff 40000000 b4000000
+subs p eq - 3fffffff 40000000 b4000000
+subs z eq - 3fffffff 40000000 b4000000
+subs n eq x 7f000000 bf800000 7f000000
+subs z eq x 7f000000 bf800000 7f000000
+subs m eq x 7f000000 bf800000 7f000000
+subs p eq x 7f000000 bf800000 7f000001
+subs n eq x ff000000 3f800000 ff000000
+subs z eq x ff000000 3f800000 ff000000
+subs p eq x ff000000 3f800000 ff000000
+subs m eq x ff000000 3f800000 ff000001
+subs n eq x 7effffff bf800000 7effffff
+subs z eq x 7effffff bf800000 7effffff
+subs m eq x 7effffff bf800000 7effffff
+subs p eq x 7effffff bf800000 7f000000
+subs n eq x feffffff 3f800000 feffffff
+subs z eq x feffffff 3f800000 feffffff
+subs p eq x feffffff 3f800000 feffffff
+subs m eq x feffffff 3f800000 ff000000
+subs n eq x 7f7fffff bf800000 7f7fffff
+subs z eq x 7f7fffff bf800000 7f7fffff
+subs m eq x 7f7fffff bf800000 7f7fffff
+subs p eq xo 7f7fffff bf800000 7f800000
+subs n eq x ff7fffff 3f800000 ff7fffff
+subs z eq x ff7fffff 3f800000 ff7fffff
+subs p eq x ff7fffff 3f800000 ff7fffff
+subs m eq xo ff7fffff 3f800000 ff800000
+subs n eq x 7f7ffffe bf800000 7f7ffffe
+subs z eq x 7f7ffffe bf800000 7f7ffffe
+subs m eq x 7f7ffffe bf800000 7f7ffffe
+subs p eq x 7f7ffffe bf800000 7f7fffff
+subs n eq x ff7ffffe 3f800000 ff7ffffe
+subs z eq x ff7ffffe 3f800000 ff7ffffe
+subs p eq x ff7ffffe 3f800000 ff7ffffe
+subs m eq x ff7ffffe 3f800000 ff7fffff
+subs n eq x 00000001 bf800000 3f800000
+subs z eq x 00000001 bf800000 3f800000
+subs m eq x 00000001 bf800000 3f800000
+subs p eq x 00000001 bf800000 3f800001
+subs n eq x 80000001 3f800000 bf800000
+subs z eq x 80000001 3f800000 bf800000
+subs p eq x 80000001 3f800000 bf800000
+subs m eq x 80000001 3f800000 bf800001
+subs n eq x 7f000000 3f800000 7f000000
+subs p eq x 7f000000 3f800000 7f000000
+subs z eq x 7f000000 3f800000 7effffff
+subs m eq x 7f000000 3f800000 7effffff
+subs n eq x ff000000 bf800000 ff000000
+subs m eq x ff000000 bf800000 ff000000
+subs z eq x ff000000 bf800000 feffffff
+subs p eq x ff000000 bf800000 feffffff
+subs n eq x 7effffff 3f800000 7effffff
+subs p eq x 7effffff 3f800000 7effffff
+subs z eq x 7effffff 3f800000 7efffffe
+subs m eq x 7effffff 3f800000 7efffffe
+subs n eq x feffffff bf800000 feffffff
+subs m eq x feffffff bf800000 feffffff
+subs z eq x feffffff bf800000 fefffffe
+subs p eq x feffffff bf800000 fefffffe
+subs n eq x 7f7fffff 3f800000 7f7fffff
+subs p eq x 7f7fffff 3f800000 7f7fffff
+subs z eq x 7f7fffff 3f800000 7f7ffffe
+subs m eq x 7f7fffff 3f800000 7f7ffffe
+subs n eq x ff7fffff bf800000 ff7fffff
+subs m eq x ff7fffff bf800000 ff7fffff
+subs z eq x ff7fffff bf800000 ff7ffffe
+subs p eq x ff7fffff bf800000 ff7ffffe
+subs n eq x 7f7ffffe 3f800000 7f7ffffe
+subs p eq x 7f7ffffe 3f800000 7f7ffffe
+subs z eq x 7f7ffffe 3f800000 7f7ffffd
+subs m eq x 7f7ffffe 3f800000 7f7ffffd
+subs n eq x ff7ffffe bf800000 ff7ffffe
+subs m eq x ff7ffffe bf800000 ff7ffffe
+subs z eq x ff7ffffe bf800000 ff7ffffd
+subs p eq x ff7ffffe bf800000 ff7ffffd
+subs n eq x 80000003 c0400000 40400000
+subs p eq x 80000003 c0400000 40400000
+subs z eq x 80000003 c0400000 403fffff
+subs m eq x 80000003 c0400000 403fffff
+subs n eq x 00000003 40a00000 c0a00000
+subs m eq x 00000003 40a00000 c0a00000
+subs z eq x 00000003 40a00000 c09fffff
+subs p eq x 00000003 40a00000 c09fffff
+subs n eq x 3f800001 bf800000 40000000
+subs z eq x 3f800001 bf800000 40000000
+subs m eq x 3f800001 bf800000 40000000
+subs p eq x 3f800001 bf800000 40000001
+subs n eq x bf800001 3f800000 c0000000
+subs z eq x bf800001 3f800000 c0000000
+subs p eq x bf800001 3f800000 c0000000
+subs m eq x bf800001 3f800000 c0000001
+subs n uo - 7fff0000 00000000 7fff0000
+subs m uo - 7fff0000 00000000 7fff0000
+subs p uo - 7fff0000 00000000 7fff0000
+subs z uo - 7fff0000 00000000 7fff0000
+subs n uo - 7fff0000 3f800000 7fff0000
+subs m uo - 7fff0000 3f800000 7fff0000
+subs p uo - 7fff0000 3f800000 7fff0000
+subs z uo - 7fff0000 3f800000 7fff0000
+subs n uo - 7fff0000 bf800000 7fff0000
+subs m uo - 7fff0000 bf800000 7fff0000
+subs p uo - 7fff0000 bf800000 7fff0000
+subs z uo - 7fff0000 bf800000 7fff0000
+subs n uo v 7f810000 00000000 7fff0000
+subs m uo v 7f810000 00000000 7fff0000
+subs p uo v 7f810000 00000000 7fff0000
+subs z uo v 7f810000 00000000 7fff0000
+subs n uo v 7f810000 3f800000 7fff0000
+subs m uo v 7f810000 3f800000 7fff0000
+subs p uo v 7f810000 3f800000 7fff0000
+subs z uo v 7f810000 3f800000 7fff0000
+subs n uo v 7f810000 bf800000 7fff0000
+subs m uo v 7f810000 bf800000 7fff0000
+subs p uo v 7f810000 bf800000 7fff0000
+subs z uo v 7f810000 bf800000 7fff0000
+subs p eq - 45800000 45800000 00000000
+subs n eq - 45800000 45800000 00000000
+subs z eq - 45800000 45800000 00000000
+subs p eq - 45000000 45000000 00000000
+subs n eq - 45000000 45000000 00000000
+subs z eq - 45000000 45000000 00000000
+subs m eq - 45800000 45800000 80000000
+subs m eq - 45000000 45000000 80000000
+subs n eq - 45000000 45800000 c5000000
+subs m eq - 45000000 45800000 c5000000
+subs p eq - 45000000 45800000 c5000000
+subs z eq - 45000000 45800000 c5000000
+subs p eq - c5800000 c5800000 00000000
+subs n eq - c5800000 c5800000 00000000
+subs z eq - c5800000 c5800000 00000000
+subs p eq - c5000000 c5000000 00000000
+subs n eq - c5000000 c5000000 00000000
+subs z eq - c5000000 c5000000 00000000
+subs m eq - c5800000 c5800000 80000000
+subs m eq - c5000000 c5000000 80000000
+subs n eq - c5000000 c5800000 45000000
+subs m eq - c5000000 c5800000 45000000
+subs p eq - c5000000 c5800000 45000000
+subs z eq - c5000000 c5800000 45000000
+subs n eq - 00000000 45800000 c5800000
+subs m eq - 00000000 45800000 c5800000
+subs p eq - 00000000 45800000 c5800000
+subs z eq - 00000000 45800000 c5800000
+subs n eq - 45800000 00000000 45800000
+subs m eq - 45800000 00000000 45800000
+subs p eq - 45800000 00000000 45800000
+subs z eq - 45800000 00000000 45800000
+subs n eq - c5800000 45800000 c6000000
+subs m eq - c5800000 45800000 c6000000
+subs p eq - c5800000 45800000 c6000000
+subs z eq - c5800000 45800000 c6000000
+subs n eq - 45800000 c5800000 46000000
+subs m eq - 45800000 c5800000 46000000
+subs p eq - 45800000 c5800000 46000000
+subs z eq - 45800000 c5800000 46000000
+subs n eq - 45800000 c5000000 45c00000
+subs m eq - 45800000 c5000000 45c00000
+subs p eq - 45800000 c5000000 45c00000
+subs z eq - 45800000 c5000000 45c00000
+subs n eq - c5000000 45800000 c5c00000
+subs m eq - c5000000 45800000 c5c00000
+subs p eq - c5000000 45800000 c5c00000
+subs z eq - c5000000 45800000 c5c00000
+subs n eq - c5800000 45000000 c5c00000
+subs m eq - c5800000 45000000 c5c00000
+subs p eq - c5800000 45000000 c5c00000
+subs z eq - c5800000 45000000 c5c00000
+subs n eq - 45000000 c5800000 45c00000
+subs m eq - 45000000 c5800000 45c00000
+subs p eq - 45000000 c5800000 45c00000
+subs z eq - 45000000 c5800000 45c00000
+subs n eq - 46fffe00 3f800000 46fffc00
+subs m eq - 46fffe00 3f800000 46fffc00
+subs p eq - 46fffe00 3f800000 46fffc00
+subs z eq - 46fffe00 3f800000 46fffc00
+subs n eq - 3f800000 46fffe00 c6fffc00
+subs m eq - 3f800000 46fffe00 c6fffc00
+subs p eq - 3f800000 46fffe00 c6fffc00
+subs z eq - 3f800000 46fffe00 c6fffc00
+subs n eq - 46fffe00 46fffa00 40000000
+subs m eq - 46fffe00 46fffa00 40000000
+subs p eq - 46fffe00 46fffa00 40000000
+subs z eq - 46fffe00 46fffa00 40000000
+subs n eq - 47000000 46fffe00 3f800000
+subs m eq - 47000000 46fffe00 3f800000
+subs p eq - 47000000 46fffe00 3f800000
+subs z eq - 47000000 46fffe00 3f800000
+subs n eq - c7000000 c6fffe00 bf800000
+subs m eq - c7000000 c6fffe00 bf800000
+subs p eq - c7000000 c6fffe00 bf800000
+subs z eq - c7000000 c6fffe00 bf800000
+subs n eq - 47000100 c6fffe00 47800000
+subs m eq - 47000100 c6fffe00 47800000
+subs p eq - 47000100 c6fffe00 47800000
+subs z eq - 47000100 c6fffe00 47800000
+subs n eq - 467ffc00 c67ffc00 46fffc00
+subs m eq - 467ffc00 c67ffc00 46fffc00
+subs p eq - 467ffc00 c67ffc00 46fffc00
+subs z eq - 467ffc00 c67ffc00 46fffc00
+subs n eq - c6fffe00 3f800000 c7000000
+subs m eq - c6fffe00 3f800000 c7000000
+subs p eq - c6fffe00 3f800000 c7000000
+subs z eq - c6fffe00 3f800000 c7000000
+subs n eq - 40a00000 80000000 40a00000
+subs m eq - 40a00000 80000000 40a00000
+subs p eq - 40a00000 80000000 40a00000
+subs z eq - 40a00000 80000000 40a00000
+subs n eq - 7f800000 ff800000 7f800000
+subs m eq - 7f800000 ff800000 7f800000
+subs p eq - 7f800000 ff800000 7f800000
+subs z eq - 7f800000 ff800000 7f800000
+subs n eq - ff800000 7f800000 ff800000
+subs m eq - ff800000 7f800000 ff800000
+subs p eq - ff800000 7f800000 ff800000
+subs z eq - ff800000 7f800000 ff800000
+subs n uo v ff800000 ff800000 7fff0000
+subs m uo v ff800000 ff800000 7fff0000
+subs p uo v ff800000 ff800000 7fff0000
+subs z uo v ff800000 ff800000 7fff0000
+subs n uo v 7f800000 7f800000 7fff0000
+subs m uo v 7f800000 7f800000 7fff0000
+subs p uo v 7f800000 7f800000 7fff0000
+subs z uo v 7f800000 7f800000 7fff0000
+subs n eq - 7f800000 ff000000 7f800000
+subs m eq - 7f800000 ff000000 7f800000
+subs p eq - 7f800000 ff000000 7f800000
+subs z eq - 7f800000 ff000000 7f800000
+subs n eq - 7f800000 7f000000 7f800000
+subs m eq - 7f800000 7f000000 7f800000
+subs p eq - 7f800000 7f000000 7f800000
+subs z eq - 7f800000 7f000000 7f800000
+subs n eq - ff800000 ff000000 ff800000
+subs m eq - ff800000 ff000000 ff800000
+subs p eq - ff800000 ff000000 ff800000
+subs z eq - ff800000 ff000000 ff800000
+subs n eq - ff800000 7f000000 ff800000
+subs m eq - ff800000 7f000000 ff800000
+subs p eq - ff800000 7f000000 ff800000
+subs z eq - ff800000 7f000000 ff800000
+subs n eq - 7f000000 ff800000 7f800000
+subs m eq - 7f000000 ff800000 7f800000
+subs p eq - 7f000000 ff800000 7f800000
+subs z eq - 7f000000 ff800000 7f800000
+subs n eq - 7f000000 7f800000 ff800000
+subs m eq - 7f000000 7f800000 ff800000
+subs p eq - 7f000000 7f800000 ff800000
+subs z eq - 7f000000 7f800000 ff800000
+subs n eq - ff000000 ff800000 7f800000
+subs m eq - ff000000 ff800000 7f800000
+subs p eq - ff000000 ff800000 7f800000
+subs z eq - ff000000 ff800000 7f800000
+subs n eq - ff000000 7f800000 ff800000
+subs m eq - ff000000 7f800000 ff800000
+subs p eq - ff000000 7f800000 ff800000
+subs z eq - ff000000 7f800000 ff800000
+subs n eq - 7f800000 80000000 7f800000
+subs m eq - 7f800000 80000000 7f800000
+subs p eq - 7f800000 80000000 7f800000
+subs z eq - 7f800000 80000000 7f800000
+subs n eq - ff800000 80000000 ff800000
+subs m eq - ff800000 80000000 ff800000
+subs p eq - ff800000 80000000 ff800000
+subs z eq - ff800000 80000000 ff800000
+subs n eq - 00000000 ff800000 7f800000
+subs m eq - 00000000 ff800000 7f800000
+subs p eq - 00000000 ff800000 7f800000
+subs z eq - 00000000 ff800000 7f800000
+subs n eq - 80000000 ff800000 7f800000
+subs m eq - 80000000 ff800000 7f800000
+subs p eq - 80000000 ff800000 7f800000
+subs z eq - 80000000 ff800000 7f800000
+subs n eq - 00000000 7f800000 ff800000
+subs m eq - 00000000 7f800000 ff800000
+subs p eq - 00000000 7f800000 ff800000
+subs z eq - 00000000 7f800000 ff800000
+subs n eq - 80000000 7f800000 ff800000
+subs m eq - 80000000 7f800000 ff800000
+subs p eq - 80000000 7f800000 ff800000
+subs z eq - 80000000 7f800000 ff800000
+subs n eq - 7f800000 807fffff 7f800000
+subs m eq - 7f800000 807fffff 7f800000
+subs p eq - 7f800000 807fffff 7f800000
+subs z eq - 7f800000 807fffff 7f800000
+subs n eq - ff800000 807fffff ff800000
+subs m eq - ff800000 807fffff ff800000
+subs p eq - ff800000 807fffff ff800000
+subs z eq - ff800000 807fffff ff800000
+subs n eq - 7f800000 007fffff 7f800000
+subs m eq - 7f800000 007fffff 7f800000
+subs p eq - 7f800000 007fffff 7f800000
+subs z eq - 7f800000 007fffff 7f800000
+subs n eq - ff800000 007fffff ff800000
+subs m eq - ff800000 007fffff ff800000
+subs p eq - ff800000 007fffff ff800000
+subs z eq - ff800000 007fffff ff800000
+subs n eq - 00000003 ff800000 7f800000
+subs m eq - 00000003 ff800000 7f800000
+subs p eq - 00000003 ff800000 7f800000
+subs z eq - 00000003 ff800000 7f800000
+subs n eq - 00000003 7f800000 ff800000
+subs m eq - 00000003 7f800000 ff800000
+subs p eq - 00000003 7f800000 ff800000
+subs z eq - 00000003 7f800000 ff800000
+subs n eq - 80000003 ff800000 7f800000
+subs m eq - 80000003 ff800000 7f800000
+subs p eq - 80000003 ff800000 7f800000
+subs z eq - 80000003 ff800000 7f800000
+subs n eq - 80000003 7f800000 ff800000
+subs m eq - 80000003 7f800000 ff800000
+subs p eq - 80000003 7f800000 ff800000
+subs z eq - 80000003 7f800000 ff800000
+subs n eq - 00000000 ff000000 7f000000
+subs m eq - 00000000 ff000000 7f000000
+subs p eq - 00000000 ff000000 7f000000
+subs z eq - 00000000 ff000000 7f000000
+subs n eq - 80000000 ff000000 7f000000
+subs m eq - 80000000 ff000000 7f000000
+subs p eq - 80000000 ff000000 7f000000
+subs z eq - 80000000 ff000000 7f000000
+subs n eq - ff000000 80000000 ff000000
+subs m eq - ff000000 80000000 ff000000
+subs p eq - ff000000 80000000 ff000000
+subs z eq - ff000000 80000000 ff000000
+subs n eq - 00000000 807fffff 007fffff
+subs m eq - 00000000 807fffff 007fffff
+subs p eq - 00000000 807fffff 007fffff
+subs z eq - 00000000 807fffff 007fffff
+subs n eq - 80000000 807fffff 007fffff
+subs m eq - 80000000 807fffff 007fffff
+subs p eq - 80000000 807fffff 007fffff
+subs z eq - 80000000 807fffff 007fffff
+subs n eq - 00000000 007fffff 807fffff
+subs m eq - 00000000 007fffff 807fffff
+subs p eq - 00000000 007fffff 807fffff
+subs z eq - 00000000 007fffff 807fffff
+subs n eq - 80000000 007fffff 807fffff
+subs m eq - 80000000 007fffff 807fffff
+subs p eq - 80000000 007fffff 807fffff
+subs z eq - 80000000 007fffff 807fffff
+subs n eq - 00000003 80000000 00000003
+subs m eq - 00000003 80000000 00000003
+subs p eq - 00000003 80000000 00000003
+subs z eq - 00000003 80000000 00000003
+subs n eq - 80000003 80000000 80000003
+subs m eq - 80000003 80000000 80000003
+subs p eq - 80000003 80000000 80000003
+subs z eq - 80000003 80000000 80000003
+subs n eq - 80000000 00800000 80800000
+subs m eq - 80000000 00800000 80800000
+subs p eq - 80000000 00800000 80800000
+subs z eq - 80000000 00800000 80800000
+subs n eq - 00800000 80000000 00800000
+subs m eq - 00800000 80000000 00800000
+subs p eq - 00800000 80000000 00800000
+subs z eq - 00800000 80000000 00800000
+subs n eq - 00000000 00800000 80800000
+subs m eq - 00000000 00800000 80800000
+subs p eq - 00000000 00800000 80800000
+subs z eq - 00000000 00800000 80800000
+subs n eq - 80800000 80000000 80800000
+subs m eq - 80800000 80000000 80800000
+subs p eq - 80800000 80000000 80800000
+subs z eq - 80800000 80000000 80800000
+subs n eq - 80000000 80000000 00000000
+subs z eq - 80000000 80000000 00000000
+subs p eq - 80000000 80000000 00000000
+subs m eq - 80000000 80000000 80000000
+subs n eq - 00000000 80000000 00000000
+subs m eq - 00000000 80000000 00000000
+subs p eq - 00000000 80000000 00000000
+subs z eq - 00000000 80000000 00000000
+subs n eq xo 7f000000 ff000000 7f800000
+subs p eq xo 7f000000 ff000000 7f800000
+subs z eq xo 7f000000 ff000000 7f7fffff
+subs m eq xo 7f000000 ff000000 7f7fffff
+subs n eq xo ff000000 7f000000 ff800000
+subs m eq xo ff000000 7f000000 ff800000
+subs z eq xo ff000000 7f000000 ff7fffff
+subs p eq xo ff000000 7f000000 ff7fffff
+subs n eq - 7efffffe fefffffe 7f7ffffe
+subs m eq - 7efffffe fefffffe 7f7ffffe
+subs p eq - 7efffffe fefffffe 7f7ffffe
+subs z eq - 7efffffe fefffffe 7f7ffffe
+subs n eq - fefffffe 7efffffe ff7ffffe
+subs m eq - fefffffe 7efffffe ff7ffffe
+subs p eq - fefffffe 7efffffe ff7ffffe
+subs z eq - fefffffe 7efffffe ff7ffffe
+subs n eq xo 7f7ffffe ff7ffffe 7f800000
+subs p eq xo 7f7ffffe ff7ffffe 7f800000
+subs z eq xo 7f7ffffe ff7ffffe 7f7fffff
+subs m eq xo 7f7ffffe ff7ffffe 7f7fffff
+subs n eq xo ff7ffffe 7f7ffffe ff800000
+subs m eq xo ff7ffffe 7f7ffffe ff800000
+subs z eq xo ff7ffffe 7f7ffffe ff7fffff
+subs p eq xo ff7ffffe 7f7ffffe ff7fffff
+subs n eq - 00800000 80800000 01000000
+subs m eq - 00800000 80800000 01000000
+subs p eq - 00800000 80800000 01000000
+subs z eq - 00800000 80800000 01000000
+subs n eq - 7e800000 fe800000 7f000000
+subs m eq - 7e800000 fe800000 7f000000
+subs p eq - 7e800000 fe800000 7f000000
+subs z eq - 7e800000 fe800000 7f000000
+subs n eq - 007fffff 807fffff 00fffffe
+subs m eq - 007fffff 807fffff 00fffffe
+subs p eq - 007fffff 807fffff 00fffffe
+subs z eq - 007fffff 807fffff 00fffffe
+subs n eq - 807fffff 007fffff 80fffffe
+subs m eq - 807fffff 007fffff 80fffffe
+subs p eq - 807fffff 007fffff 80fffffe
+subs z eq - 807fffff 007fffff 80fffffe
+subs n eq - 00000004 80000004 00000008
+subs m eq - 00000004 80000004 00000008
+subs p eq - 00000004 80000004 00000008
+subs z eq - 00000004 80000004 00000008
+subs n eq - 80000004 00000004 80000008
+subs m eq - 80000004 00000004 80000008
+subs p eq - 80000004 00000004 80000008
+subs z eq - 80000004 00000004 80000008
+subs n eq - 00000001 80000001 00000002
+subs m eq - 00000001 80000001 00000002
+subs p eq - 00000001 80000001 00000002
+subs z eq - 00000001 80000001 00000002
+subs n eq - 80000001 00000001 80000002
+subs m eq - 80000001 00000001 80000002
+subs p eq - 80000001 00000001 80000002
+subs z eq - 80000001 00000001 80000002
+subs n eq - 7f000000 7f000000 00000000
+subs z eq - 7f000000 7f000000 00000000
+subs p eq - 7f000000 7f000000 00000000
+subs n eq - fefffffe fefffffe 00000000
+subs z eq - fefffffe fefffffe 00000000
+subs p eq - fefffffe fefffffe 00000000
+subs n eq - 00800000 00800000 00000000
+subs z eq - 00800000 00800000 00000000
+subs p eq - 00800000 00800000 00000000
+subs n eq - 80800000 80800000 00000000
+subs z eq - 80800000 80800000 00000000
+subs p eq - 80800000 80800000 00000000
+subs n eq - 007ffffc 007ffffc 00000000
+subs z eq - 007ffffc 007ffffc 00000000
+subs p eq - 007ffffc 007ffffc 00000000
+subs n eq - 807fffff 807fffff 00000000
+subs z eq - 807fffff 807fffff 00000000
+subs p eq - 807fffff 807fffff 00000000
+subs n eq - 00000001 00000001 00000000
+subs z eq - 00000001 00000001 00000000
+subs p eq - 00000001 00000001 00000000
+subs n eq - 80000001 80000001 00000000
+subs z eq - 80000001 80000001 00000000
+subs p eq - 80000001 80000001 00000000
+subs n eq - 7f7fffff 7f7fffff 00000000
+subs z eq - 7f7fffff 7f7fffff 00000000
+subs p eq - 7f7fffff 7f7fffff 00000000
+subs m eq - 7f000000 7f000000 80000000
+subs m eq - fefffffe fefffffe 80000000
+subs m eq - 00800000 00800000 80000000
+subs m eq - 80800000 80800000 80000000
+subs m eq - 007ffffc 007ffffc 80000000
+subs m eq - 807fffff 807fffff 80000000
+subs m eq - 00000001 00000001 80000000
+subs m eq - 80000001 80000001 80000000
+subs m eq - 7f7fffff 7f7fffff 80000000
+subs n eq - 3f800001 3f800002 b4000000
+subs m eq - 3f800001 3f800002 b4000000
+subs p eq - 3f800001 3f800002 b4000000
+subs z eq - 3f800001 3f800002 b4000000
+subs n eq - bf800001 bf800002 34000000
+subs m eq - bf800001 bf800002 34000000
+subs p eq - bf800001 bf800002 34000000
+subs z eq - bf800001 bf800002 34000000
+subs n eq - 40000000 40000001 b4800000
+subs m eq - 40000000 40000001 b4800000
+subs p eq - 40000000 40000001 b4800000
+subs z eq - 40000000 40000001 b4800000
+subs n eq - c0000000 c0000001 34800000
+subs m eq - c0000000 c0000001 34800000
+subs p eq - c0000000 c0000001 34800000
+subs z eq - c0000000 c0000001 34800000
+subs n eq - 40000004 40000003 34800000
+subs m eq - 40000004 40000003 34800000
+subs p eq - 40000004 40000003 34800000
+subs z eq - 40000004 40000003 34800000
+subs n eq - c0000004 c0000003 b4800000
+subs m eq - c0000004 c0000003 b4800000
+subs p eq - c0000004 c0000003 b4800000
+subs z eq - c0000004 c0000003 b4800000
+subs n eq - 407fffff 407ffffe 34800000
+subs m eq - 407fffff 407ffffe 34800000
+subs p eq - 407fffff 407ffffe 34800000
+subs z eq - 407fffff 407ffffe 34800000
+subs n eq - c07fffff c07ffffe b4800000
+subs m eq - c07fffff c07ffffe b4800000
+subs p eq - c07fffff c07ffffe b4800000
+subs z eq - c07fffff c07ffffe b4800000
+subs n eq - 3ffffffc 3ffffffd b4000000
+subs m eq - 3ffffffc 3ffffffd b4000000
+subs p eq - 3ffffffc 3ffffffd b4000000
+subs z eq - 3ffffffc 3ffffffd b4000000
+subs n eq - bffffffc bffffffd 34000000
+subs m eq - bffffffc bffffffd 34000000
+subs p eq - bffffffc bffffffd 34000000
+subs z eq - bffffffc bffffffd 34000000
+subs n eq - 7f000001 7f000000 73800000
+subs m eq - 7f000001 7f000000 73800000
+subs p eq - 7f000001 7f000000 73800000
+subs z eq - 7f000001 7f000000 73800000
+subs n eq - ff000001 ff000000 f3800000
+subs m eq - ff000001 ff000000 f3800000
+subs p eq - ff000001 ff000000 f3800000
+subs z eq - ff000001 ff000000 f3800000
+subs n eq - 7f000001 7f000002 f3800000
+subs m eq - 7f000001 7f000002 f3800000
+subs p eq - 7f000001 7f000002 f3800000
+subs z eq - 7f000001 7f000002 f3800000
+subs n eq - ff000001 ff000002 73800000
+subs m eq - ff000001 ff000002 73800000
+subs p eq - ff000001 ff000002 73800000
+subs z eq - ff000001 ff000002 73800000
+subs n eq - 7e800000 7e800001 f3000000
+subs m eq - 7e800000 7e800001 f3000000
+subs p eq - 7e800000 7e800001 f3000000
+subs z eq - 7e800000 7e800001 f3000000
+subs n eq - fe800000 fe800001 73000000
+subs m eq - fe800000 fe800001 73000000
+subs p eq - fe800000 fe800001 73000000
+subs z eq - fe800000 fe800001 73000000
+subs n eq - 7e800004 7e800003 73000000
+subs m eq - 7e800004 7e800003 73000000
+subs p eq - 7e800004 7e800003 73000000
+subs z eq - 7e800004 7e800003 73000000
+subs n eq - fe800004 fe800003 f3000000
+subs m eq - fe800004 fe800003 f3000000
+subs p eq - fe800004 fe800003 f3000000
+subs z eq - fe800004 fe800003 f3000000
+subs n eq - 7e7fffff 7e7ffffe 72800000
+subs m eq - 7e7fffff 7e7ffffe 72800000
+subs p eq - 7e7fffff 7e7ffffe 72800000
+subs z eq - 7e7fffff 7e7ffffe 72800000
+subs n eq - fe7fffff fe7ffffe f2800000
+subs m eq - fe7fffff fe7ffffe f2800000
+subs p eq - fe7fffff fe7ffffe f2800000
+subs z eq - fe7fffff fe7ffffe f2800000
+subs n eq - ff7ffffe ff7fffff 73800000
+subs m eq - ff7ffffe ff7fffff 73800000
+subs p eq - ff7ffffe ff7fffff 73800000
+subs z eq - ff7ffffe ff7fffff 73800000
+subs n eq - 7f7ffffe 7f7fffff f3800000
+subs m eq - 7f7ffffe 7f7fffff f3800000
+subs p eq - 7f7ffffe 7f7fffff f3800000
+subs z eq - 7f7ffffe 7f7fffff f3800000
+subs n eq - 80800001 80800000 80000001
+subs m eq - 80800001 80800000 80000001
+subs p eq - 80800001 80800000 80000001
+subs z eq - 80800001 80800000 80000001
+subs n eq - 00800001 00800000 00000001
+subs m eq - 00800001 00800000 00000001
+subs p eq - 00800001 00800000 00000001
+subs z eq - 00800001 00800000 00000001
+subs n eq - 807fffff 80800000 00000001
+subs m eq - 807fffff 80800000 00000001
+subs p eq - 807fffff 80800000 00000001
+subs z eq - 807fffff 80800000 00000001
+subs n eq - 007fffff 00800000 80000001
+subs m eq - 007fffff 00800000 80000001
+subs p eq - 007fffff 00800000 80000001
+subs z eq - 007fffff 00800000 80000001
+subs n eq - 00800001 00800002 80000001
+subs m eq - 00800001 00800002 80000001
+subs p eq - 00800001 00800002 80000001
+subs z eq - 00800001 00800002 80000001
+subs n eq - 80800001 80800002 00000001
+subs m eq - 80800001 80800002 00000001
+subs p eq - 80800001 80800002 00000001
+subs z eq - 80800001 80800002 00000001
+subs n eq - 007fffff 007ffffe 00000001
+subs m eq - 007fffff 007ffffe 00000001
+subs p eq - 007fffff 007ffffe 00000001
+subs z eq - 007fffff 007ffffe 00000001
+subs n eq - 807fffff 807ffffe 80000001
+subs m eq - 807fffff 807ffffe 80000001
+subs p eq - 807fffff 807ffffe 80000001
+subs z eq - 807fffff 807ffffe 80000001
+subs n eq - 007ffffd 007ffffe 80000001
+subs m eq - 007ffffd 007ffffe 80000001
+subs p eq - 007ffffd 007ffffe 80000001
+subs z eq - 007ffffd 007ffffe 80000001
+subs n eq - 807ffffd 807ffffe 00000001
+subs m eq - 807ffffd 807ffffe 00000001
+subs p eq - 807ffffd 807ffffe 00000001
+subs z eq - 807ffffd 807ffffe 00000001
+subs n eq - 00000002 00000001 00000001
+subs m eq - 00000002 00000001 00000001
+subs p eq - 00000002 00000001 00000001
+subs z eq - 00000002 00000001 00000001
+subs n eq - 80000002 80000001 80000001
+subs m eq - 80000002 80000001 80000001
+subs p eq - 80000002 80000001 80000001
+subs z eq - 80000002 80000001 80000001
+subs n eq - 00000003 00000002 00000001
+subs m eq - 00000003 00000002 00000001
+subs p eq - 00000003 00000002 00000001
+subs z eq - 00000003 00000002 00000001
+subs n eq - 80000003 80000002 80000001
+subs m eq - 80000003 80000002 80000001
+subs p eq - 80000003 80000002 80000001
+subs z eq - 80000003 80000002 80000001
+subs n eq - 40000000 3fffffff 34000000
+subs m eq - 40000000 3fffffff 34000000
+subs p eq - 40000000 3fffffff 34000000
+subs z eq - 40000000 3fffffff 34000000
+subs n eq - c0000000 bfffffff b4000000
+subs m eq - c0000000 bfffffff b4000000
+subs p eq - c0000000 bfffffff b4000000
+subs z eq - c0000000 bfffffff b4000000
+subs n eq - 40800001 407fffff 35400000
+subs m eq - 40800001 407fffff 35400000
+subs p eq - 40800001 407fffff 35400000
+subs z eq - 40800001 407fffff 35400000
+subs n eq - c0800001 c07fffff b5400000
+subs m eq - c0800001 c07fffff b5400000
+subs p eq - c0800001 c07fffff b5400000
+subs z eq - c0800001 c07fffff b5400000
+subs n eq - 407fffff 40800002 b5a00000
+subs m eq - 407fffff 40800002 b5a00000
+subs p eq - 407fffff 40800002 b5a00000
+subs z eq - 407fffff 40800002 b5a00000
+subs n eq - c07fffff c0800002 35a00000
+subs m eq - c07fffff c0800002 35a00000
+subs p eq - c07fffff c0800002 35a00000
+subs z eq - c07fffff c0800002 35a00000
+subs n eq - 40000001 3f800001 3f800001
+subs m eq - 40000001 3f800001 3f800001
+subs p eq - 40000001 3f800001 3f800001
+subs z eq - 40000001 3f800001 3f800001
+subs n eq - c0000001 bf800001 bf800001
+subs m eq - c0000001 bf800001 bf800001
+subs p eq - c0000001 bf800001 bf800001
+subs z eq - c0000001 bf800001 bf800001
+subs n eq - 40000002 3f800001 3f800003
+subs m eq - 40000002 3f800001 3f800003
+subs p eq - 40000002 3f800001 3f800003
+subs z eq - 40000002 3f800001 3f800003
+subs n eq - c0000002 bf800001 bf800003
+subs m eq - c0000002 bf800001 bf800003
+subs p eq - c0000002 bf800001 bf800003
+subs z eq - c0000002 bf800001 bf800003
+subs n eq - 40000002 3f800003 3f800001
+subs m eq - 40000002 3f800003 3f800001
+subs p eq - 40000002 3f800003 3f800001
+subs z eq - 40000002 3f800003 3f800001
+subs n eq - c0000002 bf800003 bf800001
+subs m eq - c0000002 bf800003 bf800001
+subs p eq - c0000002 bf800003 bf800001
+subs z eq - c0000002 bf800003 bf800001
+subs n eq - 7e800000 7e7fffff 72800000
+subs m eq - 7e800000 7e7fffff 72800000
+subs p eq - 7e800000 7e7fffff 72800000
+subs z eq - 7e800000 7e7fffff 72800000
+subs n eq - fe800000 fe7fffff f2800000
+subs m eq - fe800000 fe7fffff f2800000
+subs p eq - fe800000 fe7fffff f2800000
+subs z eq - fe800000 fe7fffff f2800000
+subs n eq - feffffff ff000000 73000000
+subs m eq - feffffff ff000000 73000000
+subs p eq - feffffff ff000000 73000000
+subs z eq - feffffff ff000000 73000000
+subs n eq - 7effffff 7f000000 f3000000
+subs m eq - 7effffff 7f000000 f3000000
+subs p eq - 7effffff 7f000000 f3000000
+subs z eq - 7effffff 7f000000 f3000000
+subs n eq - 7d800001 7d7fffff 72400000
+subs m eq - 7d800001 7d7fffff 72400000
+subs p eq - 7d800001 7d7fffff 72400000
+subs z eq - 7d800001 7d7fffff 72400000
+subs n eq - fd800001 fd7fffff f2400000
+subs m eq - fd800001 fd7fffff f2400000
+subs p eq - fd800001 fd7fffff f2400000
+subs z eq - fd800001 fd7fffff f2400000
+subs n eq - 7e7fffff 7e800002 f3a00000
+subs m eq - 7e7fffff 7e800002 f3a00000
+subs p eq - 7e7fffff 7e800002 f3a00000
+subs z eq - 7e7fffff 7e800002 f3a00000
+subs n eq - fe7fffff fe800002 73a00000
+subs m eq - fe7fffff fe800002 73a00000
+subs p eq - fe7fffff fe800002 73a00000
+subs z eq - fe7fffff fe800002 73a00000
+subs n eq - 7e800001 7f000001 fe800001
+subs m eq - 7e800001 7f000001 fe800001
+subs p eq - 7e800001 7f000001 fe800001
+subs z eq - 7e800001 7f000001 fe800001
+subs n eq - fe800001 ff000001 7e800001
+subs m eq - fe800001 ff000001 7e800001
+subs p eq - fe800001 ff000001 7e800001
+subs z eq - fe800001 ff000001 7e800001
+subs n eq - 7f000002 7e800001 7e800003
+subs m eq - 7f000002 7e800001 7e800003
+subs p eq - 7f000002 7e800001 7e800003
+subs z eq - 7f000002 7e800001 7e800003
+subs n eq - ff000002 fe800001 fe800003
+subs m eq - ff000002 fe800001 fe800003
+subs p eq - ff000002 fe800001 fe800003
+subs z eq - ff000002 fe800001 fe800003
+subs n eq - 7e800002 7e000003 7e000001
+subs m eq - 7e800002 7e000003 7e000001
+subs p eq - 7e800002 7e000003 7e000001
+subs z eq - 7e800002 7e000003 7e000001
+subs n eq - fe800002 fe000003 fe000001
+subs m eq - fe800002 fe000003 fe000001
+subs p eq - fe800002 fe000003 fe000001
+subs z eq - fe800002 fe000003 fe000001
+subs n eq - 01000000 00ffffff 00000001
+subs m eq - 01000000 00ffffff 00000001
+subs p eq - 01000000 00ffffff 00000001
+subs z eq - 01000000 00ffffff 00000001
+subs n eq - 81000000 80ffffff 80000001
+subs m eq - 81000000 80ffffff 80000001
+subs p eq - 81000000 80ffffff 80000001
+subs z eq - 81000000 80ffffff 80000001
+subs n eq - 80ffffff 81000000 00000001
+subs m eq - 80ffffff 81000000 00000001
+subs p eq - 80ffffff 81000000 00000001
+subs z eq - 80ffffff 81000000 00000001
+subs n eq - 00ffffff 01000000 80000001
+subs m eq - 00ffffff 01000000 80000001
+subs p eq - 00ffffff 01000000 80000001
+subs z eq - 00ffffff 01000000 80000001
+subs n eq - 01000001 00ffffff 00000003
+subs m eq - 01000001 00ffffff 00000003
+subs p eq - 01000001 00ffffff 00000003
+subs z eq - 01000001 00ffffff 00000003
+subs n eq - 81000001 80ffffff 80000003
+subs m eq - 81000001 80ffffff 80000003
+subs p eq - 81000001 80ffffff 80000003
+subs z eq - 81000001 80ffffff 80000003
+subs n eq - 01800000 017fffff 00000002
+subs m eq - 01800000 017fffff 00000002
+subs p eq - 01800000 017fffff 00000002
+subs z eq - 01800000 017fffff 00000002
+subs n eq - 81800000 817fffff 80000002
+subs m eq - 81800000 817fffff 80000002
+subs p eq - 81800000 817fffff 80000002
+subs z eq - 81800000 817fffff 80000002
+subs n eq - 817fffff 81800000 00000002
+subs m eq - 817fffff 81800000 00000002
+subs p eq - 817fffff 81800000 00000002
+subs z eq - 817fffff 81800000 00000002
+subs n eq - 017fffff 01800000 80000002
+subs m eq - 017fffff 01800000 80000002
+subs p eq - 017fffff 01800000 80000002
+subs z eq - 017fffff 01800000 80000002
+subs n eq - 01800001 017fffff 00000006
+subs m eq - 01800001 017fffff 00000006
+subs p eq - 01800001 017fffff 00000006
+subs z eq - 01800001 017fffff 00000006
+subs n eq - 81800001 817fffff 80000006
+subs m eq - 81800001 817fffff 80000006
+subs p eq - 81800001 817fffff 80000006
+subs z eq - 81800001 817fffff 80000006
+subs n eq - 00ffffff 01000002 80000005
+subs m eq - 00ffffff 01000002 80000005
+subs p eq - 00ffffff 01000002 80000005
+subs z eq - 00ffffff 01000002 80000005
+subs n eq - 80ffffff 81000002 00000005
+subs m eq - 80ffffff 81000002 00000005
+subs p eq - 80ffffff 81000002 00000005
+subs z eq - 80ffffff 81000002 00000005
+subs n eq - 00ffffff 01000004 80000009
+subs m eq - 00ffffff 01000004 80000009
+subs p eq - 00ffffff 01000004 80000009
+subs z eq - 00ffffff 01000004 80000009
+subs n eq - 80ffffff 81000004 00000009
+subs m eq - 80ffffff 81000004 00000009
+subs p eq - 80ffffff 81000004 00000009
+subs z eq - 80ffffff 81000004 00000009
+subs n eq - 01000001 00800001 00800001
+subs m eq - 01000001 00800001 00800001
+subs p eq - 01000001 00800001 00800001
+subs z eq - 01000001 00800001 00800001
+subs n eq - 81000001 80800001 80800001
+subs m eq - 81000001 80800001 80800001
+subs p eq - 81000001 80800001 80800001
+subs z eq - 81000001 80800001 80800001
+subs n eq - 01000002 00800001 00800003
+subs m eq - 01000002 00800001 00800003
+subs p eq - 01000002 00800001 00800003
+subs z eq - 01000002 00800001 00800003
+subs n eq - 81000002 80800001 80800003
+subs m eq - 81000002 80800001 80800003
+subs p eq - 81000002 80800001 80800003
+subs z eq - 81000002 80800001 80800003
+subs n eq - 01800002 01000003 01000001
+subs m eq - 01800002 01000003 01000001
+subs p eq - 01800002 01000003 01000001
+subs z eq - 01800002 01000003 01000001
+subs n eq - 81800002 81000003 81000001
+subs m eq - 81800002 81000003 81000001
+subs p eq - 81800002 81000003 81000001
+subs z eq - 81800002 81000003 81000001
+subs n eq x 00000001 ff000000 7f000000
+subs z eq x 00000001 ff000000 7f000000
+subs m eq x 00000001 ff000000 7f000000
+subs p eq x 00000001 ff000000 7f000001
+subs n eq x 80000001 7f000000 ff000000
+subs z eq x 80000001 7f000000 ff000000
+subs p eq x 80000001 7f000000 ff000000
+subs m eq x 80000001 7f000000 ff000001
+subs n eq x 00000001 feffffff 7effffff
+subs z eq x 00000001 feffffff 7effffff
+subs m eq x 00000001 feffffff 7effffff
+subs p eq x 00000001 feffffff 7f000000
+subs n eq x 80000001 7effffff feffffff
+subs z eq x 80000001 7effffff feffffff
+subs p eq x 80000001 7effffff feffffff
+subs m eq x 80000001 7effffff ff000000
+subs n eq x 00000001 ff7fffff 7f7fffff
+subs z eq x 00000001 ff7fffff 7f7fffff
+subs m eq x 00000001 ff7fffff 7f7fffff
+subs p eq xo 00000001 ff7fffff 7f800000
+subs n eq x 80000001 7f7fffff ff7fffff
+subs z eq x 80000001 7f7fffff ff7fffff
+subs p eq x 80000001 7f7fffff ff7fffff
+subs m eq xo 80000001 7f7fffff ff800000
+subs n eq x 00000001 ff7ffffe 7f7ffffe
+subs z eq x 00000001 ff7ffffe 7f7ffffe
+subs m eq x 00000001 ff7ffffe 7f7ffffe
+subs p eq x 00000001 ff7ffffe 7f7fffff
+subs n eq x 80000001 7f7ffffe ff7ffffe
+subs z eq x 80000001 7f7ffffe ff7ffffe
+subs p eq x 80000001 7f7ffffe ff7ffffe
+subs m eq x 80000001 7f7ffffe ff7fffff
+subs n eq x 00000001 bf7fffff 3f7fffff
+subs z eq x 00000001 bf7fffff 3f7fffff
+subs m eq x 00000001 bf7fffff 3f7fffff
+subs p eq x 00000001 bf7fffff 3f800000
+subs n eq x 80000001 3f7fffff bf7fffff
+subs z eq x 80000001 3f7fffff bf7fffff
+subs p eq x 80000001 3f7fffff bf7fffff
+subs m eq x 80000001 3f7fffff bf800000
+subs n eq x 00000001 bfffffff 3fffffff
+subs z eq x 00000001 bfffffff 3fffffff
+subs m eq x 00000001 bfffffff 3fffffff
+subs p eq x 00000001 bfffffff 40000000
+subs n eq x 80000001 3fffffff bfffffff
+subs z eq x 80000001 3fffffff bfffffff
+subs p eq x 80000001 3fffffff bfffffff
+subs m eq x 80000001 3fffffff c0000000
+subs n eq x 00000001 bffffffe 3ffffffe
+subs z eq x 00000001 bffffffe 3ffffffe
+subs m eq x 00000001 bffffffe 3ffffffe
+subs p eq x 00000001 bffffffe 3fffffff
+subs n eq x 80000001 3ffffffe bffffffe
+subs z eq x 80000001 3ffffffe bffffffe
+subs p eq x 80000001 3ffffffe bffffffe
+subs m eq x 80000001 3ffffffe bfffffff
+subs n eq x 7f7fffff 00000001 7f7fffff
+subs p eq x 7f7fffff 00000001 7f7fffff
+subs z eq x 7f7fffff 00000001 7f7ffffe
+subs m eq x 7f7fffff 00000001 7f7ffffe
+subs n eq x ff7fffff 80000001 ff7fffff
+subs m eq x ff7fffff 80000001 ff7fffff
+subs z eq x ff7fffff 80000001 ff7ffffe
+subs p eq x ff7fffff 80000001 ff7ffffe
+subs n eq x 80000003 ff000000 7f000000
+subs p eq x 80000003 ff000000 7f000000
+subs z eq x 80000003 ff000000 7effffff
+subs m eq x 80000003 ff000000 7effffff
+subs n eq x 00000003 7f000000 ff000000
+subs m eq x 00000003 7f000000 ff000000
+subs z eq x 00000003 7f000000 feffffff
+subs p eq x 00000003 7f000000 feffffff
+subs n eq x 3f7fffff 00000001 3f7fffff
+subs p eq x 3f7fffff 00000001 3f7fffff
+subs z eq x 3f7fffff 00000001 3f7ffffe
+subs m eq x 3f7fffff 00000001 3f7ffffe
+subs n eq x bfffffff 80000001 bfffffff
+subs m eq x bfffffff 80000001 bfffffff
+subs z eq x bfffffff 80000001 bffffffe
+subs p eq x bfffffff 80000001 bffffffe
+subs n eq x c0000000 40000001 c0800000
+subs z eq x c0000000 40000001 c0800000
+subs p eq x c0000000 40000001 c0800000
+subs m eq x c0000000 40000001 c0800001
+subs n eq x 40000000 c0000001 40800000
+subs z eq x 40000000 c0000001 40800000
+subs m eq x 40000000 c0000001 40800000
+subs p eq x 40000000 c0000001 40800001
+subs n eq x 3f800000 bf800003 40000002
+subs p eq x 3f800000 bf800003 40000002
+subs z eq x 3f800000 bf800003 40000001
+subs m eq x 3f800000 bf800003 40000001
+subs n eq x bf800000 3f800003 c0000002
+subs m eq x bf800000 3f800003 c0000002
+subs z eq x bf800000 3f800003 c0000001
+subs p eq x bf800000 3f800003 c0000001
+subs n eq x c0000001 40000002 c0800002
+subs m eq x c0000001 40000002 c0800002
+subs z eq x c0000001 40000002 c0800001
+subs p eq x c0000001 40000002 c0800001
+subs n eq x 40000001 c0000002 40800002
+subs p eq x 40000001 c0000002 40800002
+subs z eq x 40000001 c0000002 40800001
+subs m eq x 40000001 c0000002 40800001
+subs n eq xo 7f7ffffe ff7fffff 7f800000
+subs p eq xo 7f7ffffe ff7fffff 7f800000
+subs z eq xo 7f7ffffe ff7fffff 7f7fffff
+subs m eq xo 7f7ffffe ff7fffff 7f7fffff
+subs n eq xo ff7ffffe 7f7fffff ff800000
+subs m eq xo ff7ffffe 7f7fffff ff800000
+subs z eq xo ff7ffffe 7f7fffff ff7fffff
+subs p eq xo ff7ffffe 7f7fffff ff7fffff
+subs n eq xo 7effffff ff000000 7f800000
+subs p eq xo 7effffff ff000000 7f800000
+subs z eq x 7effffff ff000000 7f7fffff
+subs m eq x 7effffff ff000000 7f7fffff
+subs n eq xo feffffff 7f000000 ff800000
+subs m eq xo feffffff 7f000000 ff800000
+subs z eq x feffffff 7f000000 ff7fffff
+subs p eq x feffffff 7f000000 ff7fffff
+subs n eq xo 7f000001 ff000000 7f800000
+subs p eq xo 7f000001 ff000000 7f800000
+subs z eq xo 7f000001 ff000000 7f7fffff
+subs m eq xo 7f000001 ff000000 7f7fffff
+subs n eq xo ff000001 7f000000 ff800000
+subs m eq xo ff000001 7f000000 ff800000
+subs z eq xo ff000001 7f000000 ff7fffff
+subs p eq xo ff000001 7f000000 ff7fffff
+subs n eq x 7e800001 fe800000 7f000000
+subs z eq x 7e800001 fe800000 7f000000
+subs m eq x 7e800001 fe800000 7f000000
+subs p eq x 7e800001 fe800000 7f000001
+subs n eq x fe800001 7e800000 ff000000
+subs z eq x fe800001 7e800000 ff000000
+subs p eq x fe800001 7e800000 ff000000
+subs m eq x fe800001 7e800000 ff000001
+subs n eq x 7efffffe feffffff 7f7ffffe
+subs z eq x 7efffffe feffffff 7f7ffffe
+subs m eq x 7efffffe feffffff 7f7ffffe
+subs p eq x 7efffffe feffffff 7f7fffff
+subs n eq x fefffffe 7effffff ff7ffffe
+subs z eq x fefffffe 7effffff ff7ffffe
+subs p eq x fefffffe 7effffff ff7ffffe
+subs m eq x fefffffe 7effffff ff7fffff
+subs p eq x 40000000 b4000000 40000001
+subs n eq x 40000000 b4000000 40000000
+subs z eq x 40000000 b4000000 40000000
+subs m eq x 40000000 b4000000 40000000
+subs n eq x 40000001 b4000000 40000002
+subs p eq x 40000001 b4000000 40000002
+subs z eq x 40000001 b4000000 40000001
+subs m eq x 40000001 b4000000 40000001
+subs n eq x 407fffff b4000000 40800000
+subs p eq x 407fffff b4000000 40800000
+subs z eq x 407fffff b4000000 407fffff
+subs m eq x 407fffff b4000000 407fffff
+subs p eq x 407fffff b3ffffff 40800000
+subs z eq x 407fffff b3ffffff 407fffff
+subs n eq x 407fffff b3ffffff 407fffff
+subs m eq x 407fffff b3ffffff 407fffff
+subs n eq x c07fffff 34000000 c0800000
+subs m eq x c07fffff 34000000 c0800000
+subs z eq x c07fffff 34000000 c07fffff
+subs p eq x c07fffff 34000000 c07fffff
+subs m eq x c07fffff 33ffffff c0800000
+subs z eq x c07fffff 33ffffff c07fffff
+subs n eq x c07fffff 33ffffff c07fffff
+subs p eq x c07fffff 33ffffff c07fffff
+subs n uo - 7fff0000 80000000 7fff0000
+subs m uo - 7fff0000 80000000 7fff0000
+subs p uo - 7fff0000 80000000 7fff0000
+subs z uo - 7fff0000 80000000 7fff0000
+subs n uo - 00000000 7fff0000 7fff0000
+subs m uo - 00000000 7fff0000 7fff0000
+subs p uo - 00000000 7fff0000 7fff0000
+subs z uo - 00000000 7fff0000 7fff0000
+subs n uo - 80000000 7fff0000 7fff0000
+subs m uo - 80000000 7fff0000 7fff0000
+subs p uo - 80000000 7fff0000 7fff0000
+subs z uo - 80000000 7fff0000 7fff0000
+subs n uo - 3f800000 7fff0000 7fff0000
+subs m uo - 3f800000 7fff0000 7fff0000
+subs p uo - 3f800000 7fff0000 7fff0000
+subs z uo - 3f800000 7fff0000 7fff0000
+subs n uo - bf800000 7fff0000 7fff0000
+subs m uo - bf800000 7fff0000 7fff0000
+subs p uo - bf800000 7fff0000 7fff0000
+subs z uo - bf800000 7fff0000 7fff0000
+subs n uo - 007fffff 7fff0000 7fff0000
+subs m uo - 007fffff 7fff0000 7fff0000
+subs p uo - 007fffff 7fff0000 7fff0000
+subs z uo - 007fffff 7fff0000 7fff0000
+subs n uo - 807fffff 7fff0000 7fff0000
+subs m uo - 807fffff 7fff0000 7fff0000
+subs p uo - 807fffff 7fff0000 7fff0000
+subs z uo - 807fffff 7fff0000 7fff0000
+subs n uo - 7fff0000 007fffff 7fff0000
+subs m uo - 7fff0000 007fffff 7fff0000
+subs p uo - 7fff0000 007fffff 7fff0000
+subs z uo - 7fff0000 007fffff 7fff0000
+subs n uo - 7fff0000 807fffff 7fff0000
+subs m uo - 7fff0000 807fffff 7fff0000
+subs p uo - 7fff0000 807fffff 7fff0000
+subs z uo - 7fff0000 807fffff 7fff0000
+subs n uo - 7fff0000 00000001 7fff0000
+subs m uo - 7fff0000 00000001 7fff0000
+subs p uo - 7fff0000 00000001 7fff0000
+subs z uo - 7fff0000 00000001 7fff0000
+subs n uo - 7fff0000 80000001 7fff0000
+subs m uo - 7fff0000 80000001 7fff0000
+subs p uo - 7fff0000 80000001 7fff0000
+subs z uo - 7fff0000 80000001 7fff0000
+subs n uo - 00000001 7fff0000 7fff0000
+subs m uo - 00000001 7fff0000 7fff0000
+subs p uo - 00000001 7fff0000 7fff0000
+subs z uo - 00000001 7fff0000 7fff0000
+subs n uo - 80000001 7fff0000 7fff0000
+subs m uo - 80000001 7fff0000 7fff0000
+subs p uo - 80000001 7fff0000 7fff0000
+subs z uo - 80000001 7fff0000 7fff0000
+subs n uo - 7fff0000 7f7fffff 7fff0000
+subs m uo - 7fff0000 7f7fffff 7fff0000
+subs p uo - 7fff0000 7f7fffff 7fff0000
+subs z uo - 7fff0000 7f7fffff 7fff0000
+subs n uo - 7fff0000 ff7fffff 7fff0000
+subs m uo - 7fff0000 ff7fffff 7fff0000
+subs p uo - 7fff0000 ff7fffff 7fff0000
+subs z uo - 7fff0000 ff7fffff 7fff0000
+subs n uo - 7f7fffff 7fff0000 7fff0000
+subs m uo - 7f7fffff 7fff0000 7fff0000
+subs p uo - 7f7fffff 7fff0000 7fff0000
+subs z uo - 7f7fffff 7fff0000 7fff0000
+subs n uo - ff7fffff 7fff0000 7fff0000
+subs m uo - ff7fffff 7fff0000 7fff0000
+subs p uo - ff7fffff 7fff0000 7fff0000
+subs z uo - ff7fffff 7fff0000 7fff0000
+subs n uo - 7fff0000 7f800000 7fff0000
+subs m uo - 7fff0000 7f800000 7fff0000
+subs p uo - 7fff0000 7f800000 7fff0000
+subs z uo - 7fff0000 7f800000 7fff0000
+subs n uo - 7fff0000 ff800000 7fff0000
+subs m uo - 7fff0000 ff800000 7fff0000
+subs p uo - 7fff0000 ff800000 7fff0000
+subs z uo - 7fff0000 ff800000 7fff0000
+subs n uo - 7f800000 7fff0000 7fff0000
+subs m uo - 7f800000 7fff0000 7fff0000
+subs p uo - 7f800000 7fff0000 7fff0000
+subs z uo - 7f800000 7fff0000 7fff0000
+subs n uo - ff800000 7fff0000 7fff0000
+subs m uo - ff800000 7fff0000 7fff0000
+subs p uo - ff800000 7fff0000 7fff0000
+subs z uo - ff800000 7fff0000 7fff0000
+subs n uo - 7fff0000 7fff0000 7fff0000
+subs m uo - 7fff0000 7fff0000 7fff0000
+subs p uo - 7fff0000 7fff0000 7fff0000
+subs z uo - 7fff0000 7fff0000 7fff0000
+subs n uo v 7f810000 80000000 7fff0000
+subs m uo v 7f810000 80000000 7fff0000
+subs p uo v 7f810000 80000000 7fff0000
+subs z uo v 7f810000 80000000 7fff0000
+subs n uo v 00000000 7f810000 7fff0000
+subs m uo v 00000000 7f810000 7fff0000
+subs p uo v 00000000 7f810000 7fff0000
+subs z uo v 00000000 7f810000 7fff0000
+subs n uo v 80000000 7f810000 7fff0000
+subs m uo v 80000000 7f810000 7fff0000
+subs p uo v 80000000 7f810000 7fff0000
+subs z uo v 80000000 7f810000 7fff0000
+subs n uo v 3f800000 7f810000 7fff0000
+subs m uo v 3f800000 7f810000 7fff0000
+subs p uo v 3f800000 7f810000 7fff0000
+subs z uo v 3f800000 7f810000 7fff0000
+subs n uo v bf800000 7f810000 7fff0000
+subs m uo v bf800000 7f810000 7fff0000
+subs p uo v bf800000 7f810000 7fff0000
+subs z uo v bf800000 7f810000 7fff0000
+subs n uo v 007fffff 7f810000 7fff0000
+subs m uo v 007fffff 7f810000 7fff0000
+subs p uo v 007fffff 7f810000 7fff0000
+subs z uo v 007fffff 7f810000 7fff0000
+subs n uo v 807fffff 7f810000 7fff0000
+subs m uo v 807fffff 7f810000 7fff0000
+subs p uo v 807fffff 7f810000 7fff0000
+subs z uo v 807fffff 7f810000 7fff0000
+subs n uo v 7f810000 007fffff 7fff0000
+subs m uo v 7f810000 007fffff 7fff0000
+subs p uo v 7f810000 007fffff 7fff0000
+subs z uo v 7f810000 007fffff 7fff0000
+subs n uo v 7f810000 807fffff 7fff0000
+subs m uo v 7f810000 807fffff 7fff0000
+subs p uo v 7f810000 807fffff 7fff0000
+subs z uo v 7f810000 807fffff 7fff0000
+subs n uo v 7f810000 00000001 7fff0000
+subs m uo v 7f810000 00000001 7fff0000
+subs p uo v 7f810000 00000001 7fff0000
+subs z uo v 7f810000 00000001 7fff0000
+subs n uo v 7f810000 80000001 7fff0000
+subs m uo v 7f810000 80000001 7fff0000
+subs p uo v 7f810000 80000001 7fff0000
+subs z uo v 7f810000 80000001 7fff0000
+subs n uo v 00000001 7f810000 7fff0000
+subs m uo v 00000001 7f810000 7fff0000
+subs p uo v 00000001 7f810000 7fff0000
+subs z uo v 00000001 7f810000 7fff0000
+subs n uo v 80000001 7f810000 7fff0000
+subs m uo v 80000001 7f810000 7fff0000
+subs p uo v 80000001 7f810000 7fff0000
+subs z uo v 80000001 7f810000 7fff0000
+subs n uo v 7f810000 7f7fffff 7fff0000
+subs m uo v 7f810000 7f7fffff 7fff0000
+subs p uo v 7f810000 7f7fffff 7fff0000
+subs z uo v 7f810000 7f7fffff 7fff0000
+subs n uo v 7f810000 ff7fffff 7fff0000
+subs m uo v 7f810000 ff7fffff 7fff0000
+subs p uo v 7f810000 ff7fffff 7fff0000
+subs z uo v 7f810000 ff7fffff 7fff0000
+subs n uo v 7f7fffff 7f810000 7fff0000
+subs m uo v 7f7fffff 7f810000 7fff0000
+subs p uo v 7f7fffff 7f810000 7fff0000
+subs z uo v 7f7fffff 7f810000 7fff0000
+subs n uo v ff7fffff 7f810000 7fff0000
+subs m uo v ff7fffff 7f810000 7fff0000
+subs p uo v ff7fffff 7f810000 7fff0000
+subs z uo v ff7fffff 7f810000 7fff0000
+subs n uo v 7f810000 7f800000 7fff0000
+subs m uo v 7f810000 7f800000 7fff0000
+subs p uo v 7f810000 7f800000 7fff0000
+subs z uo v 7f810000 7f800000 7fff0000
+subs n uo v 7f810000 ff800000 7fff0000
+subs m uo v 7f810000 ff800000 7fff0000
+subs p uo v 7f810000 ff800000 7fff0000
+subs z uo v 7f810000 ff800000 7fff0000
+subs n uo v 7f800000 7f810000 7fff0000
+subs m uo v 7f800000 7f810000 7fff0000
+subs p uo v 7f800000 7f810000 7fff0000
+subs z uo v 7f800000 7f810000 7fff0000
+subs n uo v ff800000 7f810000 7fff0000
+subs m uo v ff800000 7f810000 7fff0000
+subs p uo v ff800000 7f810000 7fff0000
+subs z uo v ff800000 7f810000 7fff0000
+subs n uo v 7fff0000 7f810000 7fff0000
+subs m uo v 7fff0000 7f810000 7fff0000
+subs p uo v 7fff0000 7f810000 7fff0000
+subs z uo v 7fff0000 7f810000 7fff0000
+subs n uo v 7f810000 7fff0000 7fff0000
+subs m uo v 7f810000 7fff0000 7fff0000
+subs p uo v 7f810000 7fff0000 7fff0000
+subs z uo v 7f810000 7fff0000 7fff0000
+subs n uo v 7f810000 7f810000 7fff0000
+subs m uo v 7f810000 7f810000 7fff0000
+subs p uo v 7f810000 7f810000 7fff0000
+subs z uo v 7f810000 7f810000 7fff0000
+subs n eq - 4f804000 4f800000 4b000000
+subs m eq - 4f804000 4f800000 4b000000
+subs p eq - 4f804000 4f800000 4b000000
+subs z eq - 4f804000 4f800000 4b000000
+subs n eq x 59800000 3f3ff800 59800000
+subs p eq x 59800000 3f3ff800 59800000
+subs m eq x 59800000 3f3ff800 597fffff
+subs z eq x 59800000 3f3ff800 597fffff
diff --git a/verrou/unitTest/checkUCB-vecto/testFloat.cxx b/verrou/unitTest/checkUCB-vecto/testFloat.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..bf4b7504439ef0167fe8fac80498e22bbfe0f730
--- /dev/null
+++ b/verrou/unitTest/checkUCB-vecto/testFloat.cxx
@@ -0,0 +1,440 @@
+#include <iostream>
+#include <fstream>
+#include <sstream>
+#include <string>
+#include <vector>
+#include <cstdio>   // sscanf
+#include <stdint.h>
+//#include <xmmintrin.h>
+#include <immintrin.h>
+//#include <avxintrin.h>
+#include <fenv.h>
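+
+// Test harness for the UCB-format floating-point test vectors: each basic
+// operation is evaluated with the scalar operator, with SSE intrinsics and
+// with AVX intrinsics, and every result is compared against the reference
+// value read from the corresponding *.input file.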
+
+
+//Basic floating-point operations in single precision
+struct Add32{
+  typedef float RealType;
+  static std::string name(){return std::string("adds");}
+  static RealType apply(RealType a, RealType b){return a+b;}  
+  static __m128 applySSE(__m128 ai, __m128 bi){return _mm_add_ps(ai,bi);};
+  static __m256 applyAVX(__m256 ai, __m256 bi){return _mm256_add_ps(ai,bi);};  
+};
+
+struct Mul32{
+  typedef float RealType;
+  static std::string name(){return std::string("muls");}
+  static RealType apply(RealType a, RealType b){return a*b;}  
+  static __m128 applySSE(__m128 ai, __m128 bi){return _mm_mul_ps(ai,bi);};
+  static __m256 applyAVX(__m256 ai, __m256 bi){return _mm256_mul_ps(ai,bi);};  
+
+};
+
+struct Sub32{
+  typedef float RealType;
+  static std::string name(){return std::string("subs");}
+  static RealType apply(RealType a, RealType b){return a-b;}  
+  static __m128 applySSE(__m128 ai, __m128 bi){return _mm_sub_ps(ai,bi);};
+  static __m256 applyAVX(__m256 ai, __m256 bi){return _mm256_sub_ps(ai,bi);};    
+};
+
+struct Div32{
+  typedef float RealType;
+  static std::string name(){return std::string("divs");}
+  static RealType apply(RealType a, RealType b){return a/b;}  
+  static __m128 applySSE(__m128 ai, __m128 bi){return _mm_div_ps(ai,bi);};
+  static __m256 applyAVX(__m256 ai, __m256 bi){return _mm256_div_ps(ai,bi);};
+};
+
+
+//Basic floating-point operations in double precision
+struct Add64{
+  typedef double RealType;
+  static std::string name(){return std::string("addd");}
+  static RealType apply(RealType a, RealType b){return a+b;}  
+  static __m128d applySSE(__m128d ai, __m128d bi){return _mm_add_pd(ai,bi);};
+  static __m256d applyAVX(__m256d ai, __m256d bi){return _mm256_add_pd(ai,bi);};
+};
+
+struct Mul64{
+  typedef double RealType;
+  static std::string name(){return std::string("muld");}
+  static RealType apply(RealType a, RealType b){return a*b;}  
+  static __m128d applySSE(__m128d ai, __m128d bi){return _mm_mul_pd(ai,bi);};
+  static __m256d applyAVX(__m256d ai, __m256d bi){return _mm256_mul_pd(ai,bi);};
+};
+
+struct Sub64{
+  typedef double RealType;
+  static std::string name(){return std::string("subd");}
+  static RealType apply(RealType a, RealType b){return a-b;}  
+  static __m128d applySSE(__m128d ai, __m128d bi){return _mm_sub_pd(ai,bi);};
+  static __m256d applyAVX(__m256d ai, __m256d bi){return _mm256_sub_pd(ai,bi);};
+};
+
+struct Div64{
+  typedef double RealType;
+  static std::string name(){return std::string("divd");}
+  static RealType apply(RealType a, RealType b){return a/b;}  
+  static __m128d applySSE(__m128d ai, __m128d bi){return _mm_div_pd(ai,bi);};
+  static __m256d applyAVX(__m256d ai, __m256d bi){return _mm256_div_pd(ai,bi);};
+};
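+
+// Each operation struct above bundles the scalar, SSE and AVX flavours of the
+// same arithmetic operation, so the loops and the checking code below can run
+// all three code paths on identical inputs and compare them against the same
+// reference values.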
+
+
+//Loop over std::vector
+template<class OP,class REALTYPE>
+struct Loop{
+};
+//Loop over std::vector :: Single precision
+template<class OP>
+struct Loop<OP,float>{
+  typedef std::vector<float> vectType;
+
+  static void applyScalar(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i++){
+      res[i]=OP::apply(v1[i],v2[i]);
+    }    
+  }
+
+  static void applySSE(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i+=4){
+      __m128 ai, bi,ri;
+      ai = _mm_loadu_ps(&(v1[i])); // unaligned loads: performance is not the point of this test
+      bi = _mm_loadu_ps(&(v2[i]));
+      ri= OP::applySSE(ai,bi);
+      _mm_storeu_ps(&res[i],ri);
+    }    
+  }
+
+  static void applyAVX(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i+=8){
+      __m256 ai, bi,ri;
+      ai = _mm256_loadu_ps(&(v1[i]));
+      bi = _mm256_loadu_ps(&(v2[i]));
+      ri= OP::applyAVX(ai,bi);
+      _mm256_storeu_ps(&res[i],ri);
+    }    
+  }  
+};
+
+//Loop over std::vector :: Double precision
+template<class OP>
+struct Loop<OP,double>{
+  typedef std::vector<double> vectType;
+
+  static void applyScalar(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i++){
+      res[i]=OP::apply(v1[i],v2[i]);
+    }    
+  }
+
+  static void applySSE(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i+=2){
+      __m128d ai, bi,ri;
+      ai = _mm_loadu_pd(&(v1[i]));
+      bi = _mm_loadu_pd(&(v2[i]));
+      ri= OP::applySSE(ai,bi);
+      _mm_storeu_pd(&res[i],ri);
+    }    
+  }
+
+  static void applyAVX(vectType& res, const vectType& v1, const vectType&v2){
+    int size=(int)v1.size();
+    for(int i=0; i< size;i+=4){
+      __m256d ai, bi,ri;
+      ai = _mm256_loadu_pd(&(v1[i]));
+      bi = _mm256_loadu_pd(&(v2[i]));
+      ri= OP::applyAVX(ai,bi);
+      _mm256_storeu_pd(&res[i],ri);
+    }    
+  }
+  
+};
+
+
+
+
+
+
+//Basic conversion
+
+// realType to Hex
+std::string realTypeToHex(float a){
+  std::ostringstream res ;
+  res << std::hex << std::uppercase << *(reinterpret_cast<uint32_t*>(&a));
+  return res.str() ;
+}
+std::string realTypeToHex(double a){
+  std::ostringstream res ;
+  res << std::hex << std::uppercase << *(reinterpret_cast<uint64_t*>(&a)) ;
+  return res.str() ;
+}
+
+
+// Hex to RealType
+float convertStringToRealType(std::string str){
+  char buff[11];
+  buff[0]='0';
+  buff[1]='x';
+  for(int i=0;i<8;i++)
+    buff[2+i]=str[i];
+  buff[10]='\0';
+  uint32_t res;
+  sscanf(buff, "%x",&res);
+  return *(reinterpret_cast<float*>(&res));
+}
+
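+// Note: the first argument (the first hex word read from the file) forms the
+// high 32 bits of the resulting double; the second forms the low 32 bits.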
+double convertStringToRealType(std::string str, std::string str2){
+  char buff[19];
+  buff[0]='0';
+  buff[1]='x';
+  buff[10]='\0';
+  for(int i=0;i<8;i++)
+    buff[2+i]=str[i];
+  uint32_t res1;
+  sscanf(buff, "%x",&res1);
+
+  uint32_t res2;
+  for(int i=0;i<8;i++)
+    buff[2+i]=str2[i];
+  sscanf(buff, "%x",&res2);
+
+  uint64_t res= (((uint64_t)res1) << 32)+(uint64_t)res2;
+
+  double resDouble=*(reinterpret_cast<double*>(&res));
+  return resDouble;
+}
+
+
+
+
+//Check if two floating-point values are equal
+bool isRealTypeEqual(const float& ref,
+		     const float& valueToTest){
+
+  //binary check
+#ifdef BINARY_CHECK
+  return *reinterpret_cast<const int32_t*>(&ref)== *reinterpret_cast<const int32_t*>(&valueToTest);
+#else
+  //float check
+  if(ref!=ref){//NaN expected
+    if(valueToTest!=valueToTest){
+      return true;
+    }
+    return false;
+  }else{
+    return (valueToTest==ref);
+  }
+#endif
+}
+
+
+
+
+
+
+//Check if testComputationTabSeq and refTab are equal : the other arguments are useful to print debug info in case of failure
+template<class RealType>
+bool checkTab(const std::string& fileName,
+	      const std::string& computationType, // Seq, SSE,AVX
+	      const std::vector<RealType>& testComputationTabSeq,
+	      const std::vector<RealType>& refTab,
+	      const std::vector<int>& lineTab,	     
+	      const std::vector<RealType>& v1Tab,
+	      const std::vector<RealType>& v2Tab){
+  int ok=0,ko=0;
+  int size=(int)refTab.size();
+  for(int i=0;i<size;i++){
+    bool isEqual=isRealTypeEqual(refTab[i],testComputationTabSeq[i]);
+    if(isEqual){
+      ok++;
+    }else{
+      ko++;
+      std::cout << fileName << ":"<<lineTab[i]<< " "<<computationType<< " "
+		<<" v1  v2  resRef res \t"
+		<<v1Tab[i] << "("<< realTypeToHex(v1Tab[i])<<")\t"
+		<<v2Tab[i] << "("<< realTypeToHex(v2Tab[i])<<")\t"
+		<<refTab[i] << "("<< realTypeToHex(refTab[i])<<")\t"
+		<< testComputationTabSeq[i]<< "("<<realTypeToHex(testComputationTabSeq[i])<<")"<< std::endl;
+      //      return;
+    }    
+  }  
+  std::cout <<fileName<<  " ok:" << ok <<"\tko:" <<ko <<std::endl<<std::endl;
+  return (ko==0 && ok!=0);
+}
+
+
+//Parse the end of the line of the UCB format : float
+void parseEndUCBLine(std::istringstream& iss, float& v1, float& v2, float& ref ){
+  std::string v1Str,v2Str,refStr;
+  iss >> v1Str >> v2Str>> refStr;
+ 
+  v1=convertStringToRealType(v1Str);
+  v2=convertStringToRealType(v2Str); 
+  ref=convertStringToRealType(refStr);
+}
+
+//Parse the end of the line of the UCB format : double
+void parseEndUCBLine(std::istringstream& iss, double& v1, double& v2, double& ref ){
+  std::string v1StrL,v2StrL,refStrL;
+  std::string v1StrH,v2StrH,refStrH;
+  iss >> v1StrL >> v1StrH
+      >> v2StrL >> v2StrH
+      >> refStrL >> refStrH;
+ 
+  v1=convertStringToRealType(v1StrL, v1StrH);
+  v2=convertStringToRealType(v2StrL, v2StrH ); 
+  ref=convertStringToRealType(refStrL, refStrH);
+}
+
+
+
+//LOAD UCB File for one OP with a specific rounding configuration and check results for scalar, sse and avx
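+//A UCB input line, as parsed below, starts with an operation name and a
+//rounding direction code (n/p/m/z), followed by two tokens that are read but
+//ignored, then the hex-encoded operands and reference result (one 32-bit word
+//each for float, two for double).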
+template<class OP>
+inline int loadDataAndCheck(const std::string& round){
+
+  typedef typename OP::RealType RealType;
+  std::vector<RealType> v1Tab, v2Tab,refTab;
+  std::vector<int> lineTab;
+
+  const std::string fileName("./inputData/"+OP::name()+std::string(".input")); 
+
+  std::cout <<fileName<<" "<< round <<std::endl; 
+  //Reference LOADING
+  int lineNumber=0;
+  std::ifstream input(fileName.c_str());
+  std::string line;  
+  while(std::getline(input,line)){
+    lineNumber++;
+    if(line[0]=='/'){ //comment line
+      continue;
+    }
+      
+    std::istringstream iss(line);
+
+    std::string fun,dir, eq, sep;
+    iss >> fun >> dir >> eq>> sep;
+    if(dir!=round){
+      continue;
+    }
+    RealType v1,v2,ref;
+    parseEndUCBLine(iss, v1,v2,ref);
+
+    v1Tab.push_back(v1);
+    v2Tab.push_back(v2);
+    refTab.push_back(ref);
+    lineTab.push_back(lineNumber);
+  }
+
+  //Padding
+  while((v1Tab.size() % 8) !=0){
+    v1Tab.push_back(0.);
+    v2Tab.push_back(1.);
+    refTab.push_back(OP::apply(0.,1.));
+    lineTab.push_back(-1);
+  }
+
+
+  
+  int size((int)v1Tab.size());
+  bool ok;
+  int nbKO=0;
+  //Scalar
+  std::vector<RealType> testComputationTabScalar(size);
+  Loop<OP,RealType>::applyScalar(testComputationTabScalar, v1Tab,v2Tab);
+  ok=checkTab(fileName,std::string("Scalar"),testComputationTabScalar,refTab,lineTab,v1Tab,v2Tab);
+  if(!ok) nbKO++;
+
+  //SSE
+  std::vector<RealType> testComputationTabSSE(size);
+  Loop<OP,RealType>::applySSE(testComputationTabSSE, v1Tab,v2Tab);
+  ok=checkTab(fileName,std::string("SSE"),testComputationTabSSE,refTab,lineTab,v1Tab,v2Tab);
+  if(!ok) nbKO++;
+  
+  //AVX
+  std::vector<RealType> testComputationTabAVX(size);
+  Loop<OP,RealType>::applyAVX(testComputationTabAVX, v1Tab,v2Tab);
+  ok=checkTab(fileName,std::string("AVX"),testComputationTabAVX,refTab,lineTab,v1Tab,v2Tab);
+  if(!ok) nbKO++;
+
+  return nbKO;
+}
+
+
+
+int loadDataAndCheck(std::string rounding){
+  int nbKO=0;
+  nbKO+=loadDataAndCheck<Add32>(rounding);
+  nbKO+=loadDataAndCheck<Add64>(rounding);
+  nbKO+=loadDataAndCheck<Mul32>(rounding);
+  nbKO+=loadDataAndCheck<Mul64>(rounding);
+  nbKO+=loadDataAndCheck<Div32>(rounding);
+  nbKO+=loadDataAndCheck<Div64>(rounding);
+  nbKO+=loadDataAndCheck<Sub32>(rounding);
+  nbKO+=loadDataAndCheck<Sub64>(rounding);
+
+  std::cout << "nbKO:" << nbKO<<std::endl;
+  return nbKO;
+}
+  
+
+
+void usage(char** argv){
+      std::cout << "usage: "<<argv[0] << " ENV --rounding-mode=[nearest|upward|downward|toward_zero]"<<std::endl;
+      std::cout << "ENV is one of: fenv, valgrind, verificarlo"<<std::endl;
+}
+
+
+
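+// The --rounding-mode options below select both the fesetround() mode (fenv
+// case) and the UCB rounding direction code passed to loadDataAndCheck():
+//   nearest -> "n", upward -> "p", downward -> "m", toward_zero -> "z".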
+int main(int argc, char** argv){
+  if(argc!=3){
+    usage(argv);
+    return EXIT_FAILURE;
+  }
+
+  std::string env(argv[1]);
+  bool fenv=false;
+  if(env==std::string("fenv")){
+    fenv=true;
+  }else{
+    if(env!= std::string("valgrind") && env!=std::string("verificarlo")){
+      usage(argv);
+      return EXIT_FAILURE;
+    }
+    
+  }
+  
+  
+
+  std::string option(argv[2]);
+
+  if(option==std::string("--rounding-mode=nearest")){
+    if(fenv){
+      fesetround(FE_TONEAREST);
+    }
+    return loadDataAndCheck("n");
+  }
+  if(option==std::string("--rounding-mode=upward")){
+    if(fenv){
+      fesetround(FE_UPWARD);
+    }
+    return loadDataAndCheck("p");
+  }
+  if(option==std::string("--rounding-mode=downward")){
+    if(fenv){
+      fesetround(FE_DOWNWARD);
+    }
+    return loadDataAndCheck("m");
+  }
+  if(option==std::string("--rounding-mode=toward_zero")){
+    if(fenv){
+      fesetround(FE_TOWARDZERO);
+    }
+    return loadDataAndCheck("z");
+  }
+  usage(argv);
+  return EXIT_FAILURE;
+
+}
diff --git a/verrou/unitTest/flag.mk.in b/verrou/unitTest/flag.mk.in
new file mode 100644
index 0000000000000000000000000000000000000000..e5b0dff9fd85ab007269953668cab3b86d009ae2
--- /dev/null
+++ b/verrou/unitTest/flag.mk.in
@@ -0,0 +1,6 @@
+# @configure_input@
+
+AVX512FLAG=@vg_test_no_avx512f_flag@
+
+DEBUGFLAGS = -mfpmath=sse $(AVX512FLAG)
+NATIVEFLAGS= -mfpmath=sse -march=native -mfma $(AVX512FLAG)
diff --git a/verrou/unitTest/installpath b/verrou/unitTest/installpath
new file mode 100755
index 0000000000000000000000000000000000000000..0c22ea898080667d3c2d78dbc3faad32ff35fdb1
--- /dev/null
+++ b/verrou/unitTest/installpath
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+source ../../env.sh; readlink -f $(dirname $(which valgrind))/..
diff --git a/verrou/unitTest/makefile b/verrou/unitTest/makefile
new file mode 100644
index 0000000000000000000000000000000000000000..52540939160c9b3431f77f98ec5febd946de7215
--- /dev/null
+++ b/verrou/unitTest/makefile
@@ -0,0 +1,17 @@
+.PHONY: checkRounding checkUCB-vecto check-libM valgrind-test
+
+
+all: checkRounding checkUCB-vecto check-libM
+checkRounding:
+	make -C ./checkRounding
+
+checkUCB-vecto:
+	make -C ./checkUCB-vecto
+
+check-libM:
+	make -C ./check-libM
+#to be able to run the valgrind tests from this directory
+valgrind-test:
+	make -C ../../tests check
+	make -C ../../verrou check
+	cd ../..; perl ./tests/vg_regtest verrou/tests
diff --git a/verrou/unitTest/nocheckExemples/addVecDoubleAVX.cxx b/verrou/unitTest/nocheckExemples/addVecDoubleAVX.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..79d06df3db8c177f9e59e816731a4ca485f54988
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/addVecDoubleAVX.cxx
@@ -0,0 +1,50 @@
+
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+#include <avxintrin.h>
+
+int main(int argc, char** argv){
+  double a[4]  __attribute__ ((aligned(32)));
+  double b[4]  __attribute__ ((aligned(32)));
+  double r[4]  __attribute__ ((aligned(32)));
+    
+  if(argc==9){
+    a[0]=atof(argv[1]);
+    a[1]=atof(argv[2]);
+    a[2]=atof(argv[3]);
+    a[3]=atof(argv[4]);
+    b[0]=atof(argv[5]);
+    b[1]=atof(argv[6]);
+    b[2]=atof(argv[7]);
+    b[3]=atof(argv[8]);
+  }else{
+    std::cerr << "requires 8 arguments"<<std::endl;
+    return EXIT_FAILURE;
+  }
+
+  {
+    __m256d ai, bi, ri;
+    ai = _mm256_load_pd(a);
+    bi = _mm256_load_pd(b);
+    ri = _mm256_add_pd(ai, bi);
+    _mm256_store_pd(r, ri);
+  }
+
+
+  std::cout.precision(10);
+  std::cout << " a[0], a[1], a[2], a[3] " << a[0] <<","<< a[1]<<","<< a[2]<<","<< a[3] <<std::endl 
+	    << " b[0], b[1], b[2], b[3] " << b[0] <<","<< b[1]<<","<< b[2]<<","<< b[3] <<std::endl 
+	    << " r[0], r[1], r[2], r[3] " << r[0] <<","<< r[1]<<","<< r[2]<<","<< r[3] <<std::endl 
+	    << "  diff1-0 : "<<   (r[1]-r[0])<<std::endl
+  	    << "  diff2-0 : "<<   (r[2]-r[0])<<std::endl
+	    << "  diff3-0 : "<<   (r[3]-r[0])<<std::endl;
+
+
+
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/nocheckExemples/addVecDoubleSSE.cxx b/verrou/unitTest/nocheckExemples/addVecDoubleSSE.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..702f926b5784d1e38bc7cdf971412110f3806802
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/addVecDoubleSSE.cxx
@@ -0,0 +1,42 @@
+
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+//#include <avxintrin_emu.h>
+int main(int argc, char** argv){
+  double a[2] ,b[2] ;
+  double r[2];
+    
+  if(argc==5){
+    a[0]=atof(argv[1]);
+    a[1]=atof(argv[2]);
+    b[0]=atof(argv[3]);
+    b[1]=atof(argv[4]);    
+  }else{
+    std::cerr << "requires 4 arguments"<<std::endl;
+    return EXIT_FAILURE;
+  }
+
+  {
+    __m128d ai, bi, ri;
+    ai = _mm_load_pd(a);
+    bi = _mm_load_pd(b);
+    ri = _mm_add_pd(ai, bi);
+    _mm_store_pd(r, ri);
+  }
+  
+
+  
+  std::cout << " a[0], a[1] " << a[0] <<","<< a[1] <<std::endl 
+	    << " b[0], b[1] " << b[0] <<","<< b[1] <<std::endl 
+	    << " r[0], r[1] "  << r[0] <<","<< r[1] <<std::endl 
+	    << "  diff : "<<   (r[1]-r[0])<<std::endl;
+
+
+
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/nocheckExemples/addVecFloatAVX.cxx b/verrou/unitTest/nocheckExemples/addVecFloatAVX.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..af217a66010ec26ac5c3c0dcaddf51a9860dac46
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/addVecFloatAVX.cxx
@@ -0,0 +1,65 @@
+
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+#include <avxintrin.h>
+
+int main(int argc, char** argv){
+  float a[8]  __attribute__ ((aligned(32)));
+  float b[8]  __attribute__ ((aligned(32)));
+  float r[8]  __attribute__ ((aligned(32)));
+    
+  if(argc==17){
+    a[0]=atof(argv[1]);
+    a[1]=atof(argv[2]);
+    a[2]=atof(argv[3]);
+    a[3]=atof(argv[4]);
+    a[4]=atof(argv[5]);
+    a[5]=atof(argv[6]);
+    a[6]=atof(argv[7]);
+    a[7]=atof(argv[8]);
+
+
+    b[0]=atof(argv[9]);
+    b[1]=atof(argv[10]);
+    b[2]=atof(argv[11]);
+    b[3]=atof(argv[12]);
+    b[4]=atof(argv[13]);
+    b[5]=atof(argv[14]);
+    b[6]=atof(argv[15]);
+    b[7]=atof(argv[16]);
+
+  }else{
+    std::cerr << "requires 16 arguments, got "<< argc-1 <<std::endl;
+    return EXIT_FAILURE;
+  }
+
+  {
+    __m256 ai, bi, ri;
+    ai = _mm256_load_ps(a);
+    bi = _mm256_load_ps(b);
+    ri = _mm256_add_ps(ai, bi);
+    _mm256_store_ps(r, ri);
+  }
+
+
+  std::cout.precision(10);
+  std::cout << " a[0], a[1], a[2], a[3], a[4], a[5], a[6], a[7] " << a[0] <<","<< a[1]<<","<< a[2]<<","<< a[3] <<"," << a[4] <<","<< a[5]<<","<< a[6]<<","<< a[7] <<std::endl 
+	    << " b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7] " << b[0] <<","<< b[1]<<","<< b[2]<<","<< b[3] <<"," << b[4] <<","<< b[5]<<","<< b[6]<<","<< b[7] <<std::endl 
+    	    << " r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7] " << r[0] <<","<< r[1]<<","<< r[2]<<","<< r[3] <<"," << r[4] <<","<< r[5]<<","<< r[6]<<","<< r[7] <<std::endl 
+	    << "  diff1-0 : "<<   (r[1]-r[0])<<std::endl
+  	    << "  diff2-0 : "<<   (r[2]-r[0])<<std::endl
+	    << "  diff3-0 : "<<   (r[3]-r[0])<<std::endl
+      	    << "  diff4-0 : "<<   (r[4]-r[0])<<std::endl
+      	    << "  diff5-0 : "<<   (r[5]-r[0])<<std::endl
+      	    << "  diff6-0 : "<<   (r[6]-r[0])<<std::endl
+      	    << "  diff7-0 : "<<   (r[7]-r[0])<<std::endl  ;
+
+
+
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/nocheckExemples/addVecFloatSSE.cxx b/verrou/unitTest/nocheckExemples/addVecFloatSSE.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..07de2752bc615ae6f79519a13ba29916c30937b9
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/addVecFloatSSE.cxx
@@ -0,0 +1,51 @@
+
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+//#include <avxintrin_emu.h>
+int main(int argc, char** argv){
+  float a[4] ,b[4] ;
+  float r[4];
+    
+  if(argc==9){
+    a[0]=atof(argv[1]);
+    a[1]=atof(argv[2]);
+    a[2]=atof(argv[3]);
+    a[3]=atof(argv[4]);
+    b[0]=atof(argv[5]);
+    b[1]=atof(argv[6]);
+    b[2]=atof(argv[7]);
+    b[3]=atof(argv[8]);
+
+    
+  }else{
+    std::cerr << "requires 8 arguments"<<std::endl;
+    return EXIT_FAILURE;
+  }
+
+  {
+    __m128 ai, bi, ri;
+    ai = _mm_load_ps(a);
+    bi = _mm_load_ps(b);
+    ri = _mm_add_ps(ai, bi);
+    _mm_store_ps(r, ri);
+  }
+  
+
+  
+  std::cout.precision(10);
+  std::cout << " a[0], a[1], a[2], a[3] " << a[0] <<","<< a[1]<<","<< a[2]<<","<< a[3] <<std::endl 
+	    << " b[0], b[1], b[2], b[3] " << b[0] <<","<< b[1]<<","<< b[2]<<","<< b[3] <<std::endl 
+	    << " r[0], r[1], r[2], r[3] " << r[0] <<","<< r[1]<<","<< r[2]<<","<< r[3] <<std::endl 
+	    << "  diff1-0 : "<<   (r[1]-r[0])<<std::endl
+	    << "  diff2-0 : "<<   (r[2]-r[0])<<std::endl
+	    << "  diff3-0 : "<<   (r[3]-r[0])<<std::endl;
+
+
+
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/nocheckExemples/cast.cxx b/verrou/unitTest/nocheckExemples/cast.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..ebac9a87197fc77bf3dfcb6e717b8ec402f65faf
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/cast.cxx
@@ -0,0 +1,24 @@
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+//#include <avxintrin_emu.h>
+int main(int argc, char** argv){
+  double a;
+  if(argc==2){
+    a=atof(argv[1]);
+  }else{
+    std::cerr << "requires 1 argument"<<std::endl;
+    return EXIT_FAILURE;
+  }
+
+  float af=(float)a;
+
+  std::cout << "a:" <<a<<std::endl;
+  std::cout << "af:" <<af<<std::endl;
+  std::cout << "diff:" <<a -(double)af<<std::endl;
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/nocheckExemples/checkCosSin.cxx b/verrou/unitTest/nocheckExemples/checkCosSin.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..a1f9b15c93e6d6e1b99f4aedd74293d368307030
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/checkCosSin.cxx
@@ -0,0 +1,82 @@
+
+#include <iostream>
+#include <cstdlib>
+#include <math.h>
+#include <string.h>
+
+
+#include <limits>
+
+#include "verrou.h"
+
+//VERROU_STOP_INSTRUMENTATION;
+//VERROU_START_INSTRUMENTATION;
+
+
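+// The empty asm statements with a "memory" clobber in the two wrappers below
+// act as compiler barriers: presumably they keep the compiler from moving
+// floating-point operations across the instrumentation start/stop requests.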
+void verrou_stop(const void* constPtr)    {
+  volatile void* ptr=const_cast<void*>(constPtr);
+  __asm__ volatile (" ":: "X" (ptr) :"memory"  );
+  VERROU_STOP_INSTRUMENTATION;  
+  __asm__ volatile (" ":: "X" (ptr) :"memory"  );
+}
+
+void verrou_start(const void* constPtr)    {
+  volatile void* ptr=const_cast<void*>(constPtr);
+  __asm__ volatile (" ":: "X" (ptr) :"memory"  );
+  VERROU_START_INSTRUMENTATION;  
+  __asm__ volatile (" ":: "X" (ptr) :"memory"  );
+}
+
+
+
+template<class REALTYPE>
+void computeCos(int nb){
+  const REALTYPE step= M_PI / (REALTYPE)(nb);
+  REALTYPE accSin(0.);
+  REALTYPE accCos(0.);
+  std::cout << "nb:" <<nb<<std::endl;
+  for(int i=2700; i<nb; i++){
+    const REALTYPE x(i*step);
+    //    std::cout << "x: " << x<<std::endl;
+    //    verrou_stop(&x);
+    REALTYPE cosx(cos(x));
+    REALTYPE sinx(sin(x));
+    //    verrou_start(&cosx);
+    //    sinx=FORCE_EVAL_DOUBLE(sinx);
+    //    cosx=FORCE_EVAL_DOUBLE(cosx);
+
+    
+    accSin+=sinx;
+    accCos+=cosx;
+    
+    if((cosx > 1.)  ||  (cosx < -1.) ) {
+      std::cout << "cosx:" << cosx<< " i:"<<i<<std::endl;
+      std::cout << "sinx:" << sinx<< " i:"<<i<<std::endl;
+      exit(42);
+    }    
+    if((sinx > 1.)|| (sinx < -1.)) {
+      std::cout << "cosx:" << cosx<< " i:"<<i<<std::endl;
+      std::cout << "sinx:" << sinx<< " i:"<<i<<std::endl;
+      exit(42);
+    }
+
+    
+  }
+  std::cout << "accCos:" <<accCos<<std::endl;
+  std::cout << "accSin:" <<accSin<<std::endl;
+};
+
+
+
+int main(int argc, char** argv){
+  int nb=10000;
+  std::cout << " computeCos<double>"<<std::endl;
+  computeCos<double>(nb);
+  
+  //  std::cout << " computeCos<float>"<<std::endl;
+  //  computeCos<float>(nb);
+
+  return EXIT_SUCCESS;
+}
+
+
diff --git a/verrou/unitTest/nocheckExemples/fma.cxx b/verrou/unitTest/nocheckExemples/fma.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..ef28756d742956e782339632d6803842047c0639
--- /dev/null
+++ b/verrou/unitTest/nocheckExemples/fma.cxx
@@ -0,0 +1,68 @@
+
+#include <iostream>
+#include <math.h>
+#include <cstdlib>
+
+
+#include  <immintrin.h>
+//#include <avxintrin_emu.h>
+int main(int argc, char** argv){
+  double a,b,c,d,e ;
+  float af,bf,cf,df,ef ;
+  if(argc==4){
+    a=atof(argv[1]);af=a;
+    b=atof(argv[2]);bf=b;
+    c=atof(argv[3]);cf=c;
+  }else{
+    std::cerr << "requires 3 arguments"<<std::endl;
+    return EXIT_FAILURE;
+  }
+  bool fast;
+  //#ifdef FP_FAST_FMA
+  //  fast=true;
+  //  d=fma(a,b,c);
+  //#else
+  fast=false;
+
+  {
+    __m128d ai, bi, ei;
+    ai = _mm_load_sd(&a);
+    bi = _mm_load_sd(&b);
+    ei = _mm_add_sd(ai, bi);
+    e = _mm_cvtsd_f64(ei);
+    std::cout << "e computed" << std::endl;
+  }
+  {
+    d = a + b;
+    std::cout << "intrinsic add / scalar add: " << e << ","<< d<<std::endl;
+  }
+  {
+    __m128d ai, bi, ci, di;
+    ai = _mm_load_sd(&a);
+    bi = _mm_load_sd(&b);
+    ci = _mm_load_sd(&c);
+    di = _mm_fmadd_sd(ai, bi, ci);
+    d = _mm_cvtsd_f64(di);
+  }
+
+  {
+    __m128 ai, bi, ci, di;
+    ai = _mm_load_ss(&af);
+    bi = _mm_load_ss(&bf);
+    ci = _mm_load_ss(&cf);
+    di = _mm_fmadd_ss(ai, bi, ci);
+    df = _mm_cvtss_f32(di);
+  }
+  //  d=di;
+  //#endif
+
+  
+  std::cout << " a, b, c : " << a <<","<< b <<","<< c <<std::endl;
+  std::cout << "result of intrinsic fma double: " << d << std::endl;
+  std::cout << "result of intrinsic fma float: " << df << std::endl;
+
+
+
+  return EXIT_SUCCESS;
+}
diff --git a/verrou/unitTest/testPerf/README b/verrou/unitTest/testPerf/README
new file mode 100644
index 0000000000000000000000000000000000000000..3a793ea341055f44ccd449bd3827196e96a980f9
--- /dev/null
+++ b/verrou/unitTest/testPerf/README
@@ -0,0 +1,9 @@
+
+#compile the 4 stencil executables ([O0,O3]*[float,double])
+make
+
+#run the 4 executables with PREFIX
+PREFIX="valgrind --tool=verrou  --rounding-mode=random " ./run.sh 2>/dev/null
+
+#to get improved performance (warning: unsafe)
+PREFIX="valgrind --tool=verrou  --rounding-mode=random --vr-unsafe-llo-optim=yes " ./run.sh 2>/dev/null
\ No newline at end of file
diff --git a/verrou/unitTest/testPerf/run.sh b/verrou/unitTest/testPerf/run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4fbb1b27e37c146c489a1c2085750c7b41d1671d
--- /dev/null
+++ b/verrou/unitTest/testPerf/run.sh
@@ -0,0 +1,21 @@
+
+
+BIN=stencil
+
+BINO3DOUBLE=$BIN-O3-DOUBLE
+BINO0DOUBLE=$BIN-O0-DOUBLE
+BINO3FLOAT=$BIN-O3-FLOAT
+BINO0FLOAT=$BIN-O0-FLOAT
+
+BINS="./$BINO3DOUBLE ./$BINO0DOUBLE ./$BINO3FLOAT ./$BINO0FLOAT"
+
+OPT="--scale=1 2"
+
+
+
+for i in $BINS
+do
+    echo $i
+    $PREFIX $i $OPT
+
+done;
diff --git a/verrou/unitTest/testPerf/stencil.cpp b/verrou/unitTest/testPerf/stencil.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..e8ed672a1103f5c50bfcfcba6bfcf7760c7cc08e
--- /dev/null
+++ b/verrou/unitTest/testPerf/stencil.cpp
@@ -0,0 +1,195 @@
+/*
+  Copyright (c) 2010-2014, Intel Corporation
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution.
+
+    * Neither the name of Intel Corporation nor the names of its
+      contributors may be used to endorse or promote products derived from
+      this software without specific prior written permission.
+
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+   IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+   TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+   PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.  
+*/
+
+#ifdef _MSC_VER
+#define _CRT_SECURE_NO_WARNINGS
+#define NOMINMAX
+#pragma warning (disable: 4244)
+#pragma warning (disable: 4305)
+#endif
+
+#include <cstdlib>
+#include <stdio.h>
+#include <algorithm>
+#include <string.h>
+#include <math.h>
+#include "./timing.h"
+#include <iostream>
+#include <fstream>
+#include <iomanip>
+#include "valgrind/verrou.h"
+//#include "stencil_ispc.h"
+//using namespace ispc;
+
+#ifdef FLOAT
+typedef  float RealType;
+#else
+typedef double RealType;
+#endif
+
+extern void loop_stencil_serial(int t0, int t1, int x0, int x1,
+                                int y0, int y1, int z0, int z1,
+                                int Nx, int Ny, int Nz,
+                                const RealType coef[4],
+                                const RealType vsq[],
+                                RealType Aeven[], RealType Aodd[]);
+
+
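+// stencil_step applies a radius-3 finite-difference operator (coefficients
+// coef[0..3]) to Ain. On entry Aout holds the previous time step, so the
+// update A_next = 2*A_cur - A_prev + vsq*div amounts to a leap-frog scheme
+// in time (Aeven/Aodd are swapped at each step by loop_stencil_serial).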
+static void
+stencil_step(int x0, int x1,
+             int y0, int y1,
+             int z0, int z1,
+             int Nx, int Ny, int Nz,
+             const RealType coef[4], const RealType vsq[],
+             const RealType Ain[], RealType Aout[]) {
+    int Nxy = Nx * Ny;
+
+    for (int z = z0; z < z1; ++z) {
+        for (int y = y0; y < y1; ++y) {
+            for (int x = x0; x < x1; ++x) {
+                int index = (z * Nxy) + (y * Nx) + x;
+#define A_cur(x, y, z) Ain[index + (x) + ((y) * Nx) + ((z) * Nxy)]
+#define A_next(x, y, z) Aout[index + (x) + ((y) * Nx) + ((z) * Nxy)]
+                RealType div = coef[0] * A_cur(0, 0, 0) +
+                            coef[1] * (A_cur(+1, 0, 0) + A_cur(-1, 0, 0) +
+                                       A_cur(0, +1, 0) + A_cur(0, -1, 0) +
+                                       A_cur(0, 0, +1) + A_cur(0, 0, -1)) +
+                            coef[2] * (A_cur(+2, 0, 0) + A_cur(-2, 0, 0) +
+                                       A_cur(0, +2, 0) + A_cur(0, -2, 0) +
+                                       A_cur(0, 0, +2) + A_cur(0, 0, -2)) +
+                            coef[3] * (A_cur(+3, 0, 0) + A_cur(-3, 0, 0) +
+                                       A_cur(0, +3, 0) + A_cur(0, -3, 0) +
+                                       A_cur(0, 0, +3) + A_cur(0, 0, -3));
+
+                A_next(0, 0, 0) = 2 * A_cur(0, 0, 0) - A_next(0, 0, 0) + 
+                    vsq[index] * div;
+            }
+        }
+    }
+}
+
+
+void loop_stencil_serial(int t0, int t1, 
+                         int x0, int x1,
+                         int y0, int y1,
+                         int z0, int z1,
+                         int Nx, int Ny, int Nz,
+                         const RealType coef[4], 
+                         const RealType vsq[],
+                         RealType Aeven[], RealType Aodd[])
+{
+    for (int t = t0; t < t1; ++t) {
+        if ((t & 1) == 0)
+            stencil_step(x0, x1, y0, y1, z0, z1, Nx, Ny, Nz, coef, vsq, 
+                         Aeven, Aodd);
+        else
+            stencil_step(x0, x1, y0, y1, z0, z1, Nx, Ny, Nz, coef, vsq, 
+                         Aodd, Aeven);
+    }
+}
+
+
+
+void InitData(int Nx, int Ny, int Nz, RealType *A[2], RealType *vsq) {
+    int offset = 0;
+    for (int z = 0; z < Nz; ++z)
+        for (int y = 0; y < Ny; ++y)
+            for (int x = 0; x < Nx; ++x, ++offset) {
+                A[0][offset] = (x < Nx / 2) ? x / RealType(Nx) : y / RealType(Ny);
+                A[1][offset] = 0;
+                vsq[offset] = x*y*z / RealType(Nx * Ny * Nz);
+            }
+}
+
+
+int main(int argc, char *argv[]) {
+    static unsigned int test_iterations=1;
+    int Nx = 256, Ny = 256, Nz = 256;
+    int width = 4;
+
+    if (argc > 1) {
+        if (strncmp(argv[1], "--scale=", 8) == 0) {
+            RealType scale = atof(argv[1] + 8);
+            Nx = Nx * scale;
+            Ny = Ny * scale;
+            Nz = Nz * scale;
+        }
+    }
+    if ((argc == 3)) {
+      test_iterations = atoi(argv[2]);
+    }
+
+    //Allocation and initialisation
+    RealType *Aserial[2];
+    Aserial[0] = new RealType [Nx * Ny * Nz];
+    Aserial[1] = new RealType [Nx * Ny * Nz];
+    RealType *vsq = new RealType [Nx * Ny * Nz];
+
+    RealType coeff[4] = { 0.5, -.25, .125, -.0625 }; 
+
+
+    InitData(Nx, Ny, Nz, Aserial, vsq);
+
+
+
+    double minTimeSerial = 1e30;
+    for (unsigned int i = 0; i < test_iterations; ++i) {
+        reset_and_start_timer();
+	VERROU_START_INSTRUMENTATION;
+        loop_stencil_serial(0, 6, width, Nx-width, width, Ny - width,
+                            width, Nz - width, Nx, Ny, Nz, coeff, vsq,
+                            Aserial[0], Aserial[1]);
+	VERROU_STOP_INSTRUMENTATION;
+        double dt = get_elapsed_sec();
+	printf("@time of serial run:\t\t\t[%.3f] seconds\n", dt);
+	minTimeSerial = std::min(minTimeSerial, dt);	
+    }
+    printf("@mintime of serial run:\t\t\t[%.3f] seconds\n", minTimeSerial);
+
+      
+    // printf("\t\t\t\t(%.2fx speedup from ISPC, %.2fx speedup from ISPC + tasks)\n", 
+    //        minTimeSerial / minTimeISPC, minTimeSerial / minTimeISPCTasks);
+
+    // Check for agreement
+    int offset = 0;
+    RealType norm=0;
+    for (int z = 0; z < Nz; ++z){
+      for (int y = 0; y < Ny; ++y){
+	for (int x = 0; x < Nx; ++x, ++offset) {
+	  RealType value= Aserial[1][offset];
+	  norm += value*value;
+	}
+      }
+    }
+    std::cout << std::setprecision(16)<< "norm: " << sqrt(norm)<<std::endl;
+}
diff --git a/verrou/unitTest/testPerf/study.sh b/verrou/unitTest/testPerf/study.sh
new file mode 100755
index 0000000000000000000000000000000000000000..198ffd99d11f838503e545fa7cc1c9cc9e59aefb
--- /dev/null
+++ b/verrou/unitTest/testPerf/study.sh
@@ -0,0 +1,10 @@
+
+
+PREFIX=" " ./run.sh 2>/dev/null > resNative
+PREFIX="valgrind --tool=verrou " ./run.sh 2>/dev/null >resNearest
+PREFIX="valgrind --tool=verrou  --rounding-mode=random " ./run.sh 2>/dev/null >resRandom
+PREFIX="valgrind --tool=verrou  --rounding-mode=average " ./run.sh 2>/dev/null >resAverage
+
+PREFIX="valgrind --tool=verrou  --rounding-mode=float " ./run.sh 2>/dev/null >resFloat
+
+
diff --git a/verrou/unitTest/testPerf/timing.h b/verrou/unitTest/testPerf/timing.h
new file mode 100644
index 0000000000000000000000000000000000000000..7d7106babe494b19611e295a5282c38b871181cb
--- /dev/null
+++ b/verrou/unitTest/testPerf/timing.h
@@ -0,0 +1,68 @@
+/*
+  Copyright (c) 2010-2011, Intel Corporation
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution.
+
+    * Neither the name of Intel Corporation nor the names of its
+      contributors may be used to endorse or promote products derived from
+      this software without specific prior written permission.
+
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+   IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+   TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+   PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
+   OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+   PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+   LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+   NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+   SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.  
+*/
+
+#include <stdint.h>
+
+#include <sys/time.h>
+
+static struct timeval beginTV;
+static struct timeval endTV;
+
+static inline void begin(void)
+{
+  gettimeofday(&beginTV,NULL);
+}
+
+static inline void end(void)
+{
+  gettimeofday(&endTV,NULL);
+}
+
+
+static inline void reset_and_start_timer()
+{
+  begin();
+}
+
+
+
+static inline double get_elapsed_sec()
+{
+  end();
+  double etime =  (double) (endTV.tv_sec - beginTV.tv_sec) +
+    1.e-6* (double) (endTV.tv_usec - beginTV.tv_usec);
+  return etime;
+}
+
diff --git a/verrou/verrou.bib b/verrou/verrou.bib
new file mode 100644
index 0000000000000000000000000000000000000000..778708f62e8058d3a9d75f09c088bac7fe4484ff
--- /dev/null
+++ b/verrou/verrou.bib
@@ -0,0 +1,41 @@
+@inproceedings{fevotte2019,
+  author = {François Févotte and Bruno Lathuilière},
+  title = {Debugging and Optimization of {HPC} Programs with the {Verrou} Tool},
+  booktitle = {International Workshop on Software Correctness for HPC Applications (Correctness)},
+  year = {2019},
+  month = nov,
+  address = {Denver, CO, USA},
+  doi = {10.1109/Correctness49594.2019.00006}
+}
+
+@article{grasland2019,
+  author = {Grasland, Hadrien and F\'evotte, Fran\c{c}ois and Lathuili\`ere, Bruno and Chamont, David},
+  title = {Floating-point profiling of {ACTS} using {V}errou},
+  doi = {10.1051/epjconf/201921405025},
+  journal = {EPJ Web Conf.},
+  year = 2019,
+  volume = 214
+}
+
+@inproceedings{fevotte2017a,
+  author = {F{\'e}votte, Fran{\c{c}}ois and Lathuili{\`e}re, Bruno},
+  title = {Studying the Numerical Quality of an Industrial Computing Code: A Case
+                  Study on code{\_}aster},
+  booktitle = {10th International Workshop on Numerical Software Verification (NSV)},
+  year = 2017,
+  pages = {61--80},
+  address = {Heidelberg, Germany},
+  month = jul,
+  isbn = {978-3-319-63501-9},
+  doi = {10.1007/978-3-319-63501-9_5},
+}
+
+@inproceedings{fevotte2016,
+  author = {François Févotte and Bruno Lathuilière},
+  title = { {VERROU}: a {CESTAC} evaluation without recompilation},
+  booktitle = {International Symposium on Scientific Computing, Computer
+                  Arithmetics and Verified Numerics~(SCAN)},
+  year = 2016,
+  month = sep,
+  address = {Uppsala, Sweden}
+}
diff --git a/verrou/verrou.h b/verrou/verrou.h
new file mode 100644
index 0000000000000000000000000000000000000000..c1a26aeaab9dac1a8a820039b37efac2a40ad95d
--- /dev/null
+++ b/verrou/verrou.h
@@ -0,0 +1,79 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Headers for the public API.                                  ---*/
+/*---                                                     verrou.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __VERROU_H
+#define __VERROU_H
+
+#include "valgrind.h"
+
+/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
+   This enum comprises an ABI exported by Valgrind to programs
+   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
+   ENTRIES, NOR DELETE ANY -- add new ones at the end.
+ */
+
+typedef
+enum {
+  VR_USERREQ__START_INSTRUMENTATION = VG_USERREQ_TOOL_BASE('V', 'R'),
+  VR_USERREQ__STOP_INSTRUMENTATION,
+  VR_USERREQ__START_DETERMINISTIC,
+  VR_USERREQ__STOP_DETERMINISTIC,
+  VR_USERREQ__DISPLAY_COUNTERS,
+  VR_USERREQ__DUMP_COVER
+} Vg_VerrouClientRequest;
+
+#define VERROU_START_INSTRUMENTATION                                 \
+  VALGRIND_DO_CLIENT_REQUEST_STMT(VR_USERREQ__START_INSTRUMENTATION, \
+                                  0, 0, 0, 0, 0)
+
+#define VERROU_STOP_INSTRUMENTATION                                  \
+  VALGRIND_DO_CLIENT_REQUEST_STMT(VR_USERREQ__STOP_INSTRUMENTATION,  \
+                                  0, 0, 0, 0, 0)
+
+#define VERROU_START_DETERMINISTIC(LEVEL)                            \
+  VALGRIND_DO_CLIENT_REQUEST_STMT(VR_USERREQ__START_DETERMINISTIC,   \
+                                  LEVEL, 0, 0, 0, 0)
+
+#define VERROU_STOP_DETERMINISTIC(LEVEL)                             \
+  VALGRIND_DO_CLIENT_REQUEST_STMT(VR_USERREQ__STOP_DETERMINISTIC,    \
+                                  LEVEL, 0, 0, 0, 0)
+
+#define VERROU_DISPLAY_COUNTERS                                      \
+  VALGRIND_DO_CLIENT_REQUEST_STMT(VR_USERREQ__DISPLAY_COUNTERS,      \
+                                  0, 0, 0, 0, 0)
+
+#define VERROU_DUMP_COVER \
+  (unsigned int)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,	      \
+					    VR_USERREQ__DUMP_COVER,   \
+					    0, 0, 0, 0, 0)
+
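+/* Usage sketch: a client program can be run with --instr-atstart=no and wrap
+   only the region of interest, e.g.
+
+       VERROU_START_INSTRUMENTATION;
+       compute();                    // hypothetical function of interest
+       VERROU_STOP_INSTRUMENTATION;
+
+   These macros expand to Valgrind client requests and are harmless no-ops
+   when the program is run outside Valgrind. */
+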
+#endif /* __VERROU_H */
diff --git a/verrou/vr_clo.c b/verrou/vr_clo.c
new file mode 100644
index 0000000000000000000000000000000000000000..36b30233ef2b834cfc0411c3f4e6fb844bf16337
--- /dev/null
+++ b/verrou/vr_clo.c
@@ -0,0 +1,249 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains code related to command-line options.     ---*/
+/*---                                                     vr_clo.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+//#include "backend_verrou/vr_rand.h"
+#include "backend_verrou/interflop_verrou.h"
+#include "backend_mcaquad/interflop_mcaquad.h"
+
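+/* Maps an environment variable onto an equivalent command-line option.  For
+   example (hypothetical names), vr_env_clo("VERROU_ROUNDING_MODE",
+   "--rounding-mode") would turn VERROU_ROUNDING_MODE=random into
+   --rounding-mode=random. */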
+void vr_env_clo (const HChar* env, const HChar *clo) {
+  HChar* val = VG_(getenv)(env);
+  if (val) {
+    HChar tmp[256];
+    VG_(snprintf)(tmp, 255, "%s=%s", clo, val);
+    if (!vr_process_clo(tmp)) {
+      VG_(umsg)("WARNING: unknown command-line option `%s'\n", tmp);
+    }
+  }
+}
+
+void vr_clo_defaults (void) {
+  vr.backend = vr_verrou;
+  vr.roundingMode = VR_NEAREST;
+  vr.count = True;
+  vr.instr_scalar = False;
+  vr.instrument = VR_INSTR_ON;
+  vr.verbose = False;
+  vr.unsafe_llo_optim = False;
+
+  vr.genExclude = False;
+  vr.exclude = NULL;
+  //  vr.genAbove = NULL;
+
+  vr.genIncludeSource = False;
+  vr.includeSource = NULL;
+  vr.sourceActivated= False;
+
+  vr.genTrace=False;
+  vr.includeTrace = NULL;
+
+  int opIt;
+  for(opIt=0 ; opIt<VR_OP ; opIt++){
+    vr.instr_op[opIt]=False;
+  }
+
+  vr.firstSeed=(unsigned int)(-1);
+  vr.mca_precision_double=53;
+  vr.mca_precision_float=24;
+  vr.mca_mode=MCAMODE_MCA;
+
+  vr.checknan=True;
+
+  vr.checkCancellation=False;
+  vr.cc_threshold_float=18;
+  vr.cc_threshold_double=40;
+
+  vr.dumpCancellation=False;
+  vr.cancellationSource=NULL;
+
+
+}
+
+Bool vr_process_clo (const HChar *arg) {
+  Bool bool_val;
+  const HChar * str;
+  //Option --backend=
+  if      (VG_XACT_CLOM (cloPD, arg, "--backend=verrou",
+                         vr.backend, vr_verrou)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--backend=mcaquad",
+                         vr.backend, vr_mcaquad)) {}
+
+  //Option --rounding-mode=
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=random",
+                         vr.roundingMode, VR_RANDOM)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=average",
+                         vr.roundingMode, VR_AVERAGE)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=nearest",
+                         vr.roundingMode, VR_NEAREST)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=upward",
+                         vr.roundingMode, VR_UPWARD)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=downward",
+                         vr.roundingMode, VR_DOWNWARD)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=toward_zero",
+                         vr.roundingMode, VR_ZERO)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=farthest",
+                         vr.roundingMode, VR_FARTHEST)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=float",
+                         vr.roundingMode, VR_FLOAT)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--rounding-mode=native",
+                         vr.roundingMode, VR_NATIVE)) {}
+
+  //Option mcaquad
+  else if (VG_INT_CLOM  (cloPD, arg, "--mca-precision-double",
+                         vr.mca_precision_double)){}
+  else if (VG_INT_CLOM  (cloPD, arg, "--mca-precision-float",
+                         vr.mca_precision_float)){}
+  else if (VG_XACT_CLOM (cloPD, arg, "--mca-mode=rr",
+                         vr.mca_mode, MCAMODE_RR)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--mca-mode=pb",
+                         vr.mca_mode, MCAMODE_PB)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--mca-mode=mca",
+                         vr.mca_mode, MCAMODE_MCA)) {}
+  else if (VG_XACT_CLOM (cloPD, arg, "--mca-mode=ieee",
+                         vr.mca_mode, MCAMODE_IEEE)) {}
+
+  //Options to choose op to instrument
+  else if (VG_XACT_CLO (arg, "--vr-instr=add",
+                        vr.instr_op[VR_OP_ADD] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=sub",
+                        vr.instr_op[VR_OP_SUB] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=mul",
+                        vr.instr_op[VR_OP_MUL] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=div",
+                        vr.instr_op[VR_OP_DIV] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=mAdd",
+                        vr.instr_op[VR_OP_MADD] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=mSub",
+                        vr.instr_op[VR_OP_MSUB] , True)) {}
+  else if (VG_XACT_CLO (arg, "--vr-instr=conv",
+                        vr.instr_op[VR_OP_CONV] , True)) {}
+
+  //Option to enable check-cancellation backend
+  else if (VG_BOOL_CLO (arg, "--check-cancellation", bool_val)) {
+     vr.checkCancellation= bool_val;
+  }
+  else if (VG_INT_CLO (arg, "--cc-threshold-double",
+                       vr.cc_threshold_double)){}
+  else if (VG_INT_CLO (arg, "--cc-threshold-float",
+                       vr.cc_threshold_float)){}
+
+  else if (VG_BOOL_CLO (arg, "--check-nan", bool_val)) {
+     vr.checknan= bool_val;
+  }
+
+  //Option to toggle instrumentation of scalar (x387) instructions
+  else if (VG_BOOL_CLO (arg, "--vr-instr-scalar", bool_val)) {
+    vr.instr_scalar= bool_val;
+  }
+
+  //Option --vr-verbose (to avoid verbose of valgrind)
+  else if (VG_BOOL_CLO (arg, "--vr-verbose", bool_val)) {
+    vr.verbose = bool_val;
+  }
+
+  //Option --vr-unsafe-llo-optim (performance optimization)
+  else if (VG_BOOL_CLO (arg, "--vr-unsafe-llo-optim", bool_val)) {
+    vr.unsafe_llo_optim = bool_val;
+  }
+
+  //Option --count-op
+  else if (VG_BOOL_CLO (arg, "--count-op", bool_val)) {
+    vr.count = bool_val;
+  }
+
+  // Instrumentation at start
+  else if (VG_BOOL_CLOM (cloPD, arg, "--instr-atstart", bool_val)) {
+    vr.instrument = bool_val ? VR_INSTR_ON : VR_INSTR_OFF;
+  }
+
+  // Exclusion of specified symbols
+  else if (VG_STR_CLOM (cloPD, arg, "--gen-exclude", str)) {
+    //vr.excludeFile = VG_(strdup)("vr.process_clo.gen-exclude", str);
+    vr.excludeFile = VG_(expand_file_name)("vr.process_clo.gen-exclude", str);
+    vr.genExclude = True;
+  }
+  /* else if (VG_STR_CLOM (cloPD, arg, "--gen-above", str)) { */
+  /*   vr.genAbove = VG_(strdup)("vr.process_clo.gen-above", str); */
+  /* } */
+  else if (VG_STR_CLOM (cloPD, arg, "--exclude", str)) {
+    vr.exclude = vr_loadExcludeList(vr.exclude, str);
+  }
+
+  else if (VG_XACT_CLOM (cloPD, arg, "--gen-trace",
+                         vr.genTrace, True)) {}
+  else if (VG_STR_CLOM  (cloPD, arg, "--trace", str)) {
+    vr.includeTrace = vr_loadIncludeTraceList(vr.includeTrace, str);
+    vr.genTrace = True;
+  }
+
+  // Instrumentation of only specified source lines
+  else if (VG_STR_CLOM (cloPD, arg, "--gen-source", str)) {
+    //vr.includeSourceFile = VG_(strdup)("vr.process_clo.gen-source", str);
+    vr.includeSourceFile = VG_(expand_file_name)("vr.process_clo.gen-source", str);
+    vr.genIncludeSource = True;
+  }
+  else if (VG_STR_CLOM (cloPD, arg, "--source", str)) {
+    vr.includeSource = vr_loadIncludeSourceList(vr.includeSource, str);
+    vr.sourceActivated = True;
+  }
+
+  else if (VG_STR_CLOM (cloPD, arg, "--cc-gen-file", str)) {
+     vr.cancellationDumpFile = VG_(expand_file_name)("vr.process_clo.cc-file", str);
+     vr.dumpCancellation = True;
+  }
+  // Set the pseudo-Random Number Generator
+  else if (VG_STR_CLOM (cloPD, arg, "--vr-seed", str)) {
+    //vr_rand_setSeed (&vr_rand, VG_(strtoull10)(str, NULL));
+    vr.firstSeed=VG_(strtoull10)(str, NULL);
+    if(vr.firstSeed==(unsigned int)(-1)){
+      VG_(tool_panic) ( "--vr-seed=-1 is not taken into account\n");
+    }
+  }
+
+  // Unknown option
+  else {
+    return False;
+  }
+
+  return True;
+}
+
+void vr_print_usage (void) {
+  VG_(printf)
+    (
+#include "vr_clo.txt"
+);
+}
+
+void vr_print_debug_usage (void) {
+  vr_print_usage();
+}
diff --git a/verrou/vr_clo.txt b/verrou/vr_clo.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4b9f87033dbd3cae8c0494e91da469f37d92535a
--- /dev/null
+++ b/verrou/vr_clo.txt
@@ -0,0 +1,139 @@
+"    General options\n"
+"      --vr-verbose=<yes|no> [default=no]\n"
+"          Toggle verbosity: prints messages for x387 instructions and client\n"
+"          requests.\n"
+"\n"
+"      --count-op=<yes|no> [default=yes]\n"
+"          Toggle floating-point operations counting.\n"
+"\n"
+"      --backend=<verrou|mcaquad> [default=verrou]\n"
+"          Select the verrou or mcaquad backend.  The verrou backend emulates\n"
+"          several rounding modes (see the --rounding-mode option).  The\n"
+"          mcaquad backend performs MCA (Monte Carlo Arithmetic) based on\n"
+"          extended precision in quad format (see the --mca-mode and\n"
+"          --mca-precision options).  The integration of the mcaquad backend\n"
+"          in the verrou frontend is still considered experimental.\n"
+"\n"
+"    Perturbation of floating-point operations\n"
+"      --rounding-mode=<random|average|upward|downward|toward_zero|farthest|float>\n"
+"      [default=nearest]\n"
+"          Emulate the given rounding mode for operations instrumented with\n"
+"          the verrou backend. If this option is not provided, Verrou always\n"
+"          rounds to the nearest floating-point value. Supported modes are:\n"
+"\n"
+"          ·   Random rounding modes: random, average.\n"
+"\n"
+"          ·   IEEE-754 rounding modes: nearest (default), upward, downward,\n"
+"              toward_zero.\n"
+"\n"
+"          ·   Other: farthest, float.\n"
+"\n"
+"      --mca-mode=<mca|rr|pb|ieee> [default=mca]\n"
+"          Emulate the given MCA mode for operations instrumented with the\n"
+"          mcaquad backend. Supported mca modes are:\n"
+"\n"
+"          ·   mca : full mca (default)\n"
+"\n"
+"          ·   rr : random rounding\n"
+"\n"
+"          ·   pb : precision bounding\n"
+"\n"
+"          ·   ieee : ieee (rounding to nearest)\n"
+"\n"
+"          The mcaquad backend implementation comes from Verificarlo: more\n"
+"          information is available on the Verificarlo GitHub page[1].\n"
+"\n"
+"      --mca-precision-double=<integer> [default=53]\n"
+"      --mca-precision-float=<integer> [default=24]\n"
+"          Configure the magnitude (in bits of precision) of the inexact\n"
+"          function used by the mcaquad backend, for double and float\n"
+"          operations respectively.\n"
+"\n"
+"      --vr-seed=RNG_SEED [default=automatically generated]\n"
+"          If present, this option allows setting the seed of the\n"
+"          pseudo-Random Number Generator used for the random or average\n"
+"          rounding modes. This helps reproduce the behaviour of a program\n"
+"          under Verrou.\n"
+"\n"
+"          If this option is omitted, the pRNG is seeded with a value based on\n"
+"          the current time and process id, so that it should change at each\n"
+"          execution.\n"
+"\n"
+"      --vr-instr=<add|sub|mul|div|mAdd|mSub|conv> [default=all]\n"
+"          Toggle instrumentation of floating-point additions, subtractions,\n"
+"          multiplications, divisions, fused multiply additions, fused\n"
+"          multiply subtractions, conversions (only double to float cast)\n"
+"          respectively. This option can be set multiple times to instrument\n"
+"          multiple types of operations.\n"
+"\n"
+"          If this option is not provided, all supported operations types are\n"
+"          instrumented.\n"
+"\n"
+"      --vr-instr-scalar=<yes|no> [default=no]\n"
+"          Toggle instrumentation of x387 scalar instructions.\n"
+"\n"
+"    Instrumentation scope\n"
+"      --instr-atstart=<yes|no> [default=yes]\n"
+"          Toggle instrumentation state on or off at program start. Useful in\n"
+"          combination with client requests.\n"
+"\n"
+"      --exclude=FILE\n"
+"          Symbols listed in FILE will be left uninstrumented.\n"
+"\n"
+"      --gen-exclude=FILE\n"
+"          Generate in FILE a list of all symbols (containing perturbed\n"
+"          floating-point instructions) encountered during program execution.\n"
+"          This is useful to build an exclusion list.\n"
+"\n"
+"          In combination with --exclude, only list symbols which were not\n"
+"          already present in the provided exclusion list.\n"
+"\n"
+"          WARNING: in order to generate a correct list, the whole binary\n"
+"          (including symbols listed in the list provided using --exclude)\n"
+"          must be instrumented. When using both --gen-exclude and --exclude,\n"
+"          it is advised to avoid perturbing the rounding mode by using\n"
+"          --rounding-mode=nearest.\n"
+"\n"
+"      --source=FILE\n"
+"          When this option is present, only instructions coming from source\n"
+"          code lines listed in FILE are instrumented.\n"
+"\n"
+"      --gen-source=FILE\n"
+"          Generate in FILE the list of all source code lines (containing\n"
+"          perturbed floating-point instructions) encountered during program\n"
+"          execution.\n"
+"\n"
+"          In combination with --source, only list source code lines which\n"
+"          were not already present in the provided list.\n"
+"\n"
+"    Detection options\n"
+"      --check-nan=<yes|no> [default=yes]\n"
+"          Activate NaN detection. A NaN produces a valgrind error. This\n"
+"          functionality requires the verrou backend.\n"
+"\n"
+"      --check-cancellation=<yes|no> [default=no]\n"
+"          Activate cancellation detection. A cancellation produces a valgrind\n"
+"          error. This functionality is available for the verrou and mcaquad\n"
+"          backends. The level of detected cancellations can be configured\n"
+"          with --cc-threshold-float and --cc-threshold-double.\n"
+"\n"
+"      --cc-gen-file=<FILE>\n"
+"          Generate in FILE, using the same format as --source, the list of\n"
+"          source code lines which produce at least one cancellation. This\n"
+"          functionality is available for the verrou and mcaquad backends. The\n"
+"          level of detected cancellations can be configured with\n"
+"          --cc-threshold-float and --cc-threshold-double.\n"
+"\n"
+"      --cc-threshold-float=<integer> [default=24]\n"
+"          Configure the cancellation detection threshold for float\n"
+"          operations. The default value is still experimental and may\n"
+"          change.\n"
+"\n"
+"      --cc-threshold-double=<integer> [default=40]\n"
+"          Configure the cancellation detection threshold for double\n"
+"          operations. The default value is still experimental and may\n"
+"          change.\n"
+"\n"
+"    Performance optimization\n"
+"      --vr-unsafe-llo-optim=<yes|no> [default=no]\n"
+"          Activate a faster instrumentation process, which is unsafe when\n"
+"          the binary mixes llo and vector instructions.\n"
+"\n"
diff --git a/verrou/vr_clreq.c b/verrou/vr_clreq.c
new file mode 100644
index 0000000000000000000000000000000000000000..cb39d4a2646919adf7c70cea9e0ab8a66c71fe8f
--- /dev/null
+++ b/verrou/vr_clreq.c
@@ -0,0 +1,249 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains code related to client requests handling. ---*/
+/*---                                                   vr_clreq.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+#include "pub_tool_transtab.h"       // VG_(discard_translations_safely)
+// * Start-stop instrumentation
+
+void vr_set_instrument_state (const HChar* reason, Vr_Instr state, Bool discard) {
+  if (vr.instrument == state) {
+    if(vr.verbose){
+      VG_(message)(Vg_DebugMsg,"%s: instrumentation already %s\n",
+		   reason, (state==VR_INSTR_ON) ? "ON" : "OFF");
+    }
+
+    return;
+  }
+
+  vr.instrument = state;
+
+  if(discard){
+     VG_(discard_translations_safely)( (Addr)0x1000, ~(SizeT)0xfff, "verrou");
+  }
+     /* if(vr.instrument == VR_INSTR_ON){ */
+  /*   verrou_begin_instr(); */
+  /* }else{ */
+  /*   verrou_end_instr(); */
+  /* } */
+
+  if(vr.verbose){
+    VG_(message)(Vg_DebugMsg, "%s: instrumentation switched %s\n",
+		 reason, (state==VR_INSTR_ON) ? "ON" : "OFF");
+  }
+}
+
+// * Enter/leave deterministic section
+
+static void vr_deterministic_section_name (unsigned int level,
+                                           HChar * name,
+                                           unsigned int len)
+{
+  Addr ips[8];
+  const HChar* fnname;
+  const HChar* filename;
+  UInt  linenum;
+  Addr  addr;
+  DiEpoch de = VG_(current_DiEpoch)();
+
+  VG_(get_StackTrace)(VG_(get_running_tid)(),
+                      ips, 8,
+                      NULL, NULL,
+                      0);
+  addr = ips[level];
+
+  //fnname[0] = 0;
+  VG_(get_fnname)(de, addr, &fnname);
+
+  //  filename[0] = 0;
+  VG_(get_filename_linenum)(de,
+                            addr,
+                            &filename,
+                            NULL,
+                            &linenum);
+  VG_(snprintf)(name, len,
+                "%s (%s:%u)", fnname, filename, linenum);
+}
+
+static unsigned int vr_deterministic_section_hash (HChar const*const name)
+{
+  unsigned int hash = VG_(getpid)();
+  int i = 0;
+  while (name[i] != 0) {
+    hash += i * name[i];
+    ++i;
+  }
+  return hash;
+}
+
+static void vr_start_deterministic_section (unsigned int level) {
+  HChar name[256];
+  unsigned int hash;
+
+  vr_deterministic_section_name (level, name, 256);
+
+  hash = vr_deterministic_section_hash (name);
+  verrou_set_seed (hash);
+#ifdef USE_VERROU_QUAD
+  mcaquad_set_seed (hash);
+#endif
+  VG_(message)(Vg_DebugMsg, "Entering deterministic section %u: %s\n",
+               hash, name);
+}
+
+static void vr_stop_deterministic_section (unsigned int level) {
+  HChar name[256];
+  vr_deterministic_section_name (level, name, 256);
+
+  VG_(message)(Vg_DebugMsg, "Leaving deterministic section: %s\n",
+               name);
+  verrou_set_random_seed ();
+#ifdef USE_VERROU_QUAD 
+  mcaquad_set_random_seed ();
+#endif
+}
+
+
+
+
+
+// * Handle client requests
+
+// ** GDB monitor commands
+
+static void vr_handle_monitor_instrumentation_print (void) {
+  VG_(gdb_printf) ("instrumentation: %s\n",
+                   vr.instrument==VR_INSTR_ON ? "on" : "off");
+}
+
+static Bool vr_handle_monitor_instrumentation (HChar ** ssaveptr) {
+  HChar * arg = VG_(strtok_r)(0, " ", ssaveptr);
+
+  if (!arg) { /* no argument */
+    vr_handle_monitor_instrumentation_print();
+    return True;
+  }
+
+  switch (VG_(keyword_id) ("on off", arg, kwd_report_duplicated_matches)) {
+  case -2: /* multiple matches */
+    return True;
+  case -1: /* not found */
+    return False;
+  case 0: /* on */
+     vr_set_instrument_state("Monitor", VR_INSTR_ON, True);
+    vr_handle_monitor_instrumentation_print();
+    return True;
+  case 1: /* off */
+     vr_set_instrument_state("Monitor", VR_INSTR_OFF, True);
+    vr_handle_monitor_instrumentation_print();
+    return True;
+  }
+  return False;
+}
+
+static Bool vr_handle_monitor_help (void) {
+  VG_(gdb_printf)("\n");
+  VG_(gdb_printf)("verrou monitor commands:\n");
+  VG_(gdb_printf)("  help                     : print this help\n");
+  VG_(gdb_printf)("  count                    : print instruction counters\n");
+  VG_(gdb_printf)("  instrumentation          : get instrumentation state\n");
+  VG_(gdb_printf)("  instrumentation [on|off] : set instrumentation state\n");
+  VG_(gdb_printf)("\n");
+  return True;
+}
+
+static Bool vr_handle_monitor_command (HChar * req) {
+    HChar * wcmd;
+    HChar s[VG_(strlen)(req) + 1];
+    HChar *ssaveptr;
+
+    VG_(strcpy)(s, req);
+
+    wcmd = VG_(strtok_r)(s, " ", &ssaveptr);
+    switch (VG_(keyword_id) ("help instrumentation count",
+                             wcmd, kwd_report_duplicated_matches)) {
+    case -2: /* multiple matches */
+      return True;
+    case -1: /* not found */
+      return False;
+    case 0: /* help */
+      return vr_handle_monitor_help ();
+    case 1: /* instrumentation */
+      return vr_handle_monitor_instrumentation (&ssaveptr);
+    case 2: /* count */
+      vr_ppOpCount();
+      return True;
+    }
+    return False;
+}
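+
+/* These monitor commands are reachable through Valgrind's gdbserver, e.g.
+   from a GDB session attached with `target remote | vgdb`:
+
+     (gdb) monitor help
+     (gdb) monitor instrumentation off
+     ... run the part of the program that should not be perturbed ...
+     (gdb) monitor instrumentation on
+     (gdb) monitor count
+
+   The keywords match those parsed above; the gdbserver plumbing itself is the
+   standard Valgrind --vgdb mechanism. */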
+
+// ** Client requests entry point
+
+Bool vr_handle_client_request (ThreadId tid, UWord *args, UWord *ret) {
+  if (!VG_IS_TOOL_USERREQ('V','R', args[0])
+      && VG_USERREQ__GDB_MONITOR_COMMAND != args[0])
+    return False;
+
+  switch (args[0]) {
+  case VR_USERREQ__START_INSTRUMENTATION:
+     vr_set_instrument_state ("Client Request", True, True);
+    *ret = 0; /* meaningless */
+    break;
+  case VR_USERREQ__STOP_INSTRUMENTATION:
+     vr_set_instrument_state ("Client Request", False, True);
+    *ret = 0; /* meaningless */
+    break;
+  case VR_USERREQ__START_DETERMINISTIC:
+    vr_start_deterministic_section (args[1]);
+    *ret = 0; /* meaningless */
+    break;
+  case VR_USERREQ__STOP_DETERMINISTIC:
+    vr_stop_deterministic_section (args[1]);
+    *ret = 0; /* meaningless */
+    break;
+  case VR_USERREQ__DISPLAY_COUNTERS:
+    vr_ppOpCount();
+    *ret = 0; /* meaningless */
+    break;
+  case VR_USERREQ__DUMP_COVER:
+    *ret=vr_traceBB_dumpCov();
+    break;
+  case VG_USERREQ__GDB_MONITOR_COMMAND:
+    if (vr_handle_monitor_command((HChar*)args[1])) {
+      *ret = 1;
+      return True;
+    } else {
+      *ret = 0;
+      return False;
+    }
+  }
+  return True;
+}
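+
+/* Sketch of how a client program reaches this entry point, assuming the usual
+   client-request macros wrapping the VR_USERREQ__* constants (the macro names
+   below come from the tool's public header, which is not part of this file,
+   and are therefore an assumption):
+
+     // #include "verrou.h"
+     // ...
+     // VERROU_STOP_INSTRUMENTATION;   // -> VR_USERREQ__STOP_INSTRUMENTATION
+     // setup_phase();                 //    runs without rounding perturbation
+     // VERROU_START_INSTRUMENTATION;  // -> VR_USERREQ__START_INSTRUMENTATION
+     // solver_phase();                //    runs with perturbed rounding
+
+   Only the VR_USERREQ__* request codes handled in the switch above are
+   guaranteed by this file. */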
diff --git a/verrou/vr_error.c b/verrou/vr_error.c
new file mode 100644
index 0000000000000000000000000000000000000000..87a42efeb5e7d64d71d3f745962333a52be9460e
--- /dev/null
+++ b/verrou/vr_error.c
@@ -0,0 +1,235 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains code related to error handling.           ---*/
+/*---                                                   vr_error.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+
+typedef struct Vr_InstrError_ Vr_InstrError;
+struct Vr_InstrError_ {
+  IROp op;
+};
+
+typedef union Vr_Error_ Vr_Error;
+union Vr_Error_ {
+  Vr_InstrError instr;
+};
+
+
+static const HChar* vr_error_name (Vr_ErrorKind kind) {
+  switch (kind) {
+  case VR_ERROR_UNCOUNTED:
+    return "Uncounted operation";
+  case VR_ERROR_SCALAR:
+    return "Scalar instruction";
+  case VR_ERROR_NAN:
+    return "NaN";
+  case VR_ERROR_CC:
+    return "Cancellation";
+
+  default:
+    return NULL;
+  }
+}
+
+
+// * Errors at the instruction level
+
+void vr_maybe_record_ErrorOp (Vr_ErrorKind kind, IROp op) {
+  ThreadId tid = VG_(get_running_tid)();
+  Addr addr;
+  VG_(get_StackTrace)(tid, &addr, 1, NULL, NULL, 0);
+
+  HChar string[10];
+  VG_(snprintf)(string, 10, "%u", op);
+
+  Vr_Error extra;
+  extra.instr.op = op;
+  VG_(maybe_record_error)(tid,
+                          kind,
+                          addr,
+                          string,
+                          &extra);
+}
+
+static void vr_pp_ErrorOp (const Error* err) {
+  Vr_Error *extra = VG_(get_error_extra)(err);
+
+  VG_(umsg)("%s: ", vr_get_error_name(err));
+  VG_(message_flush)();
+  ppIROp (extra->instr.op);
+  VG_(printf)(" (%s)", VG_(get_error_string)(err));
+  VG_(umsg)("\n");
+  VG_(pp_ExeContext)(VG_(get_error_where)(err));
+}
+
+
+// * Errors happening at run time
+
+void vr_maybe_record_ErrorRt (Vr_ErrorKind kind) {
+  ThreadId tid = VG_(get_running_tid)();
+  Addr addr;
+  VG_(get_StackTrace)(tid, &addr, 1, NULL, NULL, 0);
+
+  HChar string[1];
+  string[0] = 0;
+
+  VG_(maybe_record_error)(tid,
+                          kind,
+                          addr,
+                          string,
+                          NULL);
+}
+
+void vr_handle_NaN (void) {
+   if(vr.checknan){
+      vr_maybe_record_ErrorRt(VR_ERROR_NAN);
+   }
+}
+void vr_handle_CC (int unused) {
+   ThreadId tid = VG_(get_running_tid)();
+   Addr addr;
+   VG_(get_StackTrace)(tid, &addr, 1, NULL, NULL, 0);
+
+   if(vr.dumpCancellation){
+      DiEpoch di=VG_(current_DiEpoch)();
+      const HChar* fileName;
+      const HChar* dirName;
+      const HChar* symName;
+      UInt lineNum;
+      //UInt errorName=
+      VG_(get_filename_linenum)(di,addr,
+                                &fileName,
+                                &dirName,
+                                &lineNum );
+      VG_(get_fnname)(di, addr, &symName);
+//      VG_(umsg)("test ? %s - %s : %u   --> %u \n", symName,fileName, lineNum,errorName);
+      vr_includeSource_generate (&vr.cancellationSource , symName, fileName, lineNum);
+   }
+
+   if(vr.checkCancellation){
+      HChar string[1];
+      string[0] = 0;
+      VG_(maybe_record_error)(tid,
+                              VR_ERROR_CC,
+                              addr,
+                              string,
+                              NULL);
+   }
+}
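+
+/* vr_handle_NaN and vr_handle_CC are runtime callbacks, triggered when an
+   instrumented operation produces a NaN or detects a cancellation.  Depending
+   on the options, a cancellation is recorded in vr.cancellationSource (so the
+   list of offending call sites can be dumped later), reported as a
+   VR_ERROR_CC error, or both. */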
+
+
+static void vr_pp_ErrorRt (const Error* err) {
+  VG_(umsg)("%s: ", vr_get_error_name(err));
+  VG_(message_flush)();
+  VG_(umsg)("\n");
+  VG_(pp_ExeContext)(VG_(get_error_where)(err));
+}
+
+
+// * Standard tools interface
+
+const HChar* vr_get_error_name (const Error* err) {
+  return vr_error_name (VG_(get_error_kind)(err));
+}
+
+Bool vr_recognised_suppression (const HChar* name, Supp* su) {
+  Vr_ErrorKind kind;
+  for (kind = 0 ; kind < VR_ERROR ; ++kind) {
+    const HChar* errorName = vr_error_name(kind);
+    if (errorName && VG_(strcmp)(name, errorName) == 0)
+      break;
+  }
+
+  if (kind == VR_ERROR) {
+    return False;
+  } else {
+    VG_(set_supp_kind)(su, kind);
+    return True;
+  }
+}
+
+void vr_before_pp_Error (const Error* err) {}
+
+void vr_pp_Error (const Error* err) {
+  switch (VG_(get_error_kind)(err)) {
+  case VR_ERROR_UNCOUNTED:
+  case VR_ERROR_SCALAR:
+    vr_pp_ErrorOp (err);
+    break;
+  case VR_ERROR_NAN:
+    vr_pp_ErrorRt (err);
+    break;
+  case VR_ERROR_CC:
+     vr_pp_ErrorRt (err);
+    break;
+  }
+}
+
+Bool vr_eq_Error (VgRes res, const Error* e1, const Error* e2) {
+  return VG_(get_error_address)(e1) == VG_(get_error_address)(e2);
+}
+
+UInt vr_update_extra (const Error* err) {
+  return sizeof(Vr_Error);
+}
+
+Bool vr_error_matches_suppression (const Error* err, const Supp* su) {
+  if (VG_(get_error_kind)(err) != VG_(get_supp_kind)(su)) {
+    return False;
+  }
+
+  if (VG_(strcmp)(VG_(get_error_string)(err), VG_(get_supp_string)(su)) != 0) {
+    return False;
+  }
+
+  return True;
+}
+
+Bool vr_read_extra_suppression_info (Int fd, HChar** bufpp, SizeT* nBufp,
+                                     Int* lineno, Supp* su) {
+  VG_(get_line)(fd, bufpp, nBufp, lineno);
+  VG_(set_supp_string)(su, VG_(strdup)("vr.resi.1", *bufpp));
+  return True;
+}
+
+SizeT vr_print_extra_suppression_info (const Error* err,
+                                      /*OUT*/HChar* buf, Int nBuf) {
+  HChar* res=VG_(strncpy)(buf, VG_(get_error_string)(err), nBuf);
+  SizeT len=VG_(strlen)(res);
+  return len ;
+}
+
+SizeT vr_print_extra_suppression_use (const Supp* su,
+                                     /*OUT*/HChar* buf, Int nBuf) {
+  return (SizeT)0; /* no extra suppression-use information to print */
+}
+
+void vr_update_extra_suppression_use (const Error* err, const Supp* su) {}
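+
+/* Suppressions follow the standard Valgrind suppression-file format: the kind
+   is matched against the names returned by vr_error_name() and the extra line
+   is compared with the error string (the IROp number for instruction-level
+   errors).  A hedged sketch of what an entry could look like -- the tool-name
+   prefix depends on the name registered at tool initialisation, which is not
+   visible in this file:
+
+     {
+        ignore-this-scalar-op
+        Verrou:Scalar instruction
+        <IROp number as printed in the error>
+        fun:some_function
+     }
+*/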
diff --git a/verrou/vr_exclude.c b/verrou/vr_exclude.c
new file mode 100644
index 0000000000000000000000000000000000000000..651ec05ed6784bd87c9693ef57f412ddf09b405a
--- /dev/null
+++ b/verrou/vr_exclude.c
@@ -0,0 +1,326 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains code allowing to exclude some symbols     ---*/
+/*--- from the instrumentation.                                    ---*/
+/*---                                                 vr_exclude.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+
+#define LINE_SIZEMAX VR_FNNAME_BUFSIZE
+
+#define UNAMED_FUNCTION_VERROU "unamed_function_verrou"
+#define UNAMED_OBJECT_VERROU "unamed_object_verrou"
+
+static Vr_Exclude* vr_addExclude (Vr_Exclude* list, const HChar * fnname, const HChar * objname) {
+  Vr_Exclude * cell = VG_(malloc)("vr.addExclude.1", sizeof(Vr_Exclude));
+  cell->fnname  = VG_(strdup)("vr.addExclude.2", fnname);
+  cell->objname = VG_(strdup)("vr.addExclude.3", objname);
+  cell->used    = False;
+  cell->next    = list;
+  return cell;
+}
+
+static Vr_Exclude *
+vr_findExclude (Vr_Exclude* list, const HChar * fnname, const HChar * objname) {
+  Vr_Exclude * exclude;
+  for (exclude = list ; exclude != NULL ; exclude = exclude->next) {
+    if (exclude->fnname[0] != '*'
+	&& VG_(strcmp)(exclude->fnname, fnname) != 0)
+      continue;
+
+    if (exclude->objname[0] != '*'
+	&& VG_(strcmp)(exclude->objname, objname) != 0)
+      continue;
+
+    return exclude;
+  }
+
+  return NULL;
+}
+
+void vr_freeExcludeList (Vr_Exclude* list) {
+  while (list != NULL) {
+    Vr_Exclude *next = list->next;
+    VG_(free)(list->fnname);
+    VG_(free)(list->objname);
+    VG_(free)(list);
+    list = next;
+  }
+}
+
+void
+vr_dumpExcludeList (Vr_Exclude* list, Vr_Exclude* end, const HChar* fname) {
+  Int fd = VG_(fd_open)(fname,
+			VKI_O_CREAT|VKI_O_TRUNC|VKI_O_WRONLY,
+			VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IWGRP);
+  VG_(umsg)("Dumping exclusions list to `%s'... ", fname);
+  if (fd == -1) {
+    VG_(umsg)("ERROR!\n");
+    return;
+  }
+
+  Vr_Exclude * exclude;
+  for (exclude = list ; exclude != end ; exclude = exclude->next) {
+    VG_(write)(fd, exclude->fnname, VG_(strlen)(exclude->fnname));
+    VG_(write)(fd, "\t", 1);
+    VG_(write)(fd, exclude->objname, VG_(strlen)(exclude->objname));
+    VG_(write)(fd, "\n", 1);
+  }
+  VG_(close)(fd);
+
+  VG_(umsg)("OK.\n");
+}
+
+Vr_Exclude * vr_loadExcludeList (Vr_Exclude * list, const HChar * fname) {
+  VG_(umsg)("Loading exclusions list from `%s'... ", fname);
+  Int fd = VG_(fd_open)(fname,VKI_O_RDONLY, 0);
+  if (fd == -1) {
+    VG_(umsg)("ERROR (open)\n");
+    return list;
+  }
+
+  SizeT nLine = LINE_SIZEMAX;
+  HChar *line = VG_(malloc)("vr.loadExcludes.1", nLine*sizeof(HChar));
+  Int lineno = 0;
+
+  while (! VG_(get_line)(fd, &line, &nLine, &lineno)) {
+    HChar * c;
+
+    // Skip non-blank characters
+    for (c = line;
+	 c<line+LINE_SIZEMAX && *c != 0 && *c != '\t' && *c != ' ';
+	 ++c) {}
+    if (*c == 0 || c>line+LINE_SIZEMAX-1) {
+      VG_(umsg)("ERROR (parse) :%s \n",line);
+      return list;
+    }
+    *c = 0;
+
+    // Skip blank characters
+    for (++c;
+	 c<line+LINE_SIZEMAX && *c != 0 && (*c == '\t' || *c == ' ');
+	 ++c) {}
+
+    list = vr_addExclude (list,
+			  line, /*fnname=*/
+			  c);/*objname*/
+  }
+
+  VG_(free)(line);
+  VG_(close)(fd);
+
+  VG_(umsg)("OK.\n");
+
+  return list;
+}
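+
+/* The exclusion list is a plain text file with one rule per line, in the same
+   format produced by vr_dumpExcludeList above:
+
+     function_name<TAB>object_name
+
+   A field starting with '*' matches anything (see vr_findExclude).  Purely
+   illustrative example:
+
+     cos<TAB>/usr/lib/libm.so.6
+     *<TAB>/usr/lib/libopenblas.so
+*/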
+
+Bool vr_excludeIRSB (const HChar** fnnamePtr, const HChar **objnamePtr) {
+  // Never exclude anything when generating the list
+  if (vr.genExclude)
+    return False;
+
+  // Never exclude functions / objects unless they are explicitly listed
+  Vr_Exclude *exclude = vr_findExclude (vr.exclude, *fnnamePtr, *objnamePtr);
+  if (exclude == NULL) {
+    return False;
+  }
+
+  // Inform the first time a rule is used
+  if (!exclude->used) {
+    VG_(umsg)("Using exclusion rule: %s\t%s\n", exclude->fnname, exclude->objname);
+    exclude->used = True;
+  }
+
+  return True;
+}
+
+
+void
+vr_excludeIRSB_generate (const HChar** fnnamePtr, const HChar **objnamePtr) {
+
+  // Never exclude functions / objects unless they are explicitly listed
+  Vr_Exclude *exclude = vr_findExclude (vr.exclude, *fnnamePtr, *objnamePtr);
+  if(exclude==NULL){
+    vr.exclude = vr_addExclude (vr.exclude, *fnnamePtr, *objnamePtr);
+  }
+}
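+
+/* When exclusion-list generation is active (vr.genExclude), nothing is ever
+   excluded; instead every (function, object) pair seen during instrumentation
+   is appended to vr.exclude, so that the complete list can be dumped at the
+   end of the run, trimmed by hand, and fed back through vr_loadExcludeList on
+   a later run. */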
+
+
+
+static Vr_IncludeSource*
+vr_addIncludeSource (Vr_IncludeSource* list, const HChar* fnname,
+		     const HChar * filename, UInt linenum) {
+  Vr_IncludeSource * cell = VG_(malloc)("vr.addIncludeSource.1", sizeof(Vr_IncludeSource));
+  cell->fnname   = VG_(strdup)("vr.addIncludeSource.2", fnname);
+  cell->filename = VG_(strdup)("vr.addIncludeSource.3", filename);
+  cell->linenum  = linenum;
+  cell->next     = list;
+  return cell;
+}
+
+static Vr_IncludeSource *
+vr_findIncludeSource (Vr_IncludeSource* list,
+		      const HChar* fnname,
+		      const HChar * filename, UInt linenum) {
+  Vr_IncludeSource * cell;
+  for (cell = list ; cell != NULL ; cell = cell->next) {
+    if (cell->linenum != linenum)
+      continue;
+
+    if (VG_(strcmp)(cell->filename, filename) != 0)
+      continue;
+
+    if (VG_(strcmp)(cell->fnname, fnname) != 0)
+      continue;
+
+    return cell;
+  }
+
+  return NULL;
+}
+
+void vr_freeIncludeSourceList (Vr_IncludeSource* list) {
+  while (list != NULL) {
+    Vr_IncludeSource *next = list->next;
+    VG_(free)(list->fnname);
+    VG_(free)(list->filename);
+    VG_(free)(list);
+    list = next;
+  }
+}
+
+void vr_dumpIncludeSourceList (Vr_IncludeSource * list, Vr_IncludeSource* end,
+			       const HChar * fname) {
+  Int fd = VG_(fd_open)(fname,
+			VKI_O_CREAT|VKI_O_TRUNC|VKI_O_WRONLY,
+			VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IWGRP);
+  VG_(umsg)("Dumping list of included sources to `%s'... ", fname);
+  if (fd == -1) {
+    VG_(umsg)("ERROR!\n");
+    return;
+  }
+
+  HChar linenum_[256];
+  Vr_IncludeSource * cell;
+  for (cell = list ; cell != end ; cell = cell->next) {
+    VG_(write)(fd, cell->filename, VG_(strlen)(cell->filename));
+    VG_(write)(fd, "\t", 1);
+    VG_(snprintf)(linenum_, 255, "%u", cell->linenum);
+    VG_(write)(fd, linenum_, VG_(strlen)(linenum_));
+    VG_(write)(fd, "\t", 1);
+    VG_(write)(fd, cell->fnname, VG_(strlen)(cell->fnname));
+    VG_(write)(fd, "\n", 1);
+  }
+  VG_(close)(fd);
+
+  VG_(umsg)("OK.\n");
+}
+
+Vr_IncludeSource *
+vr_loadIncludeSourceList (Vr_IncludeSource * list, const HChar * fname) {
+  VG_(umsg)("Loading list of included sources from `%s'... ", fname);
+  Int fd = VG_(fd_open)(fname,VKI_O_RDONLY, 0);
+  if (fd == -1) {
+    VG_(umsg)("ERROR (open)\n");
+    return list;
+  }
+
+  SizeT nLine = LINE_SIZEMAX ;
+  HChar *line = VG_(malloc)("vr.loadIncludeSources.1", nLine*sizeof(HChar));
+  Int lineno = 0;
+
+  while (! VG_(get_line)(fd, &line, &nLine, &lineno)) {
+    HChar * c;
+
+    HChar* filename = line;
+    // Skip non-blank characters
+    for (c = line;
+	 c<line+LINE_SIZEMAX && *c != 0 && *c != '\t' && *c != ' ';
+	 ++c) {}
+    if (*c == 0 || c>line+LINE_SIZEMAX-1) {
+      VG_(umsg)("ERROR (parse1) : %s\n",line);
+      return list;
+    }
+    *c = 0;
+
+    // Skip blank characters
+    for (++c;
+	 c<line+LINE_SIZEMAX && *c != 0 && (*c == '\t' || *c == ' ');
+	 ++c) {}
+    HChar* linenum_ = c;
+    // Skip non-blank characters
+    for (;
+	 c<line+LINE_SIZEMAX && *c != 0 && *c != '\t' && *c != ' ';
+	 ++c) {}
+    if (c>line+LINE_SIZEMAX-1) {
+      VG_(umsg)("ERROR (parse2) : %s\n",line);
+      return list;
+    }
+    if (*c==0) {
+      c = line + LINE_SIZEMAX;
+    } else {
+      *c = 0;
+      ++c;
+    }
+    UInt linenum = VG_(strtoull10)(linenum_,NULL);
+
+    // Skip blank characters
+    for (;
+	 c<line+LINE_SIZEMAX && *c != 0 && (*c == '\t' || *c == ' ');
+	 ++c) {}
+    HChar* fnname = c;
+
+    list = vr_addIncludeSource (list,fnname,filename,linenum);
+  }
+
+  VG_(free)(line);
+  VG_(close)(fd);
+
+  VG_(umsg)("OK.\n");
+
+  return list;
+}
+
+void
+vr_includeSource_generate (Vr_IncludeSource** list,
+			   const HChar* fnname,
+			   const HChar* filename, UInt linenum){
+  if (vr_findIncludeSource(*list, fnname, filename, linenum) == NULL) {
+    *list = vr_addIncludeSource (*list, fnname, filename, linenum);
+  }
+}
+
+Bool
+vr_includeSource (Vr_IncludeSource** list,
+		  const HChar* fnname, const HChar* filename, UInt linenum) {
+  return vr_findIncludeSource(*list, fnname, filename, linenum) != NULL;
+}
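+
+/* The source-inclusion list uses one location per line, in the format written
+   by vr_dumpIncludeSourceList above:
+
+     file_name<TAB>line_number<TAB>function_name
+
+   e.g. (illustrative values):
+
+     solver.c<TAB>42<TAB>conjugate_gradient
+
+   Unlike the exclusion rules, entries are matched exactly (no wildcards). */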
diff --git a/verrou/vr_generated_from_templates.h b/verrou/vr_generated_from_templates.h
new file mode 100644
index 0000000000000000000000000000000000000000..03fa9f344b36f87d687fef4f2e840508ba821334
--- /dev/null
+++ b/verrou/vr_generated_from_templates.h
@@ -0,0 +1,1073 @@
+//Generated by './generateBackendInterOperator.py'
+// generation of operation cast backend verrou
+
+
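+/* All wrappers below follow the same pattern: scalar floating-point operands
+   arrive as raw 32/64-bit integer bit patterns, are reinterpreted through
+   pointer casts, handed to the selected interflop backend (verrou, mcaquad,
+   optionally combined with the cancellation checker), and the bit pattern of
+   the result is returned.  For the 256-bit (AVX) variants only the second
+   operand is passed as arguments; the first operand is read from the global
+   buffers arg1CopyAvxDouble / arg1CopyAvxFloat, which are expected to be
+   filled by a separate helper call not defined in this header. */
+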
+static VG_REGPARM(3) Int vr_verroucast64FTo32F (Long a) {
+  double *arg1 = (double*)(&a);
+  float res;
+  interflop_verrou_cast_double_to_float(*arg1, &res,backend_verrou_context);
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation cast backend mcaquad
+
+
+static VG_REGPARM(3) Int vr_mcaquadcast64FTo32F (Long a) {
+  double *arg1 = (double*)(&a);
+  float res;
+  interflop_mcaquad_cast_double_to_float(*arg1, &res,backend_mcaquad_context);
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation add backend verrou
+
+
+static VG_REGPARM(2) Long vr_verrouadd64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_add_double(*arg1, *arg2, &res, backend_verrou_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verrouadd64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_add_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_verrou_add_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+}
+
+static VG_REGPARM(3) void vr_verrouadd64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verrouadd32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_add_float(*arg1, *arg2, &res, backend_verrou_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verrouadd32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_add_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verrouadd32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_add_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+
+// generation of operation sub backend verrou
+
+
+static VG_REGPARM(2) Long vr_verrousub64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_sub_double(*arg1, *arg2, &res, backend_verrou_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verrousub64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_sub_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_verrou_sub_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+}
+
+static VG_REGPARM(3) void vr_verrousub64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verrousub32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_sub_float(*arg1, *arg2, &res, backend_verrou_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verrousub32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_sub_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verrousub32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_sub_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+
+// generation of operation mul backend verrou
+
+
+static VG_REGPARM(2) Long vr_verroumul64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_mul_double(*arg1, *arg2, &res, backend_verrou_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroumul64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_mul_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_verrou_mul_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+}
+
+static VG_REGPARM(3) void vr_verroumul64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_mul_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verroumul32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_mul_float(*arg1, *arg2, &res, backend_verrou_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroumul32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_mul_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verroumul32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_mul_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+
+// generation of operation div backend verrou
+
+
+static VG_REGPARM(2) Long vr_verroudiv64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_div_double(*arg1, *arg2, &res, backend_verrou_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroudiv64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_div_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_verrou_div_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+}
+
+static VG_REGPARM(3) void vr_verroudiv64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_div_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verroudiv32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_div_float(*arg1, *arg2, &res, backend_verrou_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroudiv32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_div_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verroudiv32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_div_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+  }
+}
+
+
+// generation of operation add backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquadadd64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_add_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadadd64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_add_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_mcaquad_add_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquadadd64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquadadd32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_add_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadadd32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_add_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquadadd32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_add_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+
+// generation of operation sub backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquadsub64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_sub_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadsub64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_sub_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_mcaquad_sub_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquadsub64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquadsub32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_sub_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadsub32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_sub_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquadsub32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_sub_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+
+// generation of operation mul backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquadmul64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_mul_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadmul64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_mul_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_mcaquad_mul_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquadmul64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_mul_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquadmul32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_mul_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadmul32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_mul_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquadmul32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_mul_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+
+// generation of operation div backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquaddiv64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_div_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquaddiv64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_div_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_mcaquad_div_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquaddiv64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_div_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquaddiv32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_div_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquaddiv32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_div_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquaddiv32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_div_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+  }
+}
+
+
+// generation of operation add backend verrou
+
+
+static VG_REGPARM(2) Long vr_verroucheckcancellationadd64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_add_double(*arg1, *arg2, &res, backend_verrou_context);
+  interflop_checkcancellation_add_double(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationadd64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_add_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_checkcancellation_add_double(arg1[0], arg2[0], res, backend_checkcancellation_context);
+  interflop_verrou_add_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+  interflop_checkcancellation_add_double(arg1[1], arg2[1], res+1, backend_checkcancellation_context);
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationadd64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verroucheckcancellationadd32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_add_float(*arg1, *arg2, &res, backend_verrou_context);
+  interflop_checkcancellation_add_float(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationadd32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_add_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_add_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationadd32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_add_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_add_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+
+// generation of operation sub backend verrou
+
+
+static VG_REGPARM(2) Long vr_verroucheckcancellationsub64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_verrou_sub_double(*arg1, *arg2, &res, backend_verrou_context);
+  interflop_checkcancellation_sub_double(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationsub64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_verrou_sub_double(arg1[0], arg2[0], res, backend_verrou_context);
+  interflop_checkcancellation_sub_double(arg1[0], arg2[0], res, backend_checkcancellation_context);
+  interflop_verrou_sub_double(arg1[1], arg2[1], res+1, backend_verrou_context);
+  interflop_checkcancellation_sub_double(arg1[1], arg2[1], res+1, backend_checkcancellation_context);
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationsub64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_verrou_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_verroucheckcancellationsub32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_verrou_sub_float(*arg1, *arg2, &res, backend_verrou_context);
+  interflop_checkcancellation_sub_float(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationsub32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_verrou_sub_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_sub_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_verroucheckcancellationsub32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_verrou_sub_float(arg1[i], arg2[i], res+i, backend_verrou_context);
+     interflop_checkcancellation_sub_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+
+// generation of operation add backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquadcheckcancellationadd64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_add_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  interflop_checkcancellation_add_double(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationadd64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_add_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_checkcancellation_add_double(arg1[0], arg2[0], res, backend_checkcancellation_context);
+  interflop_mcaquad_add_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+  interflop_checkcancellation_add_double(arg1[1], arg2[1], res+1, backend_checkcancellation_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationadd64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_add_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquadcheckcancellationadd32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_add_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  interflop_checkcancellation_add_float(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationadd32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_add_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_add_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationadd32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_add_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_add_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+
+// generation of operation sub backend mcaquad
+
+
+static VG_REGPARM(2) Long vr_mcaquadcheckcancellationsub64F (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  interflop_mcaquad_sub_double(*arg1, *arg2, &res, backend_mcaquad_context);
+  interflop_checkcancellation_sub_double(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationsub64Fx2(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  interflop_mcaquad_sub_double(arg1[0], arg2[0], res, backend_mcaquad_context);
+  interflop_checkcancellation_sub_double(arg1[0], arg2[0], res, backend_checkcancellation_context);
+  interflop_mcaquad_sub_double(arg1[1], arg2[1], res+1, backend_mcaquad_context);
+  interflop_checkcancellation_sub_double(arg1[1], arg2[1], res+1, backend_checkcancellation_context);
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationsub64Fx4 (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     interflop_mcaquad_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_sub_double(arg1CopyAvxDouble[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(2) Int vr_mcaquadcheckcancellationsub32F (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  interflop_mcaquad_sub_float(*arg1, *arg2, &res, backend_mcaquad_context);
+  interflop_checkcancellation_sub_float(*arg1, *arg2, &res, backend_checkcancellation_context);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationsub32Fx8 (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     interflop_mcaquad_sub_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_sub_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+static VG_REGPARM(3) void vr_mcaquadcheckcancellationsub32Fx4 (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     interflop_mcaquad_sub_float(arg1[i], arg2[i], res+i, backend_mcaquad_context);
+     interflop_checkcancellation_sub_float(arg1[i], arg2[i], res+i, backend_checkcancellation_context);
+  }
+}
+
+
+// generation of operation madd backend verrou
+//FMA Operator
+static VG_REGPARM(3) Long vr_verroumadd64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_verrou_madd_double(*arg1, *arg2,  *arg3, &res, backend_verrou_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_verroumadd32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_verrou_madd_float(*arg1, *arg2,  *arg3, &res, backend_verrou_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation msub backend verrou
+//FMA Operator
+static VG_REGPARM(3) Long vr_verroumsub64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_verrou_madd_double(*arg1, *arg2, - *arg3, &res, backend_verrou_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_verroumsub32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_verrou_madd_float(*arg1, *arg2, - *arg3, &res, backend_verrou_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
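+
+/* The madd/msub wrappers (here and in the mcaquad / checkcancellation
+   variants below) are only functional when Verrou is built with FMA support
+   (USE_VERROU_FMA defined); otherwise they panic on the first fused operation
+   encountered.  msub is expressed as madd with the third operand negated:
+   a*b - c = a*b + (-c). */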
+// generation of operation madd backend mcaquad
+//FMA Operator
+static VG_REGPARM(3) Long vr_mcaquadmadd64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_mcaquad_madd_double(*arg1, *arg2,  *arg3, &res, backend_mcaquad_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_mcaquadmadd32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_mcaquad_madd_float(*arg1, *arg2,  *arg3, &res, backend_mcaquad_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation msub backend mcaquad
+//FMA Operator
+static VG_REGPARM(3) Long vr_mcaquadmsub64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_mcaquad_madd_double(*arg1, *arg2, - *arg3, &res, backend_mcaquad_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_mcaquadmsub32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_mcaquad_madd_float(*arg1, *arg2, - *arg3, &res, backend_mcaquad_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation madd backend verrou
+//FMA Operator
+static VG_REGPARM(3) Long vr_verroucheckcancellationmadd64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_verrou_madd_double(*arg1, *arg2,  *arg3, &res, backend_verrou_context);
+  interflop_checkcancellation_madd_double(*arg1, *arg2,  *arg3, &res, backend_checkcancellation_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_verroucheckcancellationmadd32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_verrou_madd_float(*arg1, *arg2,  *arg3, &res, backend_verrou_context);
+  interflop_checkcancellation_madd_float(*arg1, *arg2,  *arg3, &res, backend_checkcancellation_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation msub backend verrou
+//FMA Operator
+static VG_REGPARM(3) Long vr_verroucheckcancellationmsub64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_verrou_madd_double(*arg1, *arg2, - *arg3, &res, backend_verrou_context);
+  interflop_checkcancellation_madd_double(*arg1, *arg2, - *arg3, &res, backend_checkcancellation_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_verroucheckcancellationmsub32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_verrou_madd_float(*arg1, *arg2, - *arg3, &res, backend_verrou_context);
+  interflop_checkcancellation_madd_float(*arg1, *arg2, - *arg3, &res, backend_checkcancellation_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation madd backend mcaquad
+//FMA Operator
+static VG_REGPARM(3) Long vr_mcaquadcheckcancellationmadd64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_mcaquad_madd_double(*arg1, *arg2,  *arg3, &res, backend_mcaquad_context);
+  interflop_checkcancellation_madd_double(*arg1, *arg2,  *arg3, &res, backend_checkcancellation_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_mcaquadcheckcancellationmadd32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_mcaquad_madd_float(*arg1, *arg2,  *arg3, &res, backend_mcaquad_context);
+  interflop_checkcancellation_madd_float(*arg1, *arg2,  *arg3, &res, backend_checkcancellation_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
+// generation of operation msub backend mcaquad
+//FMA Operator
+static VG_REGPARM(3) Long vr_mcaquadcheckcancellationmsub64F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  interflop_mcaquad_madd_double(*arg1, *arg2, - *arg3, &res, backend_mcaquad_context);
+  interflop_checkcancellation_madd_double(*arg1, *arg2, - *arg3, &res, backend_checkcancellation_context);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int vr_mcaquadcheckcancellationmsub32F (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  interflop_mcaquad_madd_float(*arg1, *arg2, - *arg3, &res, backend_mcaquad_context);
+  interflop_checkcancellation_madd_float(*arg1, *arg2, - *arg3, &res, backend_checkcancellation_context);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
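+
+/* All of the wrappers above receive floating-point operands as raw 64-bit
+   (Long) or 32-bit (Int) bit patterns and recover the values by casting the
+   address of the argument, as in *(double*)(&a).  An equivalent,
+   strict-aliasing-safe way of doing the same reinterpretation (illustrative
+   sketch only, not what the generated code uses) would be:
+
+       static inline double vr_bitsToDouble (Long a) {
+          double d;
+          VG_(memcpy)(&d, &a, sizeof d);   // copies the bit pattern unchanged
+          return d;
+       }
+*/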
diff --git a/verrou/vr_include_trace.c b/verrou/vr_include_trace.c
new file mode 100644
index 0000000000000000000000000000000000000000..2803a14b015023ded6c5e1a94541c6bb91d7545b
--- /dev/null
+++ b/verrou/vr_include_trace.c
@@ -0,0 +1,128 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- This file contains code allowing to restrict trace           ---*/
+/*--- generation to an explicitly listed set of symbols.           ---*/
+/*---                                          vr_include_trace.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+
+#define LINE_SIZEMAX VR_FNNAME_BUFSIZE
+
+static Vr_Include_Trace* vr_addIncludeTrace (Vr_Include_Trace* list, const HChar * fnname, const HChar * objname) {
+  Vr_Include_Trace * cell = VG_(malloc)("vr.addIncludeTrace.1", sizeof(Vr_Include_Trace));
+  cell->fnname  = VG_(strdup)("vr.addIncludeTrace.2", fnname);
+  cell->objname = VG_(strdup)("vr.addIncludeTrace.3", objname);
+  cell->next    = list;
+  return cell;
+}
+
+static Vr_Include_Trace * vr_findIncludeTrace (Vr_Include_Trace* list, const HChar * fnname, const HChar * objname) {
+  Vr_Include_Trace * include;
+  for (include = list ; include != NULL ; include = include->next) {
+    if (include->fnname[0] != '*'
+        && VG_(strcmp)(include->fnname, fnname) != 0)
+      continue;
+
+    if (include->objname[0] != '*'
+        && VG_(strcmp)(include->objname, objname) != 0)
+      continue;
+
+    return include;
+  }
+
+  return NULL;
+}
+
+void vr_freeIncludeTraceList (Vr_Include_Trace* list) {
+  while (list != NULL) {
+    Vr_Include_Trace *next = list->next;
+    VG_(free)(list->fnname);
+    VG_(free)(list->objname);
+    VG_(free)(list);
+    list = next;
+  }
+}
+
+
+Vr_Include_Trace * vr_loadIncludeTraceList (Vr_Include_Trace * list, const HChar * fname) {
+  VG_(umsg)("Loading inclusion trace list from `%s'... ", fname);
+  Int fd = VG_(fd_open)(fname,VKI_O_RDONLY, 0);
+  if (fd == -1) {
+    VG_(umsg)("ERROR (open)\n");
+    return list;
+  }
+
+  SizeT nLine = LINE_SIZEMAX;
+  HChar *line = VG_(malloc)("vr.loadIncludeTrace.1", nLine*sizeof(HChar));
+  Int lineno = 0;
+
+  while (! VG_(get_line)(fd, &line, &nLine, &lineno)) {
+    HChar * c;
+
+    // Skip non-blank characters
+    for (c = line;
+         c<line+LINE_SIZEMAX && *c != 0 && *c != '\t' && *c != ' ';
+         ++c) {}
+    if (*c == 0 || c>line+LINE_SIZEMAX-1) {
+      VG_(umsg)("ERROR (parse) :%s \n",line);
+      return list;
+    }
+    *c = 0;
+
+    // Skip blank characters
+    for (++c;
+         c<line+LINE_SIZEMAX && *c != 0 && (*c == '\t' || *c == ' ');
+         ++c) {}
+    list = vr_addIncludeTrace (list,
+                               /*fnname=*/ line,
+                               /*objname=*/ c);
+  }
+  VG_(free)(line);
+  VG_(close)(fd);
+  VG_(umsg)("OK.\n");
+  return list;
+}
+
+
+
+Bool vr_includeTraceIRSB (const HChar** fnname, const HChar **objname) {
+  if (** fnname == 0) {
+    return False;
+  }
+  if (** objname == 0) {
+    return False;
+  }
+  // Only trace functions / objects that are explicitly listed
+  Vr_Include_Trace *include = vr_findIncludeTrace (vr.includeTrace, *fnname, *objname);
+  if (include != NULL) {
+    return True;
+  }
+  return False;
+}
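+
+/* Illustrative sketch of the expected file format (deduced from the parser
+   above; the names below are made up): one "fnname objname" pair per line,
+   the two fields being separated by spaces or tabs:
+
+       cblas_dgemm     libopenblas.so.0
+       my_kernel       *
+       *               libm.so.6
+
+   A field whose first character is '*' matches anything; otherwise an exact
+   VG_(strcmp) match against the symbol or object name is required. */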
diff --git a/verrou/vr_instrumentOp_impl.h b/verrou/vr_instrumentOp_impl.h
new file mode 100644
index 0000000000000000000000000000000000000000..1ae06759abf5b0a64662eebc6fe3ff981c5a032b
--- /dev/null
+++ b/verrou/vr_instrumentOp_impl.h
@@ -0,0 +1,340 @@
+
+
+
+    switch (op) {
+      // Addition
+      // - Double precision
+    case Iop_AddF64: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcNameWithCC(add64F), VR_OP_ADD, VR_PREC_DBL, VR_VEC_SCAL);
+
+    case Iop_Add64F0x2: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcNameWithCC(add64F), VR_OP_ADD, VR_PREC_DBL, VR_VEC_LLO);
+
+    case Iop_Add64Fx2: // 128b vector, 2 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcNameWithCC(add64Fx2), VR_OP_ADD, VR_PREC_DBL, VR_VEC_FULL2);
+
+    case Iop_AddF32: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcNameWithCC(add32F), VR_OP_ADD, VR_PREC_FLT, VR_VEC_SCAL);
+
+    case Iop_Add32F0x4: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcNameWithCC(add32F), VR_OP_ADD, VR_PREC_FLT, VR_VEC_LLO);
+
+    case Iop_Add32Fx4: // 128b vector, 4 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcNameWithCC(add32Fx4), VR_OP_ADD, VR_PREC_FLT, VR_VEC_FULL4);
+
+    case Iop_Add64Fx4: //AVX double
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcNameWithCC(add64Fx4), VR_OP_ADD, VR_PREC_DBL, VR_VEC_FULL4);
+
+    case Iop_Add32Fx8: //AVX Float
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcNameWithCC(add32Fx8), VR_OP_ADD, VR_PREC_FLT, VR_VEC_FULL8);
+
+
+      // Subtraction
+
+      // - Double precision
+    case Iop_SubF64: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcNameWithCC(sub64F), VR_OP_SUB, VR_PREC_DBL, VR_VEC_SCAL);
+
+    case Iop_Sub64F0x2: // 128b vector, lowest-lane only
+      return vr_replaceBinFpOpLLO (sb, stmt, expr, bcNameWithCC(sub64F), VR_OP_SUB, VR_PREC_DBL, VR_VEC_LLO);
+
+    case Iop_Sub64Fx2:
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcNameWithCC(sub64Fx2), VR_OP_SUB, VR_PREC_DBL, VR_VEC_FULL2);
+
+    case Iop_SubF32: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcNameWithCC(sub32F), VR_OP_SUB, VR_PREC_FLT, VR_VEC_SCAL);
+
+    case Iop_Sub32F0x4: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcNameWithCC(sub32F), VR_OP_SUB, VR_PREC_FLT, VR_VEC_LLO);
+
+    case Iop_Sub32Fx4: // 128b vector, 4 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcNameWithCC(sub32Fx4), VR_OP_SUB, VR_PREC_FLT, VR_VEC_FULL4);
+
+    case Iop_Sub64Fx4: //AVX double
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcNameWithCC(sub64Fx4), VR_OP_SUB, VR_PREC_DBL, VR_VEC_FULL4);
+
+    case Iop_Sub32Fx8: //AVX Float
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcNameWithCC(sub32Fx8), VR_OP_SUB, VR_PREC_FLT, VR_VEC_FULL8);
+
+      // Multiplication
+
+      // - Double precision
+    case Iop_MulF64: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcName(mul64F), VR_OP_MUL, VR_PREC_DBL, VR_VEC_SCAL);
+
+    case Iop_Mul64F0x2: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcName(mul64F), VR_OP_MUL, VR_PREC_DBL, VR_VEC_LLO);
+
+    case Iop_Mul64Fx2: // 128b vector, 2 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcName(mul64Fx2), VR_OP_MUL, VR_PREC_DBL, VR_VEC_FULL2);
+
+    case Iop_MulF32: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcName(mul32F), VR_OP_MUL, VR_PREC_FLT, VR_VEC_SCAL);
+
+    case Iop_Mul32F0x4: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcName(mul32F), VR_OP_MUL, VR_PREC_FLT, VR_VEC_LLO);
+
+    case Iop_Mul32Fx4: // 128b vector, 4 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcName(mul32Fx4), VR_OP_MUL, VR_PREC_FLT, VR_VEC_FULL4);
+
+    case Iop_Mul64Fx4: //AVX double
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcName(mul64Fx4), VR_OP_MUL, VR_PREC_DBL, VR_VEC_FULL4);
+
+    case Iop_Mul32Fx8: //AVX Float
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcName(mul32Fx8), VR_OP_MUL, VR_PREC_FLT, VR_VEC_FULL8);
+
+    case Iop_DivF32:
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcName(div32F), VR_OP_DIV, VR_PREC_FLT, VR_VEC_SCAL);
+
+    case Iop_Div32F0x4: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcName(div32F), VR_OP_DIV, VR_PREC_FLT, VR_VEC_LLO);
+
+    case Iop_Div32Fx4: // 128b vector, 4 lanes
+       return vr_replaceBinFullSSE (sb, stmt, expr, bcName(div32Fx4), VR_OP_DIV, VR_PREC_FLT, VR_VEC_FULL4);
+
+    case Iop_DivF64: // Scalar
+       return vr_replaceBinFpOpScal (sb, stmt, expr, bcName(div64F), VR_OP_DIV, VR_PREC_DBL, VR_VEC_SCAL);
+
+    case Iop_Div64F0x2: // 128b vector, lowest-lane-only
+       return vr_replaceBinFpOpLLO (sb, stmt, expr, bcName(div64F), VR_OP_DIV, VR_PREC_DBL, VR_VEC_LLO);
+
+    case Iop_Div64Fx2: // 128b vector, 2 lanes
+       return vr_replaceBinFullSSE(sb, stmt, expr, bcName(div64Fx2), VR_OP_DIV, VR_PREC_DBL, VR_VEC_FULL2);
+
+    case Iop_Div64Fx4: //AVX double
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcName(div64Fx4), VR_OP_DIV, VR_PREC_DBL, VR_VEC_FULL4);
+
+    case Iop_Div32Fx8: //AVX Float
+       return vr_replaceBinFullAVX(sb, stmt, expr, bcName(div32Fx8), VR_OP_DIV, VR_PREC_FLT, VR_VEC_FULL8);
+
+
+
+    case Iop_MAddF32:
+#ifndef IGNOREFMA
+       return vr_replaceFMA (sb, stmt, expr, bcNameWithCC(madd32F), VR_OP_MADD, VR_PREC_FLT);
+#else
+       vr_countOp (sb, VR_OP_MADD, VR_PREC_FLT, VR_VEC_SCAL,False);
+       addStmtToIRSB (sb, stmt);
+       break;
+#endif
+    case Iop_MSubF32:
+#ifndef IGNOREFMA
+          return vr_replaceFMA (sb, stmt, expr, bcNameWithCC(msub32F), VR_OP_MSUB, VR_PREC_FLT);
+#else
+       vr_countOp (sb, VR_OP_MSUB, VR_PREC_FLT, VR_VEC_SCAL,False);
+       addStmtToIRSB (sb, stmt);
+       break;
+#endif
+    case Iop_MAddF64:
+#ifndef IGNOREFMA
+          return vr_replaceFMA (sb, stmt, expr, bcNameWithCC(madd64F), VR_OP_MADD, VR_PREC_DBL);
+#else
+       vr_countOp (sb, VR_OP_MADD, VR_PREC_DBL, VR_VEC_SCAL,False);
+       addStmtToIRSB (sb, stmt);
+       break;
+#endif
+    case Iop_MSubF64:
+#ifndef IGNOREFMA
+          return vr_replaceFMA (sb, stmt, expr, bcNameWithCC(msub64F), VR_OP_MSUB,  VR_PREC_DBL);
+#else
+       vr_countOp (sb, VR_OP_MSUB, VR_PREC_DBL, VR_VEC_SCAL,False);
+       addStmtToIRSB (sb, stmt);
+       break;
+#endif
+      //   Other FP operations
+    case Iop_Add32Fx2:
+      vr_countOp (sb, VR_OP_ADD, VR_PREC_FLT, VR_VEC_FULL2,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+
+    case Iop_Sub32Fx2:
+      vr_countOp (sb, VR_OP_SUB, VR_PREC_FLT, VR_VEC_FULL2,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_CmpF64:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_DBL, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_CmpF32:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_FLT, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_F32toF64:  /*                       F32 -> F64 */
+      vr_countOp (sb, VR_OP_CONV, VR_PREC_FLT_TO_DBL, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_F64toF32:
+#ifndef IGNORECAST
+       return vr_replaceCast (sb, stmt, expr, bcName(cast64FTo32F), VR_OP_CONV, VR_PREC_DBL_TO_FLT);
+#else
+       vr_countOp (sb, VR_OP_CONV, VR_PREC_DBL_TO_FLT, VR_VEC_SCAL,False);
+       addStmtToIRSB (sb, stmt);
+#endif
+       break;
+
+    case Iop_F64toI64S: /* IRRoundingMode(I32) x F64 -> signed I64 */
+      vr_countOp (sb, VR_OP_CONV, VR_PREC_DBL_TO_INT, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_F64toI64U: /* IRRoundingMode(I32) x F64 -> unsigned I64 */
+      vr_countOp (sb, VR_OP_CONV, VR_PREC_DBL_TO_INT, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_F64toI32S: /* IRRoundingMode(I32) x F64 -> signed I32 */
+      vr_countOp (sb, VR_OP_CONV, VR_PREC_DBL_TO_SHT, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_F64toI32U: /* IRRoundingMode(I32) x F64 -> unsigned I32 */
+      vr_countOp (sb, VR_OP_CONV, VR_PREC_DBL_TO_SHT, VR_VEC_SCAL,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+      /******/
+    case Iop_Max32Fx4:
+      vr_countOp (sb, VR_OP_MAX, VR_PREC_FLT, VR_VEC_FULL4,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Max32F0x4:
+      vr_countOp (sb, VR_OP_MAX, VR_PREC_FLT, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Max64Fx2:
+        vr_countOp (sb, VR_OP_MAX, VR_PREC_DBL, VR_VEC_FULL2,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Max64F0x2:
+      vr_countOp (sb, VR_OP_MAX, VR_PREC_DBL, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+
+
+    case Iop_Min32Fx4:
+      vr_countOp (sb, VR_OP_MIN, VR_PREC_FLT, VR_VEC_FULL4,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Min32F0x4:
+      vr_countOp (sb, VR_OP_MIN, VR_PREC_FLT, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Min64Fx2:
+        vr_countOp (sb, VR_OP_MIN, VR_PREC_DBL, VR_VEC_FULL2,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_Min64F0x2:
+      vr_countOp (sb, VR_OP_MIN, VR_PREC_DBL, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+
+    case Iop_CmpEQ64Fx2: case Iop_CmpLT64Fx2:
+    case Iop_CmpLE64Fx2: case Iop_CmpUN64Fx2:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_DBL, VR_VEC_FULL2,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_CmpEQ64F0x2: case Iop_CmpLT64F0x2:
+    case Iop_CmpLE64F0x2: case Iop_CmpUN64F0x2:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_DBL, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_CmpEQ32Fx4: case Iop_CmpLT32Fx4:
+    case Iop_CmpLE32Fx4: case Iop_CmpUN32Fx4:
+    case Iop_CmpGT32Fx4: case Iop_CmpGE32Fx4:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_FLT, VR_VEC_FULL4,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+    case Iop_CmpEQ32F0x4: case Iop_CmpLT32F0x4:
+    case Iop_CmpLE32F0x4: case Iop_CmpUN32F0x4:
+      vr_countOp (sb, VR_OP_CMP, VR_PREC_FLT, VR_VEC_LLO,False);
+      addStmtToIRSB (sb, stmt);
+      break;
+
+    case Iop_ReinterpF64asI64:
+    case Iop_ReinterpI64asF64:
+    case Iop_ReinterpF32asI32:
+    case Iop_ReinterpI32asF32:
+    case Iop_NegF64:
+    case Iop_AbsF64:
+    case Iop_NegF32:
+    case Iop_AbsF32:
+    case Iop_Abs64Fx2:
+    case Iop_Neg64Fx2:
+      // ignored: neither counted nor instrumented
+      addStmtToIRSB (sb, stmt);
+      break;
+
+      // operations on 64-bit registers with 32-bit rounding
+    case Iop_AddF64r32:
+    case Iop_SubF64r32:
+    case Iop_MulF64r32:
+    case Iop_DivF64r32:
+    case Iop_MAddF64r32:
+    case Iop_MSubF64r32:
+
+      // 128-bit operations
+    case Iop_AddF128:
+    case Iop_SubF128:
+    case Iop_MulF128:
+    case Iop_DivF128:
+
+    case Iop_SqrtF128:
+    case Iop_SqrtF64:
+    case Iop_SqrtF32:
+    case Iop_Sqrt32Fx4:
+    case Iop_Sqrt64Fx2:
+    case  Iop_AtanF64:       /* FPATAN,  arctan(arg1/arg2)       */
+    case  Iop_Yl2xF64:       /* FYL2X,   arg1 * log2(arg2)       */
+    case  Iop_Yl2xp1F64:     /* FYL2XP1, arg1 * log2(arg2+1.0)   */
+    case  Iop_PRemF64:       /* FPREM,   non-IEEE remainder(arg1/arg2)    */
+    case  Iop_PRemC3210F64:  /* C3210 flags resulting from FPREM: :: I32 */
+    case  Iop_PRem1F64:      /* FPREM1,  IEEE remainder(arg1/arg2)    */
+    case  Iop_PRem1C3210F64: /* C3210 flags resulting from FPREM1, :: I32 */
+    case  Iop_ScaleF64:      /* FSCALE,  arg1 * (2^RoundTowardsZero(arg2)) */
+    case  Iop_SinF64:    /* FSIN */
+    case  Iop_CosF64:    /* FCOS */
+    case  Iop_TanF64:    /* FTAN */
+    case  Iop_2xm1F64:   /* (2^arg - 1.0) */
+
+    case Iop_RSqrtEst5GoodF64: /* reciprocal square root estimate, 5 good bits */
+
+    case Iop_RecipStep32Fx4:
+    case Iop_RSqrtEst32Fx4:
+    case Iop_RSqrtStep32Fx4:
+    case Iop_RecipEst32F0x4:
+    case Iop_Sqrt32F0x4:
+    case Iop_RSqrtEst32F0x4:
+
+      /*AVX*/
+    case Iop_Sqrt32Fx8:
+    case Iop_Sqrt64Fx4:
+    case Iop_RSqrtEst32Fx8:
+    case Iop_RecipEst32Fx8:
+
+    case Iop_RoundF64toF64_NEAREST: /* frin */
+    case Iop_RoundF64toF64_NegINF:  /* frim */
+    case Iop_RoundF64toF64_PosINF:  /* frip */
+    case Iop_RoundF64toF64_ZERO:    /* friz */
+
+    case Iop_F128toF64:  /* IRRoundingMode(I32) x F128 -> F64         */
+    case Iop_F128toF32:  /* IRRoundingMode(I32) x F128 -> F32         */
+    case Iop_F64toI16S: /* IRRoundingMode(I32) x F64 -> signed I16 */
+
+    case Iop_CmpF128:
+
+    case Iop_PwMax32Fx4: case Iop_PwMin32Fx4:
+      vr_maybe_record_ErrorOp (VR_ERROR_UNCOUNTED, op);
+
+    default:
+      //      ppIRStmt (stmt);
+      addStmtToIRSB (sb, stmt);
+      break;
+    }
+    return False;
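+
+/* This header is meant to be #included from vr_instrumentOp() in vr_main.c,
+   with the bcName()/bcNameWithCC() macros defined by the includer.  For
+   instance, for the verrou backend with cancellation checking enabled,
+
+       #define bcName(OP)       "vr_verrou"#OP, vr_verrou##OP
+       #define bcNameWithCC(OP) "vr_verroucheckcancellation"#OP, vr_verroucheckcancellation##OP
+
+   so that the Iop_AddF64 case above expands (roughly) to
+
+       return vr_replaceBinFpOpScal (sb, stmt, expr,
+                                     "vr_verroucheckcancellationadd64F",
+                                     vr_verroucheckcancellationadd64F,
+                                     VR_OP_ADD, VR_PREC_DBL, VR_VEC_SCAL);
+*/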
diff --git a/verrou/vr_interp_operator_impl.h b/verrou/vr_interp_operator_impl.h
new file mode 100644
index 0000000000000000000000000000000000000000..88930300494975bd018c750b2bdf329ba31f14dc
--- /dev/null
+++ b/verrou/vr_interp_operator_impl.h
@@ -0,0 +1,39 @@
+
+/* Helpers to copy the first AVX operand: workaround to avoid the 6-argument limitation of dirty calls */
+
+#ifndef INTERFLOP_VECTO
+static double arg1CopyAvxDouble[4];
+static VG_REGPARM(3) void vr_AvxDoubleCopyFirstArg (ULong a0, ULong a1, ULong a2,ULong a3) {
+  arg1CopyAvxDouble[0] =*((double*)(&a0));
+  arg1CopyAvxDouble[1] =*((double*)(&a1));
+  arg1CopyAvxDouble[2] =*((double*)(&a2));
+  arg1CopyAvxDouble[3] =*((double*)(&a3));    
+}
+
+static float arg1CopyAvxFloat[8];
+static VG_REGPARM(3) void vr_AvxFloatCopyFirstArg (ULong a0, ULong a1, ULong a2,ULong a3) {
+  V256* reg1=(V256*)(&arg1CopyAvxFloat) ;
+  reg1->w64[0]=a0;
+  reg1->w64[1]=a1;
+  reg1->w64[2]=a2;
+  reg1->w64[3]=a3;
+}
+#else
+static doublex4 arg1CopyAvxDouble;
+
+static VG_REGPARM(3) void vr_AvxDoubleCopyFirstArg (ULong a0, ULong a1, ULong a2,ULong a3) {
+  arg1CopyAvxDouble= (doublex4){*((double*)(&a0)),*((double*)(&a1)),*((double*)(&a2)),*((double*)(&a3))};
+}
+
+static floatx8 arg1CopyAvxFloat;
+
+static VG_REGPARM(3) void vr_AvxFloatCopyFirstArg (ULong a0, ULong a1, ULong a2,ULong a3) {
+  V256* reg1=(V256*)(&arg1CopyAvxFloat) ;
+  reg1->w64[0]=a0;
+  reg1->w64[1]=a1;
+  reg1->w64[2]=a2;
+  reg1->w64[3]=a3;
+  arg1CopyAvxFloat=*reg1;
+}
+
+#endif
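+
+/* These static buffers support the two-call scheme used by
+   vr_replaceBinFullAVX() in vr_main.c: a 256-bit binary operation would need
+   eight 64-bit operand words plus the vector return, more than the
+   6-argument limitation allows, so a first dirty call
+   (vr_AvxDoubleCopyFirstArg or vr_AvxFloatCopyFirstArg) stashes the first
+   operand here, and the second call passes only the second operand and reads
+   the stash back (see the 64Fx4 and 32Fx8 cases of
+   vr_interp_operator_template_2args.h). */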
diff --git a/verrou/vr_interp_operator_template_2args.h b/verrou/vr_interp_operator_template_2args.h
new file mode 100644
index 0000000000000000000000000000000000000000..5b6f6a9910b9383ab887da806467bb8e90c65e32
--- /dev/null
+++ b/verrou/vr_interp_operator_template_2args.h
@@ -0,0 +1,63 @@
+
+
+static VG_REGPARM(2) Long FCTNAME(64F,) (Long a, Long b) {
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double res;
+  BACKENDFUNC(double)(*arg1, *arg2, &res, CONTEXT);
+  Long *c = (Long*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void FCTNAME(64Fx2,)(/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  double arg1[2] = {*((double*)(&aLo)),*((double*)(&aHi))} ;
+  double arg2[2] = {*((double*)(&bLo)),*((double*)(&bHi))} ;
+  double* res=(double*) output;
+  BACKENDFUNC(double)(arg1[0], arg2[0], res, CONTEXT);
+  BACKENDFUNC(double)(arg1[1], arg2[1], res+1, CONTEXT);
+}
+
+static VG_REGPARM(3) void FCTNAME(64Fx4,) (/*OUT*/V256* output,
+                                           ULong b0, ULong b1, ULong b2,ULong b3) {
+
+  double arg2[4] = {*((double*)(&b0)),*((double*)(&b1)), *((double*)(&b2)),*((double*)(&b3))} ;
+  double* res=(double*) output;
+  for(int i=0; i<4; i++){
+     BACKENDFUNC(double)(arg1CopyAvxDouble[i], arg2[i], res+i, CONTEXT);
+  }
+}
+
+static VG_REGPARM(2) Int FCTNAME(32F,) (Long a, Long b) {
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float res;
+  BACKENDFUNC(float)(*arg1, *arg2, &res, CONTEXT);
+  Int *c = (Int*)(&res);
+  return *c;
+}
+
+static VG_REGPARM(3) void FCTNAME(32Fx8,) (/*OUT*/V256* output,
+					   ULong b0, ULong b1, ULong b2,ULong b3) {
+  V256 reg2;   reg2.w64[0]=b0;   reg2.w64[1]=b1;   reg2.w64[2]=b2;   reg2.w64[3]=b3;
+  float* res=(float*) output;
+  float* arg1=arg1CopyAvxFloat;
+  float* arg2=(float*) &reg2;
+  for(int i=0; i<8; i++){
+     BACKENDFUNC(float)(arg1[i], arg2[i], res+i, CONTEXT);
+  }
+}
+
+static VG_REGPARM(3) void FCTNAME(32Fx4,) (/*OUT*/V128* output, ULong aHi, ULong aLo, ULong bHi,ULong bLo) {
+  V128 reg1; reg1.w64[0]=aLo; reg1.w64[1]=aHi;
+  V128 reg2; reg2.w64[0]=bLo; reg2.w64[1]=bHi;
+
+  float* res=(float*) output;
+  float* arg1=(float*) &reg1;
+  float* arg2=(float*) &reg2;
+
+  for(int i=0; i<4;i++){
+     BACKENDFUNC(float)(arg1[i], arg2[i], res+i, CONTEXT);
+  }
+}
+
+
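+
+/* This template is instantiated by defining FCTNAME, BACKENDFUNC and CONTEXT
+   before including it.  A plausible instantiation (illustrative sketch; the
+   exact definitions presumably live in the generated
+   vr_generated_from_templates.h and may differ) would be:
+
+       #define FCTNAME(PREC,SUFFIX) vr_verrouadd##PREC##SUFFIX
+       #define BACKENDFUNC(TYPE)    interflop_verrou_add_##TYPE
+       #define CONTEXT              backend_verrou_context
+       #include "vr_interp_operator_template_2args.h"
+       #undef FCTNAME
+       #undef BACKENDFUNC
+       #undef CONTEXT
+
+   which produces vr_verrouadd64F, vr_verrouadd64Fx2, vr_verrouadd64Fx4,
+   vr_verrouadd32F, vr_verrouadd32Fx4 and vr_verrouadd32Fx8, each calling the
+   corresponding interflop add entry point of the verrou backend. */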
diff --git a/verrou/vr_interp_operator_template_3args.h b/verrou/vr_interp_operator_template_3args.h
new file mode 100644
index 0000000000000000000000000000000000000000..b8d59154a8d0f4a5587dd9e7db62fa00ac943afa
--- /dev/null
+++ b/verrou/vr_interp_operator_template_3args.h
@@ -0,0 +1,30 @@
+//FMA Operator
+static VG_REGPARM(3) Long FCTNAME(64F,) (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  double *arg1 = (double*)(&a);
+  double *arg2 = (double*)(&b);
+  double *arg3 = (double*)(&c);
+  double res;
+  BACKENDFUNC(double)(*arg1, *arg2, SIGN *arg3, &res, CONTEXT);
+#else
+  double res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Long *d = (Long*)(&res);
+  return *d;
+}
+
+static VG_REGPARM(3) Int FCTNAME(32F,) (Long a, Long b, Long c) {
+#ifdef USE_VERROU_FMA
+  float *arg1 = (float*)(&a);
+  float *arg2 = (float*)(&b);
+  float *arg3 = (float*)(&c);
+  float res;
+  BACKENDFUNC(float)(*arg1, *arg2, SIGN *arg3, &res, CONTEXT);
+#else
+  float res=0.;
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif
+  Int *d = (Int*)(&res);
+  return *d;
+}
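+
+/* In addition to FCTNAME, BACKENDFUNC and CONTEXT, this template expects a
+   SIGN macro so that the same body covers both fused operations: SIGN is
+   expected to expand to nothing for madd (a*b + c) and to a unary minus for
+   msub (a*b - c), which matches the `*arg3` versus `- *arg3` difference
+   visible in the generated madd and msub wrappers.  An illustrative
+   instantiation for the verrou msub wrappers (the macro definitions are an
+   assumption; the generated header may define them differently) would be:
+
+       #define SIGN             -
+       #define FCTNAME(P,S)     vr_verroumsub##P##S
+       #define BACKENDFUNC(T)   interflop_verrou_madd_##T
+       #define CONTEXT          backend_verrou_context
+       #include "vr_interp_operator_template_3args.h"
+*/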
diff --git a/verrou/vr_interp_operator_template_cast.h b/verrou/vr_interp_operator_template_cast.h
new file mode 100644
index 0000000000000000000000000000000000000000..92178333effaa5602e1e479c81dfaa74339ed8ad
--- /dev/null
+++ b/verrou/vr_interp_operator_template_cast.h
@@ -0,0 +1,9 @@
+
+
+static VG_REGPARM(3) Int FCTNAME(64FTo32F,) (Long a) {
+  double *arg1 = (double*)(&a);
+  float res;
+  BACKENDFUNC(double_to_float)(*arg1, &res,CONTEXT);
+  Int *d = (Int*)(&res);
+  return *d;
+}
diff --git a/verrou/vr_main.c b/verrou/vr_main.c
new file mode 100644
index 0000000000000000000000000000000000000000..c2b46a82d19f95683275c92827964bee2786ba71
--- /dev/null
+++ b/verrou/vr_main.c
@@ -0,0 +1,1241 @@
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                vr_main.c ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#include "vr_main.h"
+#include "float.h"
+#include "pub_tool_libcfile.h"
+#include "coregrind/pub_core_transtab.h"
+//#pragma STDC FENV_ACCESS ON
+
+Vr_State vr;
+
+
+
+struct interflop_backend_interface_t backend_verrou;
+void* backend_verrou_context;
+
+struct interflop_backend_interface_t backend_mcaquad;
+void* backend_mcaquad_context;
+
+struct interflop_backend_interface_t backend_checkcancellation;
+void* backend_checkcancellation_context;
+
+
+
+VgFile * vr_outCancellationFile;
+
+// * Floating-point operations counter
+
+
+// ** Operation categories
+
+
+// *** Operation type
+
+
+static const HChar* vr_ppOp (Vr_Op op) {
+  switch (op) {
+  case VR_OP_ADD:
+    return "add";
+  case VR_OP_SUB:
+    return "sub";
+  case VR_OP_MUL:
+    return "mul";
+  case VR_OP_DIV:
+    return "div";
+  case VR_OP_MADD:
+    return "mAdd";
+  case VR_OP_MSUB:
+    return "mSub";
+  case VR_OP_CMP:
+    return "cmp";
+  case VR_OP_CONV:
+    return "conv";
+  case VR_OP_MAX:
+    return "max";
+  case VR_OP_MIN:
+    return "min";
+  case VR_OP:
+    break;
+  }
+  return "unknown";
+}
+
+// *** Operation precision
+
+typedef enum {
+  VR_PREC_FLT,  // Single
+  VR_PREC_DBL,  // Double
+  VR_PREC_DBL_TO_FLT,
+  VR_PREC_FLT_TO_DBL,
+  VR_PREC_DBL_TO_INT,
+  VR_PREC_DBL_TO_SHT,
+  VR_PREC_FLT_TO_INT,
+  VR_PREC_FLT_TO_SHT,
+  VR_PREC
+} Vr_Prec;
+
+static const HChar* vr_ppPrec (Vr_Prec prec) {
+  switch (prec) {
+  case VR_PREC_FLT:
+    return "flt";
+  case VR_PREC_DBL:
+    return "dbl";
+  case VR_PREC_DBL_TO_FLT:
+    return "dbl=>flt";
+  case VR_PREC_FLT_TO_DBL:
+    return "flt=>dbl";
+  case VR_PREC_DBL_TO_INT:
+    return "dbl=>int";
+  case VR_PREC_FLT_TO_INT:
+    return "flt=>int";
+  case VR_PREC_DBL_TO_SHT:
+    return "dbl=>sht";
+  case VR_PREC_FLT_TO_SHT:
+    return "flt=>sht";
+  case VR_PREC:
+    break;
+  }
+  return "unknown";
+}
+
+// *** Vector operations
+
+typedef enum {
+  VR_VEC_SCAL,  // Scalar operation
+  VR_VEC_LLO,   // Vector operation, lowest lane only
+  VR_VEC_FULL2,  // Vector operation
+  VR_VEC_FULL4,  // Vector operation
+  VR_VEC_FULL8,  // Vector operation
+  VR_VEC
+} Vr_Vec;
+
+static const HChar* vr_ppVec (Vr_Vec vec) {
+  switch (vec) {
+  case VR_VEC_SCAL:
+    return "scal";
+  case VR_VEC_LLO:
+    return "llo ";
+  case VR_VEC_FULL2:
+    return "vec2 ";
+  case VR_VEC_FULL4:
+    return "vec4 ";
+  case VR_VEC_FULL8:
+    return "vec8 ";
+
+  default:
+    return "unknown";
+  }
+}
+
+// ** Counter handling
+
+
+static ULong vr_opCount[VR_OP][VR_PREC][VR_VEC][VR_INSTR];
+static VG_REGPARM(2) void vr_incOpCount (ULong* counter, SizeT increment) {
+  counter[vr.instrument] += increment;
+}
+
+static VG_REGPARM(2) void vr_incUnstrumentedOpCount (ULong* counter, SizeT increment) {
+  counter[VR_INSTR_OFF] += increment;
+}
+
+static void vr_countOp (IRSB* sb, Vr_Op op, Vr_Prec prec, Vr_Vec vec, Bool instr) {
+  if(!vr.count){
+    return;
+  }
+
+  IRExpr** argv;
+  IRDirty* di;
+  SizeT increment = 1;
+  if (vec == VR_VEC_FULL2) {
+    increment =2;
+  }
+  if(vec == VR_VEC_FULL4) {
+    increment =4;
+  }
+  if(vec == VR_VEC_FULL8) {
+    increment =8;
+  }
+
+  if(instr){
+    argv = mkIRExprVec_2 (mkIRExpr_HWord ((HWord)&vr_opCount[op][prec][vec]),
+			  mkIRExpr_HWord (increment));
+    di = unsafeIRDirty_0_N( 2,
+                          "vr_incOpCount",
+			    VG_(fnptr_to_fnentry)( &vr_incOpCount ),
+			    argv);
+
+  }else{
+    argv = mkIRExprVec_2 (mkIRExpr_HWord ((HWord)&vr_opCount[op][prec][vec]),
+			  mkIRExpr_HWord (increment));
+
+    di = unsafeIRDirty_0_N( 2,
+			    "vr_incUnstrumentedOpCount",
+			    VG_(fnptr_to_fnentry)( &vr_incUnstrumentedOpCount ),
+			    argv);
+
+  }
+
+  addStmtToIRSB (sb, IRStmt_Dirty (di));
+}
+
+
+
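+// vr_frac(a, b) returns the percentage a/(a+b), rounded to the nearest
+// integer: for example vr_frac(1, 2) == 33 and vr_frac(2, 1) == 67.  It is
+// only called from vr_ppOpCount() when a+b > 0, so the division is safe.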
+static unsigned int vr_frac (ULong a, ULong b) {
+  unsigned int q = (100*a)/(a+b);
+  if (100*a - (a+b)*q > (a+b)/2) {q++;}
+  return q;
+}
+
+void vr_ppOpCount (void) {
+  if(!vr.count)return ;
+  Vr_Op op;
+  Vr_Prec prec;
+  Vr_Vec vec;
+
+  VG_(umsg)(" ---------------------------------------------------------------------\n");
+  VG_(umsg)(" Operation                            Instruction count\n");
+  VG_(umsg)("  `- Precision\n");
+  VG_(umsg)("      `- Vectorization          Total             Instrumented\n");
+  VG_(umsg)(" ---------------------------------------------------------------------\n");
+  for (op = 0 ; op<VR_OP ; ++op) {
+    ULong countOp[VR_INSTR];
+    countOp[VR_INSTR_ON]  = 0;
+    countOp[VR_INSTR_OFF] = 0;
+
+    for (prec = 0 ; prec < VR_PREC ; ++prec) {
+      for (vec = 0 ; vec < VR_VEC ; ++vec) {
+        countOp[VR_INSTR_ON]  += vr_opCount[op][prec][vec][VR_INSTR_ON];
+        countOp[VR_INSTR_OFF] += vr_opCount[op][prec][vec][VR_INSTR_OFF];
+      }
+    }
+
+    if (countOp[VR_INSTR_ON] + countOp[VR_INSTR_OFF] > 0) {
+      VG_(umsg)(" %-6s       %15llu          %15llu          (%3u%%)\n",
+                vr_ppOp(op),
+                countOp[VR_INSTR_ON] + countOp[VR_INSTR_OFF],
+                countOp[VR_INSTR_ON],
+                vr_frac (countOp[VR_INSTR_ON], countOp[VR_INSTR_OFF]));
+
+      for (prec = 0 ; prec<VR_PREC ; ++prec) {
+        ULong countPrec[VR_INSTR];
+        countPrec[VR_INSTR_ON]  = 0;
+        countPrec[VR_INSTR_OFF] = 0;
+
+        for (vec = 0 ; vec<VR_VEC ; ++vec) {
+          countPrec[VR_INSTR_ON]  += vr_opCount[op][prec][vec][VR_INSTR_ON];
+          countPrec[VR_INSTR_OFF] += vr_opCount[op][prec][vec][VR_INSTR_OFF];
+        }
+
+        if (countPrec[VR_INSTR_ON] + countPrec[VR_INSTR_OFF] > 0) {
+          VG_(umsg)("  `- %-8s     %15llu          %15llu      (%3u%%)\n",
+                    vr_ppPrec(prec),
+                    countPrec[VR_INSTR_ON] + countPrec[VR_INSTR_OFF],
+                    countPrec[VR_INSTR_ON],
+                    vr_frac (countPrec[VR_INSTR_ON], countPrec[VR_INSTR_OFF]));
+
+          for (vec = 0 ; vec<VR_VEC ; ++vec) {
+            ULong * count = vr_opCount[op][prec][vec];
+            if (count[VR_INSTR_ON] + count[VR_INSTR_OFF] > 0) {
+              VG_(umsg)("      `- %-6s       %15llu          %15llu  (%3u%%)\n",
+                        vr_ppVec(vec),
+                        vr_opCount[op][prec][vec][VR_INSTR_ON] + vr_opCount[op][prec][vec][VR_INSTR_OFF],
+                        vr_opCount[op][prec][vec][VR_INSTR_ON],
+                        vr_frac (vr_opCount[op][prec][vec][VR_INSTR_ON], vr_opCount[op][prec][vec][VR_INSTR_OFF]));
+            }
+          }
+        }
+      }
+      VG_(umsg)(" ---------------------------------------------------------------------\n");
+    }
+  }
+}
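+
+/* Illustrative excerpt of the table printed by vr_ppOpCount() (the counts
+   below are made up):
+
+    ---------------------------------------------------------------------
+    Operation                            Instruction count
+     `- Precision
+         `- Vectorization          Total             Instrumented
+    ---------------------------------------------------------------------
+    add                    12000                12000          (100%)
+     `- dbl                12000                12000          (100%)
+         `- llo            12000                12000          (100%)
+    ---------------------------------------------------------------------
+*/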
+
+#include "vr_traceBB.c"
+
+
+// * Floating point operations overload
+
+
+// ** Overloaded operators
+
+
+#include "vr_interp_operator_impl.h"
+#include "vr_generated_from_templates.h"
+
+
+// *** Helpers
+
+
+/* Return the Lowest Lane of a given packed temporary register */
+static IRExpr* vr_getLLFloat (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I32);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_V128to32, expr)));
+  return IRExpr_RdTmp(tmp);
+}
+/* Return the Lowest Lane of a given packed temporary register */
+static IRExpr* vr_getLLDouble (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I64);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_V128to64, expr)));
+  return IRExpr_RdTmp(tmp);
+ }
+
+/* Return the Highest Lane of a given packed temporary register */
+static IRExpr* vr_getHLDouble (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I64);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_V128HIto64, expr)));
+  return IRExpr_RdTmp(tmp);
+ }
+
+
+
+/* Split a given 256-bit packed register into its four 64-bit lanes */
+static void vr_getTabArgAVX (IRSB* sb, IRExpr* expr, IRExpr** tab) {
+  IRTemp tmp0 = newIRTemp (sb->tyenv, Ity_I64);
+  IRTemp tmp1 = newIRTemp (sb->tyenv, Ity_I64);
+  IRTemp tmp2 = newIRTemp (sb->tyenv, Ity_I64);
+  IRTemp tmp3 = newIRTemp (sb->tyenv, Ity_I64);
+
+
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp0, IRExpr_Unop (Iop_V256to64_0, expr)));
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp1, IRExpr_Unop (Iop_V256to64_1, expr)));
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp2, IRExpr_Unop (Iop_V256to64_2, expr)));
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp3, IRExpr_Unop (Iop_V256to64_3, expr)));
+
+
+  tab[0]=IRExpr_RdTmp(tmp0);
+  tab[1]=IRExpr_RdTmp(tmp1);
+  tab[2]=IRExpr_RdTmp(tmp2);
+  tab[3]=IRExpr_RdTmp(tmp3);
+ }
+
+
+
+
+
+
+static IRExpr* vr_I32toI64 (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I64);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_32Uto64, expr)));
+  return IRExpr_RdTmp (tmp);
+}
+
+static IRExpr* vr_I64toI32 (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I32);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_64to32, expr)));
+  return IRExpr_RdTmp (tmp);
+}
+
+
+
+static IRExpr* vr_F64toI64 (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I64);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_ReinterpF64asI64, expr)));
+  return IRExpr_RdTmp (tmp);
+}
+
+
+static IRExpr* vr_F32toI64 (IRSB* sb, IRExpr* expr) {
+  IRTemp tmp = newIRTemp (sb->tyenv, Ity_I32);
+  addStmtToIRSB (sb, IRStmt_WrTmp (tmp, IRExpr_Unop (Iop_ReinterpF32asI32, expr)));
+  return vr_I32toI64(sb, IRExpr_RdTmp (tmp));
+}
+
+
+/* Get the operation from an expression
+   return False if the expression is not an operation
+ */
+static Bool vr_getOp (const IRExpr * expr, /*OUT*/ IROp * op) {
+  switch (expr->tag) {
+  case Iex_Unop:
+    *op = expr->Iex.Unop.op;
+    break;
+  case Iex_Binop:
+    *op = expr->Iex.Binop.op;
+    break;
+  case Iex_Triop:
+    *op = expr->Iex.Triop.details->op;
+    break;
+  case Iex_Qop:
+    *op = expr->Iex.Qop.details->op;
+    break;
+  default:
+    return False;
+  }
+  return True;
+}
+
+/* Replace a given binary operation by a call to a function
+ */
+static Bool vr_replaceBinFpOpScal (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+				   const HChar* functionName, void* function,
+				   Vr_Op op,
+				   Vr_Prec prec,
+				   Vr_Vec vec) {
+  //instrumentation to count operation
+
+  if(vr.verbose){
+    IROp irop;
+    if (vr_getOp (expr, &irop))
+      vr_maybe_record_ErrorOp (VR_ERROR_SCALAR, irop);
+  }
+
+  if(!vr.instr_op[op] ) {
+    vr_countOp (sb,  op, prec,vec, False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  if(!vr.instr_scalar) {
+    vr_countOp (sb,  op, prec,vec, False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  op, prec,vec, True);
+
+  //conversion before call
+  IRExpr * arg1;
+  IRExpr * arg2;
+
+  arg1 = expr->Iex.Triop.details->arg2;
+  // The argument shift is not a bug: the Triop is IRRoundingMode(I32) x F64 x F64 -> F64, so arg1 is the rounding mode
+  arg2 = expr->Iex.Triop.details->arg3;
+
+  IRType ity=Ity_I64; //type of call result
+  if (prec==VR_PREC_FLT) {
+    arg1=vr_F32toI64(sb,arg1);
+    arg2=vr_F32toI64(sb,arg2);
+    ity=Ity_I32;
+  }
+  if (prec==VR_PREC_DBL) {
+    arg1=vr_F64toI64(sb,arg1);
+    arg2=vr_F64toI64(sb,arg2);
+  }
+
+  //call
+  IRTemp res= newIRTemp (sb->tyenv, ity);
+  addStmtToIRSB (sb,
+		 IRStmt_Dirty(unsafeIRDirty_1_N (res, 2,
+						 functionName, VG_(fnptr_to_fnentry)(function),
+						 mkIRExprVec_2 (arg1, arg2))));
+
+  //conversion after call
+  if(prec==VR_PREC_FLT){
+      IRExpr* conv=vr_I64toI32(sb, IRExpr_RdTmp(res ));
+      addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				     IRExpr_Unop (Iop_ReinterpI32asF32, conv )));
+  }
+  if(prec==VR_PREC_DBL){
+      addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				       IRExpr_Unop (Iop_ReinterpI64asF64, IRExpr_RdTmp(res))));
+  }
+  return True;
+}
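+
+/* Illustrative sketch of the rewrite performed above for a double-precision
+   scalar addition (temporary names are made up, the IR shapes are the ones
+   handled by this function):
+
+       t4 = Triop(Iop_AddF64, rm, t1, t2)
+
+   becomes
+
+       t5 = Unop(Iop_ReinterpF64asI64, t1)
+       t6 = Unop(Iop_ReinterpF64asI64, t2)
+       t7 = DIRTY call to vr_verrouadd64F(t5, t6)
+       t4 = Unop(Iop_ReinterpI64asF64, t7)
+
+   The rounding-mode argument rm is dropped: the backend applies its own
+   rounding mode through the interflop API. */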
+
+
+
+static Bool vr_replaceBinFpOpLLO_slow_safe (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+					    const HChar* functionName, void* function,
+					    Vr_Op op,
+					    Vr_Prec prec,
+					    Vr_Vec vec){
+  //instrumentation to count operation
+  if(!vr.instr_op[op] || !vr.instrument) {
+    vr_countOp (sb,  op, prec,vec,False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  op, prec,vec, True);
+  //conversion before call
+  IRExpr * arg1LL=NULL;
+  IRExpr * arg1;
+  IRExpr * arg2LL;
+  IRType ity=Ity_I64;//type of call result
+
+  arg1 = expr->Iex.Binop.arg1;
+  arg2LL = expr->Iex.Binop.arg2;
+  if (prec==VR_PREC_FLT) {
+    arg1LL = vr_getLLFloat (sb, arg1);
+    arg2LL = vr_getLLFloat (sb, arg2LL);
+    arg1LL = vr_I32toI64 (sb, arg1LL);
+    arg2LL = vr_I32toI64 (sb, arg2LL);
+    ity=Ity_I32;
+  }
+  if (prec==VR_PREC_DBL) {
+    arg1LL = vr_getLLDouble (sb, arg1);
+    arg2LL = vr_getLLDouble (sb, arg2LL);
+  }
+
+  //call
+  IRTemp res=newIRTemp (sb->tyenv, ity);
+  addStmtToIRSB (sb,
+                 IRStmt_Dirty(unsafeIRDirty_1_N (res, 2,
+                                                 functionName, VG_(fnptr_to_fnentry)(function),
+                                                 mkIRExprVec_2 (arg1LL, arg2LL))));
+
+  //update after call
+  if (prec==VR_PREC_FLT){
+    addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				     IRExpr_Binop (Iop_SetV128lo32, arg1,IRExpr_RdTmp(res))));
+  }
+  if (prec==VR_PREC_DBL){
+    addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				     IRExpr_Binop (Iop_SetV128lo64,arg1,IRExpr_RdTmp(res))));
+  }
+  return True;
+}
+
+
+static Bool vr_replaceBinFpOpLLO_fast_unsafe (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+					      const HChar* functionName, void* function,
+					      Vr_Op op,
+					      Vr_Prec prec,
+					      Vr_Vec vec){
+  //instrumentation to count operation
+  if(!vr.instr_op[op] ) {
+    vr_countOp (sb,  op, prec,vec,False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  op, prec,vec, True);
+  //conversion before call
+
+  IRExpr * arg1;
+  IRExpr * arg2;
+  
+  IRType ity=Ity_I64;//type of call result
+
+  arg1 = expr->Iex.Binop.arg1;
+  arg2 = expr->Iex.Binop.arg2;
+  if (prec==VR_PREC_FLT) {
+    arg1 = vr_getLLFloat (sb, arg1);
+    arg2 = vr_getLLFloat (sb, arg2);
+    arg1 = vr_I32toI64 (sb, arg1);
+    arg2 = vr_I32toI64 (sb, arg2);
+    ity=Ity_I32;
+  }
+  if (prec==VR_PREC_DBL) {
+    arg1 = vr_getLLDouble (sb, arg1);
+    arg2 = vr_getLLDouble (sb, arg2);
+  }
+
+  //call
+  IRTemp res=newIRTemp (sb->tyenv, ity);
+  addStmtToIRSB (sb,
+                 IRStmt_Dirty(unsafeIRDirty_1_N (res, 2,
+                                                 functionName, VG_(fnptr_to_fnentry)(function),
+                                                 mkIRExprVec_2 (arg1, arg2))));
+
+  //update after call
+  IROp opReg;
+  if (prec==VR_PREC_FLT) opReg = Iop_32UtoV128;
+  if (prec==VR_PREC_DBL) opReg = Iop_64UtoV128;
+  addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				   IRExpr_Unop (opReg, IRExpr_RdTmp(res))));
+  return True;
+}
+
+
+static Bool vr_replaceBinFpOpLLO(IRSB* sb, IRStmt* stmt, IRExpr* expr,
+				 const HChar* functionName, void* function,
+				 Vr_Op op,
+				 Vr_Prec prec,
+				 Vr_Vec vec){
+  if(vr.unsafe_llo_optim){
+    return vr_replaceBinFpOpLLO_fast_unsafe(sb,stmt,expr,functionName,function,op,prec,vec);
+  }else{
+    return vr_replaceBinFpOpLLO_slow_safe(sb,stmt,expr,functionName,function,op,prec,vec);
+  }
+}
+
+
+
+
+static Bool vr_replaceBinFullSSE (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+				  const HChar* functionName, void* function,
+				  Vr_Op op,
+				  Vr_Prec prec,
+				  Vr_Vec vec) {
+  if(!vr.instr_op[op] || !vr.instrument) {
+    vr_countOp (sb,  op, prec,vec, False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  op, prec,vec, True);
+
+  if(!(
+	 (vec==VR_VEC_FULL2 && prec==VR_PREC_DBL)
+     ||
+         (vec==VR_VEC_FULL4 && prec==VR_PREC_FLT)
+       )){
+    VG_(tool_panic) ( "vr_replaceBinFullSSE requires SSE instructions...  \n");
+  }
+
+  //conversion before call
+  IRExpr * arg1 = expr->Iex.Triop.details->arg2;
+  IRExpr * arg2 = expr->Iex.Triop.details->arg3;
+
+
+  IRExpr *arg1Lo=vr_getLLDouble (sb, arg1);
+  IRExpr *arg1Hi=vr_getHLDouble (sb, arg1);
+  IRExpr *arg2Lo=vr_getLLDouble (sb, arg2);
+  IRExpr *arg2Hi=vr_getHLDouble (sb, arg2);
+
+  IRTemp res= newIRTemp (sb->tyenv, Ity_V128);
+
+
+
+  //call
+  addStmtToIRSB (sb,
+                 IRStmt_Dirty(unsafeIRDirty_1_N (res, 3,
+                                                 "", VG_(fnptr_to_fnentry)(function),
+                                                 mkIRExprVec_5 (IRExpr_VECRET(),
+								arg1Hi,arg1Lo,
+								arg2Hi,arg2Lo))));
+  //conversion after call
+  addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp, IRExpr_RdTmp(res)));
+  return True;
+}
+
+
+
+
+static Bool vr_replaceBinFullAVX (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+				  const HChar* functionName, void* function,
+				  Vr_Op op,
+				  Vr_Prec prec,
+				  Vr_Vec vec) {
+  if(!vr.instr_op[op] || !vr.instrument) {
+    vr_countOp (sb,  op, prec,vec,False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  op, prec,vec,True);
+
+
+  if(!(
+	 (vec==VR_VEC_FULL4 && prec==VR_PREC_DBL)
+     ||
+         (vec==VR_VEC_FULL8 && prec==VR_PREC_FLT)
+       )){
+    VG_(tool_panic) ( "vr_replaceBinFullAVX requires AVX instructions...  \n");
+  }
+
+  //conversion before call
+  IRExpr * arg1 = expr->Iex.Triop.details->arg2;
+  IRExpr * arg2 = expr->Iex.Triop.details->arg3;
+
+
+  IRExpr* arg1Tab[4];
+  IRExpr* arg2Tab[4];
+  vr_getTabArgAVX (sb, arg1, arg1Tab);
+  vr_getTabArgAVX (sb, arg2, arg2Tab);
+
+  IRTemp res= newIRTemp (sb->tyenv, Ity_V256);
+
+
+
+  //call
+
+  /* 1 call avx
+    addStmtToIRSB (sb,
+                 IRStmt_Dirty(unsafeIRDirty_1_N (res, 1,
+                                                 functionName, VG_(fnptr_to_fnentry)(function),
+                                                 mkIRExprVec_9 (IRExpr_VECRET(),
+								arg1Tab[0],arg1Tab[1], arg1Tab[2],arg1Tab[3],
+								arg2Tab[0],arg2Tab[1], arg2Tab[2],arg2Tab[3]
+								)
+								)));*/
+
+
+  if( prec==VR_PREC_DBL){
+    addStmtToIRSB (sb,
+		   IRStmt_Dirty(unsafeIRDirty_0_N (1,
+						   "vr_AvxDoubleCopyFirstArg", VG_(fnptr_to_fnentry)(&vr_AvxDoubleCopyFirstArg),
+						   mkIRExprVec_4 (arg1Tab[0],arg1Tab[1], arg1Tab[2],arg1Tab[3])
+						   )));
+  }else if(prec==VR_PREC_FLT){
+     addStmtToIRSB (sb,
+		   IRStmt_Dirty(unsafeIRDirty_0_N (1,
+						   "vr_AvxFloatCopyFirstArg", VG_(fnptr_to_fnentry)(&vr_AvxFloatCopyFirstArg),
+						   mkIRExprVec_4 (arg1Tab[0],arg1Tab[1], arg1Tab[2],arg1Tab[3])
+						   )));
+  }
+
+
+  addStmtToIRSB (sb,
+                 IRStmt_Dirty(unsafeIRDirty_1_N (res, 1,
+                                                 functionName, VG_(fnptr_to_fnentry)(function),
+                                                 mkIRExprVec_5 (IRExpr_VECRET(),
+								arg2Tab[0],arg2Tab[1], arg2Tab[2],arg2Tab[3]
+								)
+						 )));
+
+
+  //conversion after call
+  addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp, IRExpr_RdTmp(res)));
+  return True;
+}
+
+
+
+
+static Bool vr_replaceFMA (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+			   const HChar* functionName, void* function,
+			   Vr_Op   Op,
+			   Vr_Prec Prec) {
+  if(!vr.instr_op[Op] || !vr.instrument) {
+    vr_countOp (sb,  Op, Prec, VR_VEC_LLO,False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  Op, Prec, VR_VEC_LLO,True);
+
+#ifdef USE_VERROU_FMA
+  //  IRExpr * arg1 = expr->Iex.Qop.details->arg1; Rounding mode
+  IRExpr * arg2 = expr->Iex.Qop.details->arg2;
+  IRExpr * arg3 = expr->Iex.Qop.details->arg3;
+  IRExpr * arg4 = expr->Iex.Qop.details->arg4;
+  IRTemp res = newIRTemp (sb->tyenv, Ity_I64);
+  if(Prec== VR_PREC_DBL){
+    arg2=vr_F64toI64(sb,arg2);
+    arg3=vr_F64toI64(sb,arg3);
+    arg4=vr_F64toI64(sb,arg4);
+
+  }
+  if(Prec==VR_PREC_FLT){
+    arg2=vr_F32toI64(sb,arg2);
+    arg3=vr_F32toI64(sb,arg3);
+    arg4=vr_F32toI64(sb,arg4);
+  }
+
+  IRDirty* id= unsafeIRDirty_1_N (res, 3,
+				  functionName, VG_(fnptr_to_fnentry)(function),
+				  mkIRExprVec_3 (arg2, arg3,arg4));
+
+  addStmtToIRSB (sb,IRStmt_Dirty(id));
+
+
+
+  if(Prec==VR_PREC_FLT){
+    IRExpr* conv=vr_I64toI32(sb, IRExpr_RdTmp(res ));
+    addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+    				     IRExpr_Unop (Iop_ReinterpI32asF32, conv )));
+  }
+  if(Prec==VR_PREC_DBL){
+    addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				     IRExpr_Unop (Iop_ReinterpI64asF64, IRExpr_RdTmp(res))));
+  }
+  return True;
+#else //USE_VERROU_FMA
+  // should not happen
+  VG_(tool_panic) ( "Verrou needs to be compiled with FMA support \n");
+#endif //USE_VERROU_FMA
+}
+
+
+
+static Bool vr_replaceCast (IRSB* sb, IRStmt* stmt, IRExpr* expr,
+			    const HChar* functionName, void* function,
+			    Vr_Op   Op,
+			    Vr_Prec Prec) {
+  if(!vr.instr_op[Op] || !vr.instrument ) {
+    vr_countOp (sb,  Op, Prec, VR_VEC_SCAL,False);
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+  vr_countOp (sb,  Op, Prec, VR_VEC_SCAL,True);
+
+  IRExpr * arg2 = expr->Iex.Binop.arg2;
+
+  IRTemp res = newIRTemp (sb->tyenv, Ity_I64);
+
+  arg2=vr_F64toI64(sb,arg2);
+
+  IRDirty* id= unsafeIRDirty_1_N (res, 1,
+				  functionName, VG_(fnptr_to_fnentry)(function),
+				  mkIRExprVec_1 (arg2));
+
+  addStmtToIRSB (sb,IRStmt_Dirty(id));
+
+
+
+  IRExpr* conv=vr_I64toI32(sb, IRExpr_RdTmp(res ));
+  addStmtToIRSB (sb, IRStmt_WrTmp (stmt->Ist.WrTmp.tmp,
+				   IRExpr_Unop (Iop_ReinterpI32asF32, conv )));
+  return True;
+}
+
+
+
+
+
+static Bool vr_instrumentOp (IRSB* sb, IRStmt* stmt, IRExpr * expr, IROp op, vr_backend_name_t bc) {
+   Bool checkCancellation= (vr.checkCancellation || vr.dumpCancellation);
+   if(vr.backend==vr_verrou && !checkCancellation){
+#define bcName(OP) "vr_verrou"#OP, vr_verrou##OP
+#define bcNameWithCC(OP) "vr_verrou"#OP, vr_verrou##OP
+#include "vr_instrumentOp_impl.h"
+#undef bcName
+#undef bcNameWithCC
+   }
+   if(vr.backend==vr_verrou && checkCancellation){
+#define bcName(OP) "vr_verrou"#OP, vr_verrou##OP
+#define bcNameWithCC(OP) "vr_verroucheckcancellation"#OP, vr_verroucheckcancellation##OP
+#include "vr_instrumentOp_impl.h"
+#undef bcName
+#undef bcNameWithCC
+   }
+
+#ifdef USE_VERROU_QUAD
+   if(vr.backend==vr_mcaquad && !checkCancellation){
+#define bcName(OP) "vr_mcaquad"#OP, vr_mcaquad##OP
+#define bcNameWithCC(OP) "vr_mcaquad"#OP, vr_mcaquad##OP
+#include "vr_instrumentOp_impl.h"
+#undef bcName
+#undef bcNameWithCC
+   }
+   if(vr.backend==vr_mcaquad && checkCancellation){
+#define bcName(OP) "vr_mcaquad"#OP, vr_mcaquad##OP
+#define bcNameWithCC(OP) "vr_mcaquadcheckcancellation"#OP, vr_mcaquadcheckcancellation##OP
+#include "vr_instrumentOp_impl.h"
+#undef bcName
+#undef bcNameWithCC
+   }
+#else
+  if(vr.backend==vr_mcaquad){
+    VG_(tool_panic) ( "Verrou compiled without quad support...  \n");
+  }
+#endif //USE_VERROU_QUAD
+
+/* Example of a partial backend implementation */
+/*    if(vr.backend==vr_mcaquad){ */
+/* #define bcName(OP) "vr_mcaquad"#OP, vr_mcaquad##OP */
+/* //#define IGNOREFMA */
+/* //#define IGNORECAST */
+/* #include "vr_instrumentOp_impl.h" */
+/* //#undef IGNORECAST */
+/* //#undef IGNOREFMA */
+/* #undef bcName */
+/*    } */
+   return False;
+}
+
+static Bool vr_instrumentExpr (IRSB* sb, IRStmt* stmt, IRExpr* expr) {
+  IROp op;
+  //  ppIRStmt(stmt);VG_(printf)("\n");
+  if (vr_getOp (expr, &op)) {
+     return vr_instrumentOp (sb, stmt, expr, op, vr.backend);
+  } else {
+    addStmtToIRSB (sb, stmt);
+    return False;
+  }
+}
+
+// * Valgrind tool interface
+#define UNAMED_FUNCTION_VERROU "unamed_function_verrou"
+#define UNAMED_OBJECT_VERROU "unamed_object_verrou"
+#define UNAMED_FILENAME_VERROU "unamed_filename_verrou"
+
+
+
+
+static HChar const * fnnoname=UNAMED_FUNCTION_VERROU;
+static HChar const * objnoname=UNAMED_OBJECT_VERROU;
+static HChar const * filenamenoname=UNAMED_FILENAME_VERROU;
+
+
+static
+IRSB* vr_instrument ( VgCallbackClosure* closure,
+                      IRSB* sbIn,
+                      const VexGuestLayout* layout,
+                      const VexGuestExtents* vge,
+                      const VexArchInfo* archinfo_host,
+                      IRType gWordTy, IRType hWordTy )
+{
+  /* Retrieve the symbol and object names of the IRSB. */
+  /* This is not factored out into a helper function: the pointer returned by
+     VG_(get_fnname) is only valid until the next such call, so using fnname
+     after the end of that call from elsewhere would require a strdup.
+  */
+
+  HChar const * fnname;
+  HChar const * objname;
+  HChar const ** fnnamePtr=&fnname;
+  HChar const ** objnamePtr=&objname;
+
+  Addr ips[256];
+  VG_(get_StackTrace)(VG_(get_running_tid)(),ips, 256,
+		      NULL, NULL,0);
+  Addr addr = ips[0];
+  DiEpoch de = VG_(current_DiEpoch)();
+
+  Bool errorFnname=VG_(get_fnname)(de, addr, fnnamePtr);
+  if(!errorFnname || **fnnamePtr==0){
+    fnnamePtr=&fnnoname;
+  }
+
+  if (VG_(strlen)(*fnnamePtr) == VR_FNNAME_BUFSIZE-1) {
+    VG_(umsg)("WARNING: Function name too long: %s\n", *fnnamePtr);
+  }
+
+  Bool errorObjName=VG_(get_objname)(de, addr, objnamePtr);
+  if (!errorObjName || **objnamePtr == 0) {
+    objnamePtr=&objnoname;
+  }
+  /* End of name retrieval */
+
+  /*Early exit if not instrumented*/
+  if (vr_excludeIRSB (fnnamePtr, objnamePtr)){
+    return sbIn;
+  }
+
+
+  /*Instrumentation begin*/
+  UInt i;
+  IRSB* sbOut = deepCopyIRSBExceptStmts(sbIn);
+
+  Bool doIRSBFContainFloat=False;
+
+  /* Data for IMark localisation */
+  Bool includeSource = True;
+  Bool doLineContainFloat=False;
+
+  const HChar * filename=NULL;
+  const HChar ** filenamePtr=&filenamenoname;
+  UInt  linenum;
+  UInt*  linenumPtr=&linenum;
+
+  /*Data for trace/coverage generation*/
+  traceBB_t* traceBB=NULL;
+  Bool genIRSBTrace=vr.genTrace &&  vr_includeTraceIRSB(&fnname,&objname);
+  if(genIRSBTrace){
+    traceBB=getNewTraceBB(sbIn);
+    vr_traceIRSB(sbOut,traceBB->index, &(traceBB->counter), instrCount);
+    vr_traceBB_trace_backtrace(traceBB);
+  }
+
+
+  /*Loop over instructions*/
+  for (i=0 ; i<sbIn->stmts_used ; ++i) {
+    IRStmt* st = sbIn->stmts[i];
+
+    switch (st->tag) {
+    case Ist_IMark: {
+      if(vr.genIncludeSource && doLineContainFloat){
+	  vr_includeSource_generate (&vr.includeSource, *fnnamePtr, *filenamePtr, *linenumPtr);
+      }
+      doLineContainFloat=False;
+
+      Addr  addrMark;
+      addrMark = st->Ist.IMark.addr;
+
+      //      filename[0] = 0;
+      filenamePtr=&filename;
+      Bool success=VG_(get_filename_linenum)(VG_(current_DiEpoch)(),
+					     addrMark,
+					     filenamePtr,
+					     NULL,
+					     linenumPtr);
+      if(! success || (**filenamePtr)==0){
+	filenamePtr=&filenamenoname;
+      }
+      if(genIRSBTrace){
+	vr_traceBB_trace_imark(traceBB,*fnnamePtr, *filenamePtr,*linenumPtr);
+      }
+      if(!vr.genIncludeSource){
+	includeSource =(!vr.sourceActivated) || (vr.sourceActivated&&  vr_includeSource (&vr.includeSource, *fnnamePtr, *filenamePtr, *linenumPtr));
+      }
+
+      addStmtToIRSB (sbOut, sbIn->stmts[i]); // required to be able to use breakpoints with gdb
+    }
+      break;
+    case Ist_WrTmp:
+      if (includeSource) {
+        Bool doInstrContainFloat= vr_instrumentExpr (sbOut, st, st->Ist.WrTmp.data);
+	doLineContainFloat=doLineContainFloat   || doInstrContainFloat;
+	doIRSBFContainFloat=doIRSBFContainFloat || doInstrContainFloat;
+        break;
+      }
+    default:
+      addStmtToIRSB (sbOut, sbIn->stmts[i]);
+    }
+  }
+
+  if(vr.genIncludeSource && doLineContainFloat &&filename !=NULL){
+    vr_includeSource_generate (&vr.includeSource, *fnnamePtr, *filenamePtr, *linenumPtr);
+
+
+  }
+  if(vr.genExclude && doIRSBFContainFloat){
+    vr_excludeIRSB_generate (fnnamePtr, objnamePtr);
+    /* Debug code to understand where the floating point operations in unnamed symbols come from:
+      if(fnnamePtr==&fnnoname){
+      for(int i=0 ; i< 6; i++){
+	Addr addr = ips[i];
+	Bool errorFnname =VG_(get_fnname)(de, addr, fnnamePtr);
+	Bool errorObjName=VG_(get_objname)(de, addr, objnamePtr);
+	VG_(umsg)("stack %d : %s %s\n", i,*fnnamePtr, *objnamePtr);
+      }
+    }
+    */
+  }
+  return sbOut;
+}
+
+static void vr_fini(Int exitcode)
+{
+
+  //if (vr.checkCancellation) {
+     //VG_(fclose)(vr_outCancellationFile);
+  //}
+
+
+  vr_ppOpCount ();
+  interflop_verrou_finalyze(backend_verrou_context);
+#ifdef USE_VERROU_QUAD
+  interflop_mcaquad_finalyze(backend_mcaquad_context);
+#endif
+  interflop_checkcancellation_finalyze(backend_checkcancellation_context);
+
+
+  if (vr.genExclude) {
+    vr_dumpExcludeList(vr.exclude, vr.genExcludeUntil,
+                       vr.excludeFile);
+  }
+
+  if (vr.genIncludeSource) {
+    vr_dumpIncludeSourceList (vr.includeSource, vr.genIncludeSourceUntil,
+                              vr.includeSourceFile);
+  }
+
+  if(vr.genTrace){
+    vr_traceBB_dumpCov();
+    vr_traceBB_finalyze();
+  }
+  if (vr.dumpCancellation){
+     vr_dumpIncludeSourceList(vr.cancellationSource, NULL, vr.cancellationDumpFile );
+  }
+  vr_freeExcludeList (vr.exclude);
+  vr_freeIncludeSourceList (vr.includeSource);
+  vr_freeIncludeTraceList  (vr.includeTrace );
+  VG_(free)(vr.excludeFile);
+  //  VG_(free)(vr.genAbove);
+}
+
+//void vr_cancellation_handler(int cancelled ){
+//  VG_(fprintf)(vr_outCancellationFile, "C  %d\n", cancelled);
+//}
+
+static void print_op(int nbArg, const char* name, const double* args,const double* res){
+  if(nbArg==1){
+    VG_(umsg)("%s : %f => %f\n", name,args[0],*res);
+  }
+  if(nbArg==2){
+    VG_(umsg)("%s : %f, %f => %f\n", name,args[0], args[1],*res);
+  }
+  if(nbArg==3){
+    VG_(umsg)("%s : %f, %f, %f => %f\n", name, args[0], args[1], args[2], *res);
+  }
+}
+
+
+static void vr_post_clo_init(void)
+{
+   // Values coming from the environment take precedence over CLOs
+   vr_env_clo("VERROU_ROUNDING_MODE", "--rounding-mode");
+   vr_env_clo("VERROU_INSTR_ATSTART", "--instr-atstart");
+   vr_env_clo("VERROU_EXCLUDE",       "--exclude");
+   vr_env_clo("VERROU_GEN_EXCLUDE",   "--gen-exclude");
+   //   vr_env_clo("VERROU_GEN_ABOVE",     "--gen-above");
+   vr_env_clo("VERROU_SOURCE",        "--source");
+   vr_env_clo("VERROU_GEN_SOURCE",    "--gen-source");
+   vr_env_clo("VERROU_MCA_MODE",      "--mca-mode");
+
+   vr_env_clo("VERROU_BACKEND", "--backend");
+   vr_env_clo("VERROU_MCA_PRECISION_DOUBLE", "--mca-precision-double");
+   vr_env_clo("VERROU_MCA_PRECISION_FLOAT", "--mca-precision-float");
+
+   if (vr.genExclude) {
+     vr.genExcludeUntil = vr.exclude;
+   }
+
+   //   if (vr.genAbove == NULL) {
+   //     vr.genAbove = VG_(strdup)("vr.post_clo_init.gen-above", "main");
+   //   }
+
+   //Random Seed initialisation
+   if(vr.firstSeed==(unsigned int )(-1)){
+      struct vki_timeval now;
+      VG_(gettimeofday)(&now, NULL);
+      unsigned int pid = VG_(getpid)();
+      vr.firstSeed = now.tv_usec + pid;
+   }
+   VG_(umsg)("First seed : %u\n", vr.firstSeed);
+
+   // Verrou backend initialisation
+   backend_verrou=interflop_verrou_init(&backend_verrou_context);
+   verrou_set_panic_handler(&VG_(tool_panic));
+
+   verrou_set_nan_handler(&vr_handle_NaN);
+
+   verrou_set_debug_print_op(&print_op); // used only if the verrou backend is configured to use it
+
+   VG_(umsg)("Backend %s : %s\n", interflop_verrou_get_backend_name() , interflop_verrou_get_backend_version()  );
+
+   interflop_verrou_configure(vr.roundingMode,backend_verrou_context);
+   verrou_set_seed (vr.firstSeed);
+
+
+   /* Configuration of the MCA backend */
+#ifdef USE_VERROU_QUAD
+   backend_mcaquad=interflop_mcaquad_init(&backend_mcaquad_context);
+   mcaquad_set_panic_handler(&VG_(tool_panic));
+
+   VG_(umsg)("Backend %s : %s\n", interflop_mcaquad_get_backend_name(), interflop_mcaquad_get_backend_version()  );
+
+
+   mcaquad_conf_t mca_quad_conf;
+   mca_quad_conf.precision_float=vr.mca_precision_float;
+   mca_quad_conf.precision_double=vr.mca_precision_double;
+   mca_quad_conf.mode=vr.mca_mode;
+   interflop_mcaquad_configure(mca_quad_conf, backend_mcaquad_context);
+   mcaquad_set_seed(vr.firstSeed);
+#endif
+
+   /* Initialise the cancellation-check backend */
+   checkcancellation_conf_t checkcancellation_conf;
+   checkcancellation_conf.threshold_float= vr.cc_threshold_float;
+   checkcancellation_conf.threshold_double= vr.cc_threshold_double;
+   backend_checkcancellation=interflop_checkcancellation_init(&backend_checkcancellation_context);
+   interflop_checkcancellation_configure(checkcancellation_conf,backend_checkcancellation_context);
+   if (vr.checkCancellation || vr.dumpCancellation) {
+//     vr_outCancellationFile = VG_(fopen)("vr.log",
+//					 VKI_O_WRONLY | VKI_O_CREAT | VKI_O_TRUNC,
+//					 VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IROTH);
+     checkcancellation_set_cancellation_handler(&vr_handle_CC); //valgrind error
+
+     VG_(umsg)("Backend %s : %s\n", interflop_checkcancellation_get_backend_name(), interflop_checkcancellation_get_backend_version()  );
+
+   }
+  if(vr.genTrace){
+     vr_traceBB_initialize();
+   }
+
+   /* If no operation was selected, instrument all of them by default */
+   Bool someThingInstr=False;
+   int opIt;
+   for(opIt=0; opIt< VR_OP ;opIt++){
+     if(vr.instr_op[opIt]) someThingInstr=True;
+   }
+   if(!someThingInstr){
+     for(opIt=0; opIt < VR_OP_CMP; opIt++){ // operations from VR_OP_CMP (included) onwards are not instrumented here
+       vr.instr_op[opIt]=True;
+     }
+     vr.instr_op[VR_OP_CONV]=True;
+   }
+   VG_(umsg)("Instrumented operations :\n");
+   for (opIt=0; opIt< VR_OP ;opIt++){
+     VG_(umsg)("\t%s : ", vr_ppOp(opIt));
+     if(vr.instr_op[opIt]==True) VG_(umsg)("yes\n");
+     else VG_(umsg)("no\n");
+   }
+   VG_(umsg)("Instrumented scalar operations : ");
+   if(vr.instr_scalar) VG_(umsg)("yes\n");
+   else VG_(umsg)("no\n");
+
+   if(!vr.instrument){
+     vr.instrument = True;
+     vr_set_instrument_state ("Program start", False, False);
+   }
+
+   if(vr.backend==vr_verrou){
+      VG_(umsg)("Backend verrou simulating %s rounding mode\n", verrou_rounding_mode_name (vr.roundingMode));
+   }
+   if(vr.backend==vr_mcaquad){
+#ifdef USE_VERROU_QUAD
+     VG_(umsg)("Backend mcaquad simulating mode %s with precision %u for double and %u for float\n", mcaquad_mode_name(vr.mca_mode), vr.mca_precision_double, vr.mca_precision_float );
+#else
+     VG_(tool_panic)("Verrou compiled without quad support");
+#endif
+   }
+}
+
+
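+// Tool registration: declare Verrou's details, callbacks and error-handling
+// hooks to the Valgrind core.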
+static void vr_pre_clo_init(void)
+{
+   VG_(details_name)            ("Verrou");
+   VG_(details_version)         (NULL);
+   VG_(details_description)     ("Check floating-point rounding errors");
+   VG_(details_copyright_author)(
+      "Copyright (C) 2014-2016, F. Fevotte & B. Lathuiliere.");
+   VG_(details_bug_reports_to)  (VG_BUGS_TO);
+
+   VG_(details_avg_translation_sizeB) ( 275 );
+
+
+   VG_(clo_vex_control).iropt_register_updates_default
+     = VG_(clo_px_file_backed)
+     = VexRegUpdSpAtMemAccess; // overridable by the user.
+
+   VG_(clo_vex_control).iropt_unroll_thresh = 0;   // cannot be overridden.
+
+   VG_(basic_tool_funcs)        (vr_post_clo_init,
+                                 vr_instrument,
+                                 vr_fini);
+
+
+   VG_(needs_command_line_options)(vr_process_clo,
+                                   vr_print_usage,
+                                   vr_print_debug_usage);
+
+   VG_(needs_client_requests)(vr_handle_client_request);
+
+
+   VG_(needs_tool_errors)(vr_eq_Error,
+                          vr_before_pp_Error,
+                          vr_pp_Error,
+			  False,                          //show_ThreadIDs_for_errors
+                          vr_update_extra,
+                          vr_recognised_suppression,
+                          vr_read_extra_suppression_info,
+                          vr_error_matches_suppression,
+                          vr_get_error_name,
+                          vr_print_extra_suppression_info,
+                          vr_print_extra_suppression_use,
+                          vr_update_extra_suppression_use);
+
+   vr_clo_defaults();
+}
+
+VG_DETERMINE_INTERFACE_VERSION(vr_pre_clo_init)
+
+/*--------------------------------------------------------------------*/
+/*--- end                                                          ---*/
+/*--------------------------------------------------------------------*/
diff --git a/verrou/vr_main.h b/verrou/vr_main.h
new file mode 100644
index 0000000000000000000000000000000000000000..49470bbfa3c5acd9b87579d882124a5754e72315
--- /dev/null
+++ b/verrou/vr_main.h
@@ -0,0 +1,251 @@
+
+/*--------------------------------------------------------------------*/
+/*--- Verrou: a FPU instrumentation tool.                          ---*/
+/*--- Header file for core valgrind-based features.                ---*/
+/*---                                                    vr_main.h ---*/
+/*--------------------------------------------------------------------*/
+
+/*
+   This file is part of Verrou, a FPU instrumentation tool.
+
+   Copyright (C) 2014-2016
+     F. Févotte     <francois.fevotte@edf.fr>
+     B. Lathuilière <bruno.lathuiliere@edf.fr>
+
+   This program is free software; you can redistribute it and/or
+   modify it under the terms of the GNU General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful, but
+   WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+   02111-1307, USA.
+
+   The GNU General Public License is contained in the file COPYING.
+*/
+
+#ifndef __VR_MAIN_H
+#define __VR_MAIN_H
+
+#include "pub_tool_basics.h"
+#include "pub_tool_vki.h"
+#include "pub_tool_debuginfo.h"
+#include "pub_tool_libcbase.h"
+#include "pub_tool_libcassert.h"
+#include "pub_tool_libcfile.h"
+#include "pub_tool_libcprint.h"
+#include "pub_tool_libcproc.h"
+#include "pub_tool_machine.h"
+#include "pub_tool_mallocfree.h"
+#include "pub_tool_options.h"
+#include "pub_tool_oset.h"
+#include "pub_tool_tooliface.h"
+#include "pub_tool_xarray.h"
+#include "pub_tool_clientstate.h"
+#include "pub_tool_machine.h"
+#include "pub_tool_stacktrace.h"
+#include "pub_tool_threadstate.h"
+#include "pub_tool_gdbserver.h"
+
+#include "verrou.h"
+
+//backend
+#include "backend_verrou/interflop_verrou.h"
+
+#ifdef USE_VERROU_QUAD
+#include "backend_mcaquad/interflop_mcaquad.h"
+#endif
+typedef enum vr_backend_name{vr_verrou,vr_mcaquad} vr_backend_name_t;
+
+//backend post-treatment
+#include "backend_checkcancellation/interflop_checkcancellation.h"
+typedef enum vr_backendpost_name{vr_nopost,vr_checkcancellation} vr_backendpost_name_t;
+
+
+// * Type declarations
+
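+// Instrumentation state: off or on (VR_INSTR closes the enum).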
+typedef enum {
+  VR_INSTR_OFF,
+  VR_INSTR_ON,
+  VR_INSTR
+} Vr_Instr;
+
+typedef enum {
+  VR_OP_ADD,    // Addition
+  VR_OP_SUB,    // Subtraction
+  VR_OP_MUL,    // Multiplication
+  VR_OP_DIV,    // Division
+  VR_OP_MADD,   // FMA ADD
+  VR_OP_MSUB,   // FMA SUB
+  VR_OP_CMP,    // Comparison
+  VR_OP_CONV,    // Conversion
+  VR_OP_MAX,    // Maximum
+  VR_OP_MIN,    // Minimum
+  VR_OP
+} Vr_Op; // Warning: operations after VR_OP_CMP (included) are not instrumented
+
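+// Linked list of (function, object) pairs excluded from instrumentation.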
+typedef struct Vr_Exclude_ Vr_Exclude;
+struct Vr_Exclude_ {
+  HChar*      fnname;
+  HChar*      objname;
+  Bool        used;
+  Vr_Exclude* next;
+};
+
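+// Linked list of (function, object) pairs selecting which basic blocks are traced.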
+typedef struct Vr_Include_Trace_ Vr_Include_Trace;
+struct Vr_Include_Trace_ {
+  HChar*      fnname;
+  HChar*      objname;
+  Vr_Include_Trace* next;
+};
+
+
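+// Linked list of (function, file, line) locations used for source-based
+// filtering and for cancellation reports.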
+typedef struct Vr_IncludeSource_ Vr_IncludeSource;
+struct Vr_IncludeSource_ {
+  HChar*            fnname;
+  HChar*            filename;
+  UInt              linenum;
+  Vr_IncludeSource* next;
+};
+
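+// Global tool state, filled from the command-line options and the VERROU_*
+// environment variables (see vr_clo.c).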
+typedef struct {
+  vr_backend_name_t backend;
+  enum vr_RoundingMode roundingMode;
+  Bool count;
+  Bool instr_op[VR_OP];
+  Bool instr_scalar;
+  Vr_Instr instrument;
+  Bool verbose;
+  Bool unsafe_llo_optim;
+
+  UInt firstSeed;
+
+  Bool genExclude;
+  HChar * excludeFile;
+  //  HChar * genAbove;
+  Vr_Exclude * exclude;
+  Vr_Exclude * genExcludeUntil;
+
+  Bool genIncludeSource;
+  HChar* includeSourceFile;
+
+  Bool sourceActivated;
+  Vr_IncludeSource *includeSource;
+  Vr_IncludeSource *genIncludeSourceUntil;
+
+  UInt mca_precision_double;
+  UInt mca_precision_float;
+  UInt mca_mode;
+
+  Bool checknan;
+
+  Bool checkCancellation;
+  UInt cc_threshold_double;
+  UInt cc_threshold_float;
+
+  Bool dumpCancellation;
+  HChar* cancellationDumpFile;
+  Vr_IncludeSource * cancellationSource;
+
+  Bool genTrace;
+  Vr_Include_Trace* includeTrace;
+} Vr_State;
+
+extern Vr_State vr;
+
+
+// * Functions declarations
+
+// ** vr_main.c
+
+void vr_ppOpCount (void);
+void vr_cancellation_handler(int cancelled );
+
+
+// ** vr_clreq.c
+
+Bool vr_handle_client_request (ThreadId tid, UWord *args, UWord *ret);
+void vr_set_instrument_state (const HChar* reason, Vr_Instr state, Bool discard);
+
+
+// ** vr_error.c
+
+typedef enum {
+  VR_ERROR_UNCOUNTED,
+  VR_ERROR_SCALAR,
+  VR_ERROR_NAN,
+  VR_ERROR_CC,
+  VR_ERROR
+} Vr_ErrorKind;
+
+const HChar* vr_get_error_name (const Error* err);
+Bool vr_recognised_suppression (const HChar* name, Supp* su);
+void vr_before_pp_Error (const Error* err) ;
+void vr_pp_Error (const Error* err);
+Bool vr_eq_Error (VgRes res, const Error* e1, const Error* e2);
+UInt vr_update_extra (const Error* err);
+Bool vr_error_matches_suppression (const Error* err, const Supp* su);
+Bool vr_read_extra_suppression_info (Int fd, HChar** bufpp, SizeT* nBuf,
+                                     Int* lineno, Supp* su);
+SizeT vr_print_extra_suppression_info (const Error* er,
+                                      /*OUT*/HChar* buf, Int nBuf);
+SizeT vr_print_extra_suppression_use (const Supp* s,
+                                     /*OUT*/HChar* buf, Int nBuf);
+void vr_update_extra_suppression_use (const Error* err, const Supp* su);
+
+
+void vr_maybe_record_ErrorOp (Vr_ErrorKind kind, IROp op);
+void vr_maybe_record_ErrorRt (Vr_ErrorKind kind);
+void vr_handle_NaN (void);
+void vr_handle_CC (int);
+
+// ** vr_exclude.c
+
+void        vr_freeExcludeList (Vr_Exclude* list);
+void        vr_dumpExcludeList (Vr_Exclude* list, Vr_Exclude* end,
+                                const HChar* filename);
+Vr_Exclude* vr_loadExcludeList (Vr_Exclude * list, const HChar * filename);
+Bool        vr_excludeIRSB(const HChar** fnname, const HChar** objname);
+void        vr_excludeIRSB_generate(const HChar** fnname, const HChar** objname);
+
+void vr_freeIncludeSourceList (Vr_IncludeSource* list);
+void vr_dumpIncludeSourceList (Vr_IncludeSource* list, Vr_IncludeSource* end,
+                               const HChar* fname);
+Vr_IncludeSource * vr_loadIncludeSourceList (Vr_IncludeSource * list, const HChar * fname);
+Bool vr_includeSource (Vr_IncludeSource** list,
+                       const HChar* fnname, const HChar* filename, UInt linenum);
+void vr_includeSource_generate (Vr_IncludeSource** list,
+				const HChar* fnname, const HChar* filename, UInt linenum);
+
+
+// ** vr_include_trace.c
+void vr_freeIncludeTraceList (Vr_Include_Trace* list) ;
+Vr_Include_Trace * vr_loadIncludeTraceList (Vr_Include_Trace * list, const HChar * fname);
+Bool vr_includeTraceIRSB (const HChar** fnname, const HChar **objname);
+
+
+//**  vr_traceBB.c
+
+
+void vr_traceBB_resetCov(void);
+UInt vr_traceBB_dumpCov(void);
+
+#define VR_FNNAME_BUFSIZE 4096
+
+
+// ** vr_clo.c
+
+void vr_env_clo (const HChar* env, const HChar *clo);
+void vr_clo_defaults (void);
+Bool vr_process_clo (const HChar *arg);
+void vr_print_usage (void);
+void vr_print_debug_usage (void);
+
+
+#endif /*ndef __VR_MAIN_H*/
diff --git a/verrou/vr_traceBB.c b/verrou/vr_traceBB.c
new file mode 100644
index 0000000000000000000000000000000000000000..8fdda0a0430452be3efc68375a3a9236c78cd094
--- /dev/null
+++ b/verrou/vr_traceBB.c
@@ -0,0 +1,207 @@
+#include "vr_main.h"
+
+
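+// One node per traced superblock (IRSB): its index in the trace output, a
+// coverage counter and a link to the previously registered block.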
+struct traceBB_T {
+  IRSB* irsb;
+  UInt index;
+  UInt counter;
+  struct traceBB_T*  next;
+};
+
+typedef struct traceBB_T traceBB_t;
+
+VgFile * vr_out_bb_info = NULL;
+VgFile * vr_out_bb_info_backtrace= NULL;
+VgFile * vr_out_bb_trace= NULL;
+VgFile * vr_out_bb_cov= NULL;
+
+traceBB_t* traceList=NULL ;
+
+/* Runtime callbacks, inserted as dirty helpers: they run each time the
+   corresponding basic block is executed. */
+static void vr_trace_dyn_IRSB(HWord index, HWord counterPtr){
+  VG_(fprintf)(vr_out_bb_trace,"%u\n",(UInt)index);
+}
+
+static void vr_count_dyn_IRSB(HWord index, HWord counterPtr){
+  //  VG_(fprintf)(vr_out_bb_trace,"%u\n",(UInt)index);
+  *((UInt*)counterPtr) +=1;
+}
+
+static void vr_countAndTrace_dyn_IRSB(HWord index, HWord counterPtr){
+  VG_(fprintf)(vr_out_bb_trace,"%u\n",(UInt)index);
+  *((UInt*)counterPtr) +=1;
+}
+
+
+typedef enum typeInstr{ instrTrace, instrCount, instrCountAndTrace} typeInstr_t;
+
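+// Append a dirty call to the output IRSB so that the selected callback
+// (trace, count, or both) runs each time the block is executed.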
+static void vr_traceIRSB (IRSB* out, UInt  index, UInt* counterPtr, typeInstr_t select) {
+
+  IRExpr** argv = mkIRExprVec_2 (mkIRExpr_HWord ((HWord)index),
+				 mkIRExpr_HWord ((HWord)counterPtr));
+  IRDirty* di;
+
+  switch (select){
+  case instrTrace:
+    di = unsafeIRDirty_0_N(2,
+			   "vr_trace_dyn_IRSB",
+			   VG_(fnptr_to_fnentry)( &vr_trace_dyn_IRSB ),
+			   argv);
+    break;
+  case instrCount:
+    di = unsafeIRDirty_0_N(2,
+			   "vr_count_dyn_IRSB",
+			   VG_(fnptr_to_fnentry)( &vr_count_dyn_IRSB ),
+			   argv);
+    break;
+  case instrCountAndTrace:
+    di = unsafeIRDirty_0_N(2,
+			   "vr_countAndTrace_dyn_IRSB",
+			   VG_(fnptr_to_fnentry)( &vr_countAndTrace_dyn_IRSB),
+			   argv);
+    break;
+  }
+
+  addStmtToIRSB (out, IRStmt_Dirty (di));
+}
+
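+// Allocate a new traceBB node, give it the next free index and push it onto
+// the global traceList.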
+traceBB_t* getNewTraceBB(IRSB* irsb_in);
+traceBB_t* getNewTraceBB(IRSB* irsb_in){
+  traceBB_t * res = VG_(malloc)("vr.getNewTraceBB", sizeof(traceBB_t));
+  res->next=traceList;
+  res->counter=0;
+  if(res->next !=NULL){
+    res->index=((res->next)->index) + 1;
+  }else{
+    res->index=0;
+  }
+  res->irsb=irsb_in;
+
+  traceList=res;
+  return res;
+}
+
+void freeTraceBBList(void);
+void freeTraceBBList(void){
+    while (traceList != NULL) {
+      traceBB_t* next= traceList->next;
+      VG_(free)(traceList);
+      traceList=next;
+    }
+}
+
+
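+// Open the four trace output files; "%p" in the names is expanded to the pid.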
+void vr_traceBB_initialize(void);
+void vr_traceBB_initialize(void){
+  const HChar * strInfo="trace_bb_info.log-%p";
+  const HChar * strTrace="trace_bb_trace.log-%p";
+  const HChar * strCov="trace_bb_cov.log-%p";
+  const HChar * strInfoBack="trace_bb_info_backtrace.log-%p";
+  const HChar * strExpInfo=   VG_(expand_file_name)("vr.traceBB.strInfo",  strInfo);
+  const HChar * strExpTrace=  VG_(expand_file_name)("vr.traceBB.strTrace", strTrace);
+  const HChar * strExpCov=  VG_(expand_file_name)("vr.traceBB.strCov", strCov);
+  const HChar * strExpBack=  VG_(expand_file_name)("vr.traceBB.strBack", strInfoBack);
+
+  vr_out_bb_info = VG_(fopen)(strExpInfo,
+			      VKI_O_WRONLY | VKI_O_CREAT | VKI_O_TRUNC,
+			      VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IROTH);
+  vr_out_bb_trace = VG_(fopen)(strExpTrace,
+			       VKI_O_WRONLY | VKI_O_CREAT | VKI_O_TRUNC,
+			       VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IROTH);
+  vr_out_bb_cov = VG_(fopen)(strExpCov,
+			       VKI_O_WRONLY | VKI_O_CREAT | VKI_O_TRUNC,
+			       VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IROTH);
+  vr_out_bb_info_backtrace = VG_(fopen)(strExpBack,
+			       VKI_O_WRONLY | VKI_O_CREAT | VKI_O_TRUNC,
+			       VKI_S_IRUSR|VKI_S_IWUSR|VKI_S_IRGRP|VKI_S_IROTH);
+
+  if(vr_out_bb_trace==NULL || vr_out_bb_info==NULL || vr_out_bb_info_backtrace==NULL || vr_out_bb_cov==NULL){
+    VG_(tool_panic)("trace file initialization failed\n");
+  }
+}
+
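+// Free the traceBB list and close the trace output files.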
+void vr_traceBB_finalyze(void);
+void vr_traceBB_finalyze(void){
+   freeTraceBBList();
+
+  if(vr_out_bb_info!=NULL){
+    VG_(fclose)(vr_out_bb_info);
+  }
+  if(vr_out_bb_trace!=NULL){
+    VG_(fclose)(vr_out_bb_trace);
+  }
+  if(vr_out_bb_info_backtrace !=NULL){
+    VG_(fclose)(vr_out_bb_info_backtrace);
+  }
+  if(vr_out_bb_cov !=NULL){
+    VG_(fclose)(vr_out_bb_cov);
+  }
+
+}
+
+
+void vr_traceBB_resetCov(void){
+  traceBB_t* current=traceList;
+  while (current != NULL) {
+    current->counter=0;
+    current = current->next;
+  }
+}
+
+
+
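+// Dump the non-zero coverage counters as a new "cover-N" section, reset them,
+// and return the index of the section just written.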
+UInt vr_traceBB_dumpCov(void){
+  static UInt numPartialCov=0;
+
+  VG_(fprintf)(vr_out_bb_cov, "cover-%u\n", numPartialCov);
+  traceBB_t* current=traceList;
+  while (current != NULL) {
+    if(current->counter!=0){
+      VG_(fprintf)(vr_out_bb_cov,"%u:%u\n",(current->index),(current->counter));
+    }
+    current = current->next;
+  }
+  numPartialCov+=1;
+  vr_traceBB_resetCov();
+  return numPartialCov-1;
+}
+
+
+
+
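+// Record in the info file the function / source location attached to an IMark
+// of the traced superblock.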
+void vr_traceBB_trace_imark(traceBB_t* tr,const HChar * fnname,const HChar * filename,UInt lineNum);
+void vr_traceBB_trace_imark(traceBB_t* tr,const HChar * fnname,const HChar * filename,UInt lineNum){
+  VG_(fprintf)(vr_out_bb_info, "%u : %s : %s : %u\n", (tr->index), fnname, filename, lineNum);
+}
+
+
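+// Write the guest backtrace of the superblock being traced, truncated at the
+// main (or below-main) frame.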
+void vr_traceBB_trace_backtrace(traceBB_t* tr);
+void vr_traceBB_trace_backtrace(traceBB_t* tr){
+  Addr ips[256];
+
+  const HChar * fnname;
+  const HChar * objname;
+
+  int n_ips=VG_(get_StackTrace)(VG_(get_running_tid)(),
+				ips, 256,
+				NULL, NULL,
+				0);
+  DiEpoch de = VG_(current_DiEpoch)();
+  VG_(fprintf)(vr_out_bb_info_backtrace, "begin: %p\n",  (void*)(tr->irsb));
+
+  int i;
+  for (i = n_ips - 1; i >= 0; i--) {
+    Vg_FnNameKind kind = VG_(get_fnname_kind_from_IP)(de, ips[i]);
+    if (Vg_FnNameMain == kind || Vg_FnNameBelowMain == kind)
+      n_ips = i + 1;
+    if (Vg_FnNameMain == kind)
+      break;
+  }
+
+  for(i=0; i<n_ips;i++){
+    Addr addr = ips[i];
+    VG_(get_fnname)(de, addr, &fnname);
+    VG_(get_objname)(de, addr, &objname);
+    VG_(fprintf)(vr_out_bb_info_backtrace, "%p : %s - %s\n", (void*)addr, fnname, objname);
+  }
+}