    exp = expF128UI64( uiA64 );
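    /* exp >= 0x402F: the binary point lies at or below the boundary between
       the high and low 64 bits, so any bits to be rounded away are in uiA0. */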
    if ( 0x402F <= exp ) {
        if ( 0x406F <= exp ) {
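            /* exp >= 0x406F: all 112 fraction bits are integer bits, so the
               value is already an integer, an infinity, or a NaN. */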
            if ( (exp == 0x7FFF) && (fracF128UI64( uiA64 ) | uiA0) ) {
                uiZ = softfloat_propagateNaNF128UI( uiA64, uiA0, 0, 0 );
                goto uiZ;
            }
            return a;
        }
        lastBitMask0 = (uint_fast64_t) 2<<(0x406E - exp);
        roundBitsMask = lastBitMask0 - 1;
        uiZ.v64 = uiA64;
        uiZ.v0  = uiA0;
        roundNearEven = (roundingMode == softfloat_round_near_even);
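        /* Nearest modes: round up when the discarded part is at least half the
           weight of the last integer bit; for ties-to-even, also clear that bit
           on an exact tie. */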
        if ( roundNearEven || (roundingMode == softfloat_round_near_maxMag) ) {
            if ( exp == 0x402F ) {
                if ( UINT64_C( 0x8000000000000000 ) <= uiZ.v0 ) {
                    ++uiZ.v64;
                    if ( roundNearEven && (uiZ.v0 == UINT64_C( 0x8000000000000000 )) ) {
                        uiZ.v64 &= ~(uint_fast64_t) 1;
                    }
                }
            } else {
                uiZ = softfloat_add128( uiZ.v64, uiZ.v0, 0, lastBitMask0>>1 );
                if ( roundNearEven && !(uiZ.v0 & roundBitsMask) ) {
                    uiZ.v0 &= ~lastBitMask0;
                }
            }
        } else if (
            roundingMode
                == (signF128UI64( uiZ.v64 ) ? softfloat_round_min
                        : softfloat_round_max)
        ) {
            uiZ = softfloat_add128( uiZ.v64, uiZ.v0, 0, roundBitsMask );
        }
        uiZ.v0 &= ~roundBitsMask;
        lastBitMask64 = !lastBitMask0;
    } else {
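        /* exp < 0x402F: the last integer bit, if any, lies in the high 64 bits.
           When exp < 0x3FFF the magnitude is below 1, so the result can only be
           a signed zero or +/-1. */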
        if ( exp < 0x3FFF ) {
            if ( !((uiA64 & UINT64_C( 0x7FFFFFFFFFFFFFFF )) | uiA0) ) return a;
            if ( exact ) softfloat_exceptionFlags |= softfloat_flag_inexact;
            uiZ.v64 = uiA64 & packToF128UI64( 1, 0, 0 );
            uiZ.v0  = 0;
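            /* uiZ is now a zero carrying the operand's sign; the switch below
               promotes it to +/-1 when the rounding mode requires it. */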
            switch ( roundingMode ) {
             case softfloat_round_near_even:
                if ( !(fracF128UI64( uiA64 ) | uiA0) ) break;
             case softfloat_round_near_maxMag:
                if ( exp == 0x3FFE ) uiZ.v64 |= packToF128UI64( 0, 0x3FFF, 0 );
                break;
             case softfloat_round_min:
                if ( uiZ.v64 ) uiZ.v64 = packToF128UI64( 1, 0x3FFF, 0 );
                break;
             case softfloat_round_max:
                if ( !uiZ.v64 ) uiZ.v64 = packToF128UI64( 0, 0x3FFF, 0 );
                break;
#ifdef SOFTFLOAT_ROUND_ODD
             case softfloat_round_odd:
                uiZ.v64 |= packToF128UI64( 0, 0x3FFF, 0 );
                break;
#endif
            }
            goto uiZ;
        }
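        /* 0x3FFF <= exp < 0x402F: 1 <= |a| < 2^48.  The last integer bit lies
           in the high 64 bits; all of the low 64 bits are round bits. */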
        uiZ.v64 = uiA64;
        uiZ.v0  = 0;
        lastBitMask64 = (uint_fast64_t) 1<<(0x402F - exp);
        roundBitsMask = lastBitMask64 - 1;
        if ( roundingMode == softfloat_round_near_maxMag ) {
            uiZ.v64 += lastBitMask64>>1;
        } else if ( roundingMode == softfloat_round_near_even ) {
            uiZ.v64 += lastBitMask64>>1;
            if ( !((uiZ.v64 & roundBitsMask) | uiA0) ) {
                uiZ.v64 &= ~lastBitMask64;
            }
        } else if (
            roundingMode
                == (signF128UI64( uiZ.v64 ) ? softfloat_round_min
                        : softfloat_round_max)
        ) {
            uiZ.v64 = (uiZ.v64 | (uiA0 != 0)) + roundBitsMask;
        }
        uiZ.v64 &= ~roundBitsMask;
        lastBitMask0 = 0;
    }
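    /* If rounding changed the value, round-to-odd jams the last integer bit,
       and the inexact exception is raised when 'exact' is set. */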
    if ( (uiZ.v64 != uiA64) || (uiZ.v0 != uiA0) ) {
#ifdef SOFTFLOAT_ROUND_ODD
        if ( roundingMode == softfloat_round_odd ) {
            uiZ.v64 |= lastBitMask64;
            uiZ.v0  |= lastBitMask0;
        }
#endif
        if ( exact ) softfloat_exceptionFlags |= softfloat_flag_inexact;
    }