The RoundAmount function below is not rounding as expected: values from 0.491 to 0.495 are rounded to 0.49, while values from 0.496 to 0.499 are rounded to 0.5.

I was expecting a rounded value of 0.5 for 0.495, but it gives 0.49. What's happening here? Is there some rounding happening during the assignment on line5?
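As a side check on the assignment itself, here is a small separate test program (not part of the listing below) showing that assigning a double to a long int simply drops the fractional part rather than rounding:

#include <stdio.h>

int main()
{
    const double lAlmostFifty = 49.999;
    const long int lTruncated = lAlmostFifty; // double -> long int conversion truncates toward zero
    printf("lTruncated = %ld\n", lTruncated); // prints 49, not 50
    return 0;
}

That is why line5 adds 0.5 before the assignment: the intent is round-half-up via truncation.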

It gives the correct result under these circumstances:
1. If I modify the rounding factor to 0.51 (on line5)
2. If I #define GOOD 1

Could someone explain why it behaves like this (on UNIX)? :confused:
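One more data point: printing the test value directly with extra precision (again, a separate little program) suggests that 0.495 itself may not be stored exactly:

#include <stdio.h>

int main()
{
    // on a machine with IEEE-754 doubles this prints a value slightly below 0.495
    printf("0.495 stored as %.020f\n", 0.495);
    return 0;
}

Here is the complete program: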


#include <math.h>
#include <stdio.h>

void RoundAmount( double & opCharge, const int ipPrecision )
{
    //extract the integral part
    const long int lIntegral = opCharge;

    printf("A: opCharge - lIntegral = %.012f\n",
           opCharge - lIntegral); // line1
    printf("B: ... * pow() = %.012f\n",
           (opCharge - lIntegral) * pow(10, ipPrecision)); // line2
    printf("C: result as double = %.020f\n",
           (opCharge - lIntegral) * pow(10, ipPrecision) + 0.5); // line3

    double lDecimalAsDbl = 0.0;
    long int lDecimalAsInt = 0;

    //extract the decimal part with specified precision
#if GOOD
    lDecimalAsDbl = (opCharge - lIntegral) * pow(10, ipPrecision) + 0.5; //line4
    lDecimalAsInt = lDecimalAsDbl;
#else
    lDecimalAsInt = (opCharge - lIntegral) * pow(10, ipPrecision) + 0.5; //line5
#endif

    printf("4: lDecimal = %ld (%.20f)\n",
           lDecimalAsInt, lDecimalAsDbl); // line6

    //now construct a rounded value
    opCharge = lIntegral + (lDecimalAsInt / pow(10, ipPrecision));
}

int main(int argc, char **argv)
{
    double lTestVal = 0.495;

    RoundAmount(lTestVal, 2);

    printf("Rounded amount is %.06f (%.02f)\n", lTestVal, lTestVal);

    return 0;
}
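
To reproduce the range behaviour described at the top (0.491 through 0.495 giving 0.49, 0.496 through 0.499 giving 0.5), main can also be replaced with a small sweep over those exact literals, something like this (using the same RoundAmount as above):

int main(int argc, char **argv)
{
    // the exact values mentioned at the top of the post
    const double lTestVals[] = { 0.491, 0.492, 0.493, 0.494, 0.495,
                                 0.496, 0.497, 0.498, 0.499 };
    const int lCount = sizeof(lTestVals) / sizeof(lTestVals[0]);

    for (int i = 0; i < lCount; ++i)
    {
        double lTestVal = lTestVals[i]; // copy, so the original value is kept for printing
        RoundAmount(lTestVal, 2);
        printf("%.03f -> %.02f\n", lTestVals[i], lTestVal);
    }

    return 0;
}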