If we want to round a number to at most 2 decimal places, a common approach is the following:
var number = Math.round(num * 100) / 100;
While this works in most cases, it fails for 1.005, which comes out as 1 instead of 1.01, so it is not a reliable way to round a number.
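A quick check in a console shows why: the product 1.005 * 100 cannot be represented exactly in binary floating point, so Math.round receives a value just below 100.5.

console.log(1.005 * 100);                   // 100.49999999999999
console.log(Math.round(1.005 * 100) / 100); // 1, not 1.01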
Starting from the precisionRound example I found on MDN (which even for 1.005 returns 1 and not 1.01, as sketched just below), I wrote a custom precisionRound that accepts an arbitrary precision and returns 1.01 for 1.005.
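For reference, that multiply-round-divide helper looks roughly like this (a sketch from memory, not the verbatim MDN code; the Mdn suffix in the name is mine). It inherits the same floating-point problem as the one-liner above:

function precisionRoundMdn(number, precision) {
  var factor = Math.pow(10, precision);
  return Math.round(number * factor) / factor;
}

console.log(precisionRoundMdn(1.005, 2)); // 1, not 1.01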
Here is the custom function:
function precisionRound(number, precision) {
  if (precision < 0) {
    // Negative precision: shift the rounding point left of the decimal
    // with the classic multiply-round-divide approach.
    var factor = Math.pow(10, precision);
    return Math.round(number * factor) / factor;
  } else {
    // Non-negative precision: shift the decimal point via exponential
    // notation, avoiding an inexact multiplication before rounding.
    return +(Math.round(number + "e+" + precision) + "e-" + precision);
  }
}

console.log(precisionRound(1234.5678, 1));  // output: 1234.6
console.log(precisionRound(1234.5678, -1)); // output: 1230
console.log(precisionRound(1.005, 2));      // output: 1.01
console.log(precisionRound(1.0005, 2));     // output: 1
console.log(precisionRound(1.0005, 3));     // output: 1.001
console.log(precisionRound(1.0005, 4));     // output: 1.0005
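The trick is in the else branch: concatenating number + "e+" + precision builds a string like "1.005e+2", and parsing that string shifts the decimal point exactly, with no arithmetic involved. Multiplying by 100 instead goes through an inexact floating-point operation:

console.log(Number(1.005 + "e+" + 2)); // 100.5
console.log(1.005 * 100);              // 100.49999999999999

One caveat: this relies on the number's default string form not already being exponential, so extreme values whose toString is exponential (e.g. 1e21) produce strings like "1e+21e+2" and the function returns NaN.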
For TypeScript, the same logic as a static method:
// Illustrative wrapper: the original snippet shows only the method,
// so the class name NumberUtils is mine.
class NumberUtils {
  public static precisionRound(number: number, precision: number): number {
    if (precision < 0) {
      const factor = Math.pow(10, precision);
      return Math.round(number * factor) / factor;
    } else {
      return +(Math.round(Number(number + "e+" + precision)) + "e-" + precision);
    }
  }
}
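Usage, assuming the illustrative NumberUtils wrapper above:

console.log(NumberUtils.precisionRound(1.005, 2));      // 1.01
console.log(NumberUtils.precisionRound(1234.5678, -1)); // 1230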