How does the JavaScript decimal.js library control the accuracy (precision and rounding) of numbers?
/**
 * Rounds a decimal string/number using decimal.js.
 *
 * Bug in the original: `Decimal.set` was called AFTER `new Decimal(x).round()`,
 * so the requested rounding mode `y` never applied — decimal.js configuration
 * only affects operations performed after `Decimal.set` is called. The fix is
 * to configure first, then round.
 *
 * Note: `precision` controls the number of significant digits of *arithmetic
 * results* (e.g. plus, times, div); `Decimal.prototype.round()` rounds to a
 * whole number using the configured `rounding` mode and is not limited by
 * `precision`.
 *
 * @param {string|number} x - The value to round.
 * @param {number} y - A decimal.js rounding mode (e.g. Decimal.ROUND_DOWN === 1).
 * @returns {Decimal} `x` rounded to a whole number using mode `y`.
 */
function round(x, y) {
  // Configure BEFORE constructing/rounding; settings are not retroactive.
  Decimal.set({ precision: 2, rounding: y });
  return new Decimal(x).round();
}
console.log(round("1235.1235", 1).valueOf()); // 1235
Above is the code. I don't know what the problem is — the rounding mode passed in seems to have no effect.