I'm using this function to convert a file size in bytes to a human-readable file size:
function getReadableFileSizeString(fileSizeInBytes) {
    var i = -1;
    var byteUnits = [' kB', ' MB', ' GB', ' TB', ' PB', ' EB', ' ZB', ' YB'];
    do {
        fileSizeInBytes = fileSizeInBytes / 1024;
        i++;
    } while (fileSizeInBytes > 1024);

    return Math.max(fileSizeInBytes, 0.1).toFixed(1) + byteUnits[i];
}
However, it doesn't seem to be accurate. For example:

getReadableFileSizeString(1551859712); // output is "1.4 GB"

Shouldn't this be "1.5 GB"? It seems like the division by 1024 is losing precision. Am I totally misunderstanding something, or is there a better way to do this?
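For reference, the raw divisions (plain console arithmetic, shown only to make the numbers concrete):

console.log(1551859712 / 1024 / 1024 / 1024); // ≈ 1.4453
console.log(1551859712 / 1e9);                // ≈ 1.5519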
Here's one I wrote. What you're seeing isn't lost precision: dividing by powers of 1024 gives binary (IEC) sizes, properly labelled KiB/MiB/GiB, while the "1.5 GB" you expected corresponds to metric (SI) units, i.e. powers of 1000. The function below supports both:
/**
 * Format bytes as human-readable text.
 *
 * @param bytes Number of bytes.
 * @param si True to use metric (SI) units, aka powers of 1000. False to use
 *           binary (IEC), aka powers of 1024.
 * @param dp Number of decimal places to display.
 *
 * @return Formatted string.
 */
function humanFileSize(bytes, si = false, dp = 1) {
    const thresh = si ? 1000 : 1024;

    if (Math.abs(bytes) < thresh) {
        return bytes + ' B';
    }

    const units = si
        ? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
        : ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
    let u = -1;
    const r = 10 ** dp;

    do {
        bytes /= thresh;
        ++u;
        // Keep dividing while the value, rounded to dp decimal places, would
        // still display at or above the threshold (this avoids output like
        // "1000.0 kB"), as long as a larger unit is available.
    } while (Math.round(Math.abs(bytes) * r) / r >= thresh && u < units.length - 1);

    return bytes.toFixed(dp) + ' ' + units[u];
}
console.log(humanFileSize(1551859712)) // 1.4 GiB
console.log(humanFileSize(5000, true)) // 5.0 kB
console.log(humanFileSize(5000, false)) // 4.9 KiB
console.log(humanFileSize(-10000000000000000000000000000)) // -8271.8 YiB
console.log(humanFileSize(999949, true)) // 999.9 kB
console.log(humanFileSize(999950, true)) // 1.0 MB
console.log(humanFileSize(999950, true, 2)) // 999.95 kB
console.log(humanFileSize(999500, true, 0)) // 1 MB
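Applied to the number from your question, the choice of unit system accounts for the whole difference (this just calls the function above, nothing else assumed):

console.log(humanFileSize(1551859712))       // 1.4 GiB (IEC, powers of 1024)
console.log(humanFileSize(1551859712, true)) // 1.6 GB  (SI, powers of 1000)

Note that even in SI units the result is "1.6 GB" rather than "1.5 GB", because 1.5519 rounds up at one decimal place.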