Code works fine when it's processing less than 500 MB worth of images. (I'm trying to do 25+ GB worth.) Is there a way to modify the code to get it to work with a larger quantity of images?
I get an error that looks like this:
node:internal/process/promises:279 triggerUncaughtException(err, true /* fromPromise */); ^
[Error: EIO: i/o error, write] { errno: -5, code: 'EIO', syscall: 'write' }
or this:
node:internal/process/promises:279 triggerUncaughtException(err, true /* fromPromise */); ^
Error: read ENOTCONN at tryReadStart (node:net:614:20) at Socket._read (node:net:625:5) at Socket.Readable.read (node:internal/streams/readable:487:10) at Socket.read (node:net:666:39) at new Socket (node:net:415:12) at Object.Socket (node:net:286:41) at createSocket (node:internal/child_process:328:14) at ChildProcess.spawn (node:internal/child_process:445:23) at Object.spawn (node:child_process:700:9) at execa (file:///mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/execa/index.js:84:26) { errno: -107, code: 'ENOTCONN', syscall: 'read', originalMessage: 'read ENOTCONN', shortMessage: 'Command failed with ENOTCONN: /mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg -quality 75\n' + 'read ENOTCONN', command: '/mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg -quality 75', escapedCommand: '"/mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg" -quality 75', exitCode: undefined, signal: undefined, signalDescription: undefined, stdout: Buffer(0) [Uint8Array] [], stderr: Buffer(0) [Uint8Array] [], failed: true, timedOut: false, isCanceled: false, killed: false }
This is what I currently have:
import imagemin from 'imagemin';
import imageminMozjpeg from 'imagemin-mozjpeg';
import { promises as fsPromises } from 'node:fs';
import { promisify } from 'node:util';
import path from 'node:path';
import fs from 'graceful-fs';
// Promisified writeFile from graceful-fs (which queues/retries fs calls to
// avoid EMFILE-style failures under heavy load — TODO confirm that is why it
// was chosen here over node:fs).
const writeFile = promisify(fs.writeFile);
// Root of the original images and mirror root for the optimized output.
const srcdir = 'images/source';
const distdir = 'images/dist';
// Optimize every image under srcdir and mirror the folder structure into distdir.
// NOTE: `forEach(async ...)` discards the promises it creates, so every mkdir/write
// used to start at once and any rejection was unhandled (the uncaught EIO/ENOTCONN
// crash). A plain for...of loop awaits each file in turn, and the trailing .catch
// ensures failures are reported instead of killing the process.
imagemin([srcdir + '/**/*.{jpg,jpeg,png}'], {
  plugins: [
    imageminMozjpeg({
      quality: 75
    })
  ]
}).then(async files => {
  for (const file of files) {
    const source = path.parse(file.sourcePath);
    // Rebuild the destination path by swapping the source root for the dist root.
    file.destinationPath = `${source.dir.replace(srcdir, distdir)}/${source.name}${source.ext}`;
    await fsPromises.mkdir(path.dirname(file.destinationPath), { recursive: true });
    await writeFile(file.destinationPath, file.data);
  }
}).catch(err => {
  console.error('Image optimization failed:', err);
});
So it looks like imagemin grabs all the images at once before it ever gets to the loop and crashes, so I changed it to process all the images sequentially instead.
So I made the following code. It's robust and can process as many images as needed in one go. It also replicates the folder structure to make things easy. Hopefully it helps someone else who wants to optimize a ton of JPEG files using the Mozjpeg encoder for their own images. (You can also easily change it for other plugins if you want, like imageminJpegtran, imageminPngquant, etc.)
Here's what I came up with:
// mozjpegify.mjs
import path from 'path';
import glob from 'glob';
import imagemin from 'imagemin';
import imageminMozjpeg from 'imagemin-mozjpeg';
// Root of the originals and mirror root for the optimized output.
const srcdir = 'images/source';
const distdir = 'images/dist';
// Kick off the run; Main is a hoisted function declaration, so calling it
// before its definition is fine.
Main();
/**
 * Entry point: collects every path under the source tree and hands the
 * results to GetPathCallback for one-at-a-time optimization.
 */
async function Main() {
  GetPath(srcdir, GetPathCallback);
}
/**
 * Lists every entry (files and directories) under `src`, recursively,
 * via glob's callback API.
 *
 * @param {string} src - root directory to scan
 * @param {(err: Error|null, matches: string[]) => void} callback - receives the matches
 */
function GetPath(src, callback) {
  // Template literal instead of string concatenation; the stray semicolon
  // after the function declaration is also gone.
  glob(`${src}/**/*`, callback);
}
/**
 * Receives the glob results and optimizes matching images strictly one at a
 * time. Awaiting each file before starting the next keeps memory bounded,
 * which is the whole point versus handing imagemin the entire tree at once.
 *
 * @param {Error|null} err - error from glob, if any
 * @param {string[]} filePath - every path found under srcdir
 */
async function GetPathCallback(err, filePath) {
  if (err) {
    console.log('Error:', err);
    return;
  }
  const imageExtensions = new Set(['.jpg', '.jpeg', '.png']);
  for (const file of filePath) {
    // path.extname is more robust than split('.').pop() (handles dotless
    // paths), and lowercasing means .JPG/.Jpeg are no longer silently skipped.
    const ext = path.extname(file).toLowerCase();
    if (imageExtensions.has(ext)) { // make sure it's an image and not a folder or something
      await Optimize(file, ParsePath(file.replace(srcdir, distdir)));
    }
  }
}
/**
 * Runs a single image through mozjpeg (quality 75) and writes the result
 * into destFolder; imagemin creates the destination directory as needed.
 *
 * @param {string} src - path of the source image
 * @param {string} destFolder - directory to write the optimized file into
 */
async function Optimize(src, destFolder) {
  // The return value was previously bound to an unused `files` const; the
  // result is not needed because imagemin writes the output itself.
  await imagemin([src], {
    destination: destFolder,
    plugins: [
      imageminMozjpeg({ quality: 75 })
    ]
  });
  console.log(`${src}...Done`);
}
/**
 * Returns the directory portion of a file path (everything before the
 * final path segment).
 *
 * @param {string} filepath - path to split
 * @returns {string} the containing directory, '' for a bare filename
 */
function ParsePath(filepath) {
  const { dir } = path.parse(filepath);
  return dir;
}