Compare commits


4 Commits

Author SHA1 Message Date
Lovell Fuller
af89127208 Improve thread-safety of error and warning message handling.
Ensures all message reading occurs before thread shutdown.
2026-01-21 21:36:45 +00:00
Lovell Fuller
66764b359b Remove unused option parameter added in 8561f0d 2026-01-20 21:10:56 +00:00
Lovell Fuller
8561f0da1d Ensure HEIF primary item is used as default page #4487 2026-01-18 20:24:34 +00:00
Lovell Fuller
0468c1be9f Encoding lossless AVIF is mutually exclusive with iq tuning 2026-01-08 12:43:53 +00:00
19 changed files with 215 additions and 102 deletions

View File

@@ -717,7 +717,7 @@ instead of providing `xres` and `yres` in pixels/mm.
 | [options.xres] | <code>number</code> | <code>1.0</code> | horizontal resolution in pixels/mm |
 | [options.yres] | <code>number</code> | <code>1.0</code> | vertical resolution in pixels/mm |
 | [options.resolutionUnit] | <code>string</code> | <code>&quot;&#x27;inch&#x27;&quot;</code> | resolution unit options: inch, cm |
-| [options.bitdepth] | <code>number</code> | <code>8</code> | reduce bitdepth to 1, 2 or 4 bit |
+| [options.bitdepth] | <code>number</code> | <code>0</code> | reduce bitdepth to 1, 2 or 4 bit |
 | [options.miniswhite] | <code>boolean</code> | <code>false</code> | write 1-bit images as miniswhite |
 **Example**
@@ -758,7 +758,7 @@ When using Windows ARM64, this feature requires a CPU with ARM64v8.4 or later.
 | [options.effort] | <code>number</code> | <code>4</code> | CPU effort, between 0 (fastest) and 9 (slowest) |
 | [options.chromaSubsampling] | <code>string</code> | <code>&quot;&#x27;4:4:4&#x27;&quot;</code> | set to '4:2:0' to use chroma subsampling |
 | [options.bitdepth] | <code>number</code> | <code>8</code> | set bitdepth to 8, 10 or 12 bit |
-| [options.tune] | <code>string</code> | <code>&quot;&#x27;iq&#x27;&quot;</code> | tune output for a quality metric, one of 'iq' (default), 'ssim' or 'psnr' |
+| [options.tune] | <code>string</code> | <code>&quot;&#x27;iq&#x27;&quot;</code> | tune output for a quality metric, one of 'iq' (default), 'ssim' (default when lossless) or 'psnr' |
 **Example**
 ```js

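The documentation change above covers the AVIF/HEIF `tune` quality-metric option. A minimal usage sketch, assuming hypothetical input and output file names:

```js
const sharp = require('sharp');

// Tune AVIF encoding for a perceptual quality metric; 'iq' is the default.
sharp('input.jpg')
  .avif({ quality: 50, tune: 'ssim' })
  .toFile('output.avif')
  .then(info => console.log(info.format, info.size));
```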
View File

@@ -20,6 +20,8 @@ slug: changelog/v0.35.0
 * Upgrade to libvips v8.18.0 for upstream bug fixes.
+* Improve thread-safety of error (and warning) messages.
 * Deprecate Windows 32-bit (win32-ia32) prebuilt binaries.
 * Add AVIF/HEIF `tune` option for control over quality metrics.
@@ -38,4 +40,7 @@ slug: changelog/v0.35.0
 [#4480](https://github.com/lovell/sharp/issues/4480)
 [@eddienubes](https://github.com/eddienubes)
+* Ensure HEIF primary item is used as default page/frame.
+[#4487](https://github.com/lovell/sharp/issues/4487)
 * Add WebP `exact` option for control over transparent pixel colour values.

View File

@@ -366,7 +366,7 @@ const Sharp = function (input, options) {
 tiffPredictor: 'horizontal',
 tiffPyramid: false,
 tiffMiniswhite: false,
-tiffBitdepth: 8,
+tiffBitdepth: 0,
 tiffTile: false,
 tiffTileHeight: 256,
 tiffTileWidth: 256,

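With the default above changing from 8 to 0 and 8 dropped from the accepted values, the TIFF `bitdepth` option is now only for reducing to 1, 2 or 4 bit. A minimal sketch of that usage, assuming hypothetical file names:

```js
const sharp = require('sharp');

// Write a 1-bit TIFF; valid bitdepth values are now 1, 2 or 4.
sharp('scan.png')
  .toColourspace('b-w') // bitdepth reduction applies to 1-band uchar images
  .tiff({ bitdepth: 1, miniswhite: true, compression: 'none' })
  .toFile('scan.tiff')
  .then(info => console.log(info));
```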
View File

@@ -1055,7 +1055,7 @@ function trySetAnimationOptions (source, target) {
 * @param {number} [options.xres=1.0] - horizontal resolution in pixels/mm
 * @param {number} [options.yres=1.0] - vertical resolution in pixels/mm
 * @param {string} [options.resolutionUnit='inch'] - resolution unit options: inch, cm
-* @param {number} [options.bitdepth=8] - reduce bitdepth to 1, 2 or 4 bit
+* @param {number} [options.bitdepth=0] - reduce bitdepth to 1, 2 or 4 bit
 * @param {boolean} [options.miniswhite=false] - write 1-bit images as miniswhite
 * @returns {Sharp}
 * @throws {Error} Invalid options
@@ -1070,10 +1070,10 @@ function tiff (options) {
 }
 }
 if (is.defined(options.bitdepth)) {
-if (is.integer(options.bitdepth) && is.inArray(options.bitdepth, [1, 2, 4, 8])) {
+if (is.integer(options.bitdepth) && is.inArray(options.bitdepth, [1, 2, 4])) {
 this.options.tiffBitdepth = options.bitdepth;
 } else {
-throw is.invalidParameterError('bitdepth', '1, 2, 4 or 8', options.bitdepth);
+throw is.invalidParameterError('bitdepth', '1, 2 or 4', options.bitdepth);
 }
 }
 // tiling
@@ -1175,7 +1175,7 @@ function tiff (options) {
 * @param {number} [options.effort=4] - CPU effort, between 0 (fastest) and 9 (slowest)
 * @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
 * @param {number} [options.bitdepth=8] - set bitdepth to 8, 10 or 12 bit
-* @param {string} [options.tune='iq'] - tune output for a quality metric, one of 'iq' (default), 'ssim' or 'psnr'
+* @param {string} [options.tune='iq'] - tune output for a quality metric, one of 'iq' (default), 'ssim' (default when lossless) or 'psnr'
 * @returns {Sharp}
 * @throws {Error} Invalid options
 */
@@ -1255,7 +1255,11 @@ function heif (options) {
 }
 if (is.defined(options.tune)) {
 if (is.string(options.tune) && is.inArray(options.tune, ['iq', 'ssim', 'psnr'])) {
-this.options.heifTune = options.tune;
+if (this.options.heifLossless && options.tune === 'iq') {
+this.options.heifTune = 'ssim';
+} else {
+this.options.heifTune = options.tune;
+}
 } else {
 throw is.invalidParameterError('tune', 'one of: psnr, ssim, iq', options.tune);
 }

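Per the `heif` change above, requesting lossless output with the default 'iq' tuning now falls back to 'ssim', since the two are mutually exclusive. A minimal sketch of the resulting behaviour, assuming a hypothetical input file:

```js
const sharp = require('sharp');

// tune: 'iq' is silently replaced with 'ssim' when lossless is enabled.
sharp('input.png')
  .avif({ lossless: true, tune: 'iq' })
  .toBuffer()
  .then(data => console.log(data.length));
```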
View File

@@ -426,7 +426,7 @@ namespace sharp {
 }
 if (ImageTypeSupportsPage(imageType)) {
 option->set("n", descriptor->pages);
-option->set("page", descriptor->page);
+option->set("page", std::max(0, descriptor->page));
 }
 switch (imageType) {
 case ImageType::SVG:
@@ -456,6 +456,22 @@ namespace sharp {
 return option;
 }
+/*
+Should HEIF image be re-opened using the primary item?
+*/
+static bool HeifPrimaryPageReopen(VImage image, InputDescriptor *descriptor) {
+if (image.get_typeof(VIPS_META_N_PAGES) == G_TYPE_INT && image.get_typeof("heif-primary") == G_TYPE_INT) {
+if (image.get_int(VIPS_META_N_PAGES) > 1 && descriptor->pages == 1 && descriptor->page == -1) {
+int const pagePrimary = image.get_int("heif-primary");
+if (pagePrimary != 0) {
+descriptor->page = pagePrimary;
+return true;
+}
+}
+}
+return false;
+}
 /*
 Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
 */
@@ -490,12 +506,15 @@ namespace sharp {
 image = VImage::new_from_buffer(descriptor->buffer, descriptor->bufferLength, nullptr, option);
 if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
 image = SetDensity(image, descriptor->density);
+} else if (imageType == ImageType::HEIF && HeifPrimaryPageReopen(image, descriptor)) {
+option = GetOptionsForImageType(imageType, descriptor);
+image = VImage::new_from_buffer(descriptor->buffer, descriptor->bufferLength, nullptr, option);
 }
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
-throw vips::VError(std::string("Input buffer has corrupt header: ") + err.what());
+throw std::runtime_error(std::string("Input buffer has corrupt header: ") + err.what());
 }
 } else {
-throw vips::VError("Input buffer contains unsupported image format");
+throw std::runtime_error("Input buffer contains unsupported image format");
 }
 }
 } else {
@@ -566,10 +585,10 @@ namespace sharp {
 imageType = DetermineImageType(descriptor->file.data());
 if (imageType == ImageType::MISSING) {
 if (descriptor->file.find("<svg") != std::string::npos) {
-throw vips::VError("Input file is missing, did you mean "
+throw std::runtime_error("Input file is missing, did you mean "
 "sharp(Buffer.from('" + descriptor->file.substr(0, 8) + "...')?");
 }
-throw vips::VError("Input file is missing: " + descriptor->file);
+throw std::runtime_error("Input file is missing: " + descriptor->file);
 }
 if (imageType != ImageType::UNKNOWN) {
 try {
@@ -577,12 +596,15 @@ namespace sharp {
 image = VImage::new_from_file(descriptor->file.data(), option);
 if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
 image = SetDensity(image, descriptor->density);
+} else if (imageType == ImageType::HEIF && HeifPrimaryPageReopen(image, descriptor)) {
+option = GetOptionsForImageType(imageType, descriptor);
+image = VImage::new_from_file(descriptor->file.data(), option);
 }
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
-throw vips::VError(std::string("Input file has corrupt header: ") + err.what());
+throw std::runtime_error(std::string("Input file has corrupt header: ") + err.what());
 }
 } else {
-throw vips::VError("Input file contains unsupported image format");
+throw std::runtime_error("Input file contains unsupported image format");
 }
 }
 }
@@ -590,7 +612,7 @@ namespace sharp {
 // Limit input images to a given number of pixels, where pixels = width * height
 if (descriptor->limitInputPixels > 0 &&
 static_cast<uint64_t>(image.width()) * image.height() > descriptor->limitInputPixels) {
-throw vips::VError("Input image exceeds pixel limit");
+throw std::runtime_error("Input image exceeds pixel limit");
 }
 return std::make_tuple(image, imageType);
 }
@@ -766,19 +788,19 @@ namespace sharp {
 : image.height();
 if (imageType == ImageType::JPEG) {
 if (image.width() > 65535 || height > 65535) {
-throw vips::VError("Processed image is too large for the JPEG format");
+throw std::runtime_error("Processed image is too large for the JPEG format");
 }
 } else if (imageType == ImageType::WEBP) {
 if (image.width() > 16383 || height > 16383) {
-throw vips::VError("Processed image is too large for the WebP format");
+throw std::runtime_error("Processed image is too large for the WebP format");
 }
 } else if (imageType == ImageType::GIF) {
 if (image.width() > 65535 || height > 65535) {
-throw vips::VError("Processed image is too large for the GIF format");
+throw std::runtime_error("Processed image is too large for the GIF format");
 }
 } else if (imageType == ImageType::HEIF) {
 if (image.width() > 16384 || height > 16384) {
-throw vips::VError("Processed image is too large for the HEIF format");
+throw std::runtime_error("Processed image is too large for the HEIF format");
 }
 }
 }

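The `HeifPrimaryPageReopen` helper above makes the primary item of a multi-item AVIF/HEIF input the default page, rather than item 0. A sketch of the resulting JavaScript-level behaviour, assuming a hypothetical multi-item file:

```js
const sharp = require('sharp');

// metadata() reports the item count and which item is primary;
// decoding now defaults to the primary item.
sharp('multi-item.avif')
  .metadata()
  .then(({ pages, pagePrimary }) => console.log(pages, pagePrimary));

// An explicit input page still selects a specific item.
sharp('multi-item.avif', { page: 0 })
  .toBuffer()
  .then(data => console.log(data.length));
```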
View File

@@ -105,7 +105,7 @@ namespace sharp {
 rawPremultiplied(false),
 rawPageHeight(0),
 pages(1),
-page(0),
+page(-1),
 createChannels(0),
 createWidth(0),
 createHeight(0),

View File

@@ -31,7 +31,7 @@ class MetadataWorker : public Napi::AsyncWorker {
 sharp::ImageType imageType = sharp::ImageType::UNKNOWN;
 try {
 std::tie(image, imageType) = OpenInput(baton->input);
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
 (baton->err).append(err.what());
 }
 if (imageType != sharp::ImageType::UNKNOWN) {
@@ -152,7 +152,12 @@ class MetadataWorker : public Napi::AsyncWorker {
 // PNG comments
 vips_image_map(image.get_image(), readPNGComment, &baton->comments);
 }
+// Handle warnings
+std::string warning = sharp::VipsWarningPop();
+while (!warning.empty()) {
+baton->warnings.push_back(warning);
+warning = sharp::VipsWarningPop();
+}
 // Clean up
 vips_error_clear();
 vips_thread_shutdown();
@@ -162,13 +167,9 @@ class MetadataWorker : public Napi::AsyncWorker {
 Napi::Env env = Env();
 Napi::HandleScope scope(env);
-// Handle warnings
-std::string warning = sharp::VipsWarningPop();
-while (!warning.empty()) {
+for (auto& warning : baton->warnings) {
 debuglog.Call(Receiver().Value(), { Napi::String::New(env, warning) });
-warning = sharp::VipsWarningPop();
 }
 if (baton->err.empty()) {
 Napi::Object info = Napi::Object::New(env);
 info.Set("format", baton->format);

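The warnings collected into `baton->warnings` above are read before `vips_thread_shutdown` and only forwarded to `debuglog` once the worker completes on the JavaScript thread. A sketch of observing them, assuming sharp's usual `NODE_DEBUG=sharp` debug logging and per-instance 'warning' event:

```js
const sharp = require('sharp');

// Run with NODE_DEBUG=sharp to see forwarded libvips warnings on stderr.
const img = sharp('input.jpg'); // hypothetical file name
img.on('warning', (message) => console.warn('libvips warning:', message));
img.metadata().then((metadata) => console.log(metadata.format));
```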
View File

@@ -57,6 +57,7 @@ struct MetadataBaton {
 size_t gainMapLength;
 MetadataComments comments;
 std::string err;
+std::vector<std::string> warnings;
 MetadataBaton():
 input(nullptr),

View File

@@ -14,7 +14,6 @@
 #include "./operations.h"
 using vips::VImage;
-using vips::VError;
 namespace sharp {
 /*
@@ -287,7 +286,7 @@ namespace sharp {
 */
 VImage Trim(VImage image, std::vector<double> background, double threshold, bool const lineArt, int const margin) {
 if (image.width() < 3 && image.height() < 3) {
-throw VError("Image to trim must be at least 3x3 pixels");
+throw std::runtime_error("Image to trim must be at least 3x3 pixels");
 }
 if (background.size() == 0) {
 // Top-left pixel provides the default background colour if none is given
@@ -361,7 +360,7 @@ namespace sharp {
 VImage Linear(VImage image, std::vector<double> const a, std::vector<double> const b) {
 size_t const bands = static_cast<size_t>(image.bands());
 if (a.size() > bands) {
-throw VError("Band expansion using linear is unsupported");
+throw std::runtime_error("Band expansion using linear is unsupported");
 }
 bool const uchar = !Is16Bit(image.interpretation());
 if (image.has_alpha() && a.size() != bands && (a.size() == 1 || a.size() == bands - 1 || bands - 1 == 1)) {

View File

@@ -84,7 +84,7 @@ class PipelineWorker : public Napi::AsyncWorker {
 if (nPages == -1) {
 // Resolve the number of pages if we need to render until the end of the document
 nPages = image.get_typeof(VIPS_META_N_PAGES) != 0
-? image.get_int(VIPS_META_N_PAGES) - baton->input->page
+? image.get_int(VIPS_META_N_PAGES) - std::max(0, baton->input->page)
 : 1;
 }
@@ -274,7 +274,7 @@ class PipelineWorker : public Napi::AsyncWorker {
 }
 sharp::SetDensity(image, baton->input->density);
 if (image.width() > 32767 || image.height() > 32767) {
-throw vips::VError("Input SVG image will exceed 32767x32767 pixel limit when scaled");
+throw std::runtime_error("Input SVG image will exceed 32767x32767 pixel limit when scaled");
 }
 } else if (inputImageType == sharp::ImageType::PDF) {
 if (baton->input->buffer != nullptr) {
@@ -290,7 +290,7 @@ class PipelineWorker : public Napi::AsyncWorker {
 }
 } else {
 if (inputImageType == sharp::ImageType::SVG && (image.width() > 32767 || image.height() > 32767)) {
-throw vips::VError("Input SVG image exceeds 32767x32767 pixel limit");
+throw std::runtime_error("Input SVG image exceeds 32767x32767 pixel limit");
 }
 }
 if (baton->input->autoOrient) {
@@ -675,7 +675,7 @@ class PipelineWorker : public Napi::AsyncWorker {
 // Verify within current dimensions
 if (compositeImage.width() > image.width() || compositeImage.height() > image.height()) {
-throw vips::VError("Image to composite must have same dimensions or smaller");
+throw std::runtime_error("Image to composite must have same dimensions or smaller");
 }
 // Check if overlay is tiled
 if (composite->tile) {
@@ -1086,20 +1086,19 @@ class PipelineWorker : public Napi::AsyncWorker {
 // Get raw image data
 baton->bufferOut = static_cast<char*>(image.write_to_memory(&baton->bufferOutLength));
 if (baton->bufferOut == nullptr) {
-(baton->err).append("Could not allocate enough memory for raw output");
-return Error();
+throw std::runtime_error("Could not allocate enough memory for raw output");
 }
 baton->formatOut = "raw";
 } else {
 // Unsupported output format
-(baton->err).append("Unsupported output format ");
+auto unsupported = std::string("Unsupported output format ");
 if (baton->formatOut == "input") {
-(baton->err).append("when trying to match input format of ");
-(baton->err).append(ImageTypeId(inputImageType));
+unsupported.append("when trying to match input format of ");
+unsupported.append(ImageTypeId(inputImageType));
 } else {
-(baton->err).append(baton->formatOut);
+unsupported.append(baton->formatOut);
 }
-return Error();
+throw std::runtime_error(unsupported);
 }
 } else {
 // File output
@@ -1274,19 +1273,28 @@ class PipelineWorker : public Napi::AsyncWorker {
 return Error();
 }
 }
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
 char const *what = err.what();
 if (what && what[0]) {
 (baton->err).append(what);
 } else {
 if (baton->input->failOn == VIPS_FAIL_ON_WARNING) {
 (baton->err).append("Warning treated as error due to failOn setting");
-baton->errUseWarning = true;
 } else {
 (baton->err).append("Unknown error");
 }
 }
 }
+// Handle warnings
+std::string warning = sharp::VipsWarningPop();
+while (!warning.empty()) {
+if (baton->input->failOn == VIPS_FAIL_ON_WARNING) {
+(baton->err).append("\n").append(warning);
+} else {
+(baton->warnings).push_back(warning);
+}
+warning = sharp::VipsWarningPop();
+}
 // Clean up libvips' per-request data and threads
 vips_error_clear();
 vips_thread_shutdown();
@@ -1296,17 +1304,9 @@ class PipelineWorker : public Napi::AsyncWorker {
 Napi::Env env = Env();
 Napi::HandleScope scope(env);
-// Handle warnings
-std::string warning = sharp::VipsWarningPop();
-while (!warning.empty()) {
-if (baton->errUseWarning) {
-(baton->err).append("\n").append(warning);
-} else {
-debuglog.Call(Receiver().Value(), { Napi::String::New(env, warning) });
-}
-warning = sharp::VipsWarningPop();
+for (auto &warning : baton->warnings) {
+debuglog.Call(Receiver().Value(), { Napi::String::New(env, warning) });
 }
 if (baton->err.empty()) {
 int width = baton->width;
 int height = baton->height;
@@ -1407,7 +1407,7 @@ class PipelineWorker : public Napi::AsyncWorker {
 void MultiPageUnsupported(int const pages, std::string op) {
 if (pages > 1) {
-throw vips::VError(op + " is not supported for multi-page images");
+throw std::runtime_error(op + " is not supported for multi-page images");
 }
 }

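With the pipeline change above, libvips warnings are appended to the error when `failOn: 'warning'` is set, and otherwise stored for forwarding to the debug log after the worker completes. A minimal sketch, assuming a hypothetical input file:

```js
const sharp = require('sharp');

// failOn: 'warning' turns any libvips warning raised during processing
// into a rejected promise instead of a debug log entry.
sharp('suspect.jpg', { failOn: 'warning' })
  .resize(200)
  .toBuffer()
  .then(() => console.log('no warnings'))
  .catch((err) => console.error(err.message));
```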
View File

@@ -204,6 +204,7 @@ struct PipelineBaton {
 bool jxlLossless;
 VipsBandFormat rawDepth;
 std::string err;
+std::vector<std::string> warnings;
 bool errUseWarning;
 int keepMetadata;
 int withMetadataOrientation;
@@ -365,7 +366,7 @@ struct PipelineBaton {
 tiffBigtiff(false),
 tiffPredictor(VIPS_FOREIGN_TIFF_PREDICTOR_HORIZONTAL),
 tiffPyramid(false),
-tiffBitdepth(8),
+tiffBitdepth(0),
 tiffMiniswhite(false),
 tiffTile(false),
 tiffTileHeight(256),

View File

@@ -39,7 +39,7 @@ class StatsWorker : public Napi::AsyncWorker {
 sharp::ImageType imageType = sharp::ImageType::UNKNOWN;
 try {
 std::tie(image, imageType) = OpenInput(baton->input);
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
 (baton->err).append(err.what());
 }
 if (imageType != sharp::ImageType::UNKNOWN) {
@@ -92,11 +92,16 @@ class StatsWorker : public Napi::AsyncWorker {
 baton->dominantRed = dx * 16 + 8;
 baton->dominantGreen = dy * 16 + 8;
 baton->dominantBlue = dz * 16 + 8;
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
 (baton->err).append(err.what());
 }
 }
+// Handle warnings
+std::string warning = sharp::VipsWarningPop();
+while (!warning.empty()) {
+baton->warnings.push_back(warning);
+warning = sharp::VipsWarningPop();
+}
 // Clean up
 vips_error_clear();
 vips_thread_shutdown();
@@ -106,13 +111,9 @@ class StatsWorker : public Napi::AsyncWorker {
 Napi::Env env = Env();
 Napi::HandleScope scope(env);
-// Handle warnings
-std::string warning = sharp::VipsWarningPop();
-while (!warning.empty()) {
+for (auto& warning : baton->warnings) {
 debuglog.Call(Receiver().Value(), { Napi::String::New(env, warning) });
-warning = sharp::VipsWarningPop();
 }
 if (baton->err.empty()) {
 // Stats Object
 Napi::Object info = Napi::Object::New(env);

View File

@@ -45,6 +45,7 @@ struct StatsBaton {
 int dominantBlue;
 std::string err;
+std::vector<std::string> warnings;
 StatsBaton():
 input(nullptr),

View File

@@ -244,7 +244,7 @@ Napi::Value _maxColourDistance(const Napi::CallbackInfo& info) {
 }
 // Calculate colour distance
 maxColourDistance = image1.dE00(image2).max();
-} catch (vips::VError const &err) {
+} catch (std::runtime_error const &err) {
 throw Napi::Error::New(env, err.what());
 }

View File

@@ -127,7 +127,7 @@ module.exports = {
 inputSvgSmallViewBox: getPath('circle.svg'),
 inputSvgWithEmbeddedImages: getPath('struct-image-04-t.svg'), // https://dev.w3.org/SVG/profiles/1.2T/test/svg/struct-image-04-t.svg
 inputAvif: getPath('sdr_cosmos12920_cicp1-13-6_yuv444_full_qp10.avif'), // CC by-nc-nd https://github.com/AOMediaCodec/av1-avif/tree/master/testFiles/Netflix
+inputAvifWithPitmBox: getPath('pitm.avif'), // https://github.com/lovell/sharp/issues/4487
 inputJPGBig: getPath('flowers.jpeg'),
 inputPngDotAndLines: getPath('dot-and-lines.png'),

BIN test/fixtures/pitm.avif (new binary fixture, 65 KiB, not shown)

View File

@@ -7,7 +7,13 @@ const { describe, it } = require('node:test');
 const assert = require('node:assert');
 const sharp = require('../../');
-const { inputAvif, inputJpg, inputGifAnimated } = require('../fixtures');
+const {
+inputAvif,
+inputAvifWithPitmBox,
+inputJpg,
+inputGifAnimated,
+inputPng,
+} = require('../fixtures');
 describe('AVIF', () => {
 it('called without options does not throw an error', () => {
@@ -17,16 +23,13 @@ describe('AVIF', () => {
 });
 it('can convert AVIF to JPEG', async () => {
-const data = await sharp(inputAvif)
-.resize(32)
-.jpeg()
-.toBuffer();
+const data = await sharp(inputAvif).resize(32).jpeg().toBuffer();
 const { size, ...metadata } = await sharp(data).metadata();
 void size;
 assert.deepStrictEqual(metadata, {
 autoOrient: {
 height: 13,
-width: 32
+width: 32,
 },
 channels: 3,
 chromaSubsampling: '4:2:0',
@@ -41,7 +44,7 @@ describe('AVIF', () => {
 isProgressive: false,
 isPalette: false,
 space: 'srgb',
-width: 32
+width: 32,
 });
 });
@@ -55,7 +58,7 @@ describe('AVIF', () => {
 assert.deepStrictEqual(metadata, {
 autoOrient: {
 height: 26,
-width: 32
+width: 32,
 },
 channels: 3,
 compression: 'av1',
@@ -70,20 +73,47 @@ describe('AVIF', () => {
 pagePrimary: 0,
 pages: 1,
 space: 'srgb',
-width: 32
+width: 32,
 });
 });
-it('can passthrough AVIF', async () => {
-const data = await sharp(inputAvif)
+it('can convert PNG to lossless AVIF', async () => {
+const data = await sharp(inputPng)
 .resize(32)
+.avif({ lossless: true, effort: 0 })
 .toBuffer();
 const { size, ...metadata } = await sharp(data).metadata();
 void size;
+assert.deepStrictEqual(metadata, {
+autoOrient: {
+height: 24,
+width: 32,
+},
+channels: 3,
+compression: 'av1',
+depth: 'uchar',
+format: 'heif',
+hasAlpha: false,
+hasProfile: false,
+height: 24,
+isProgressive: false,
+isPalette: false,
+bitsPerSample: 8,
+pagePrimary: 0,
+pages: 1,
+space: 'srgb',
+width: 32,
+});
+});
+it('can passthrough AVIF', async () => {
+const data = await sharp(inputAvif).resize(32).toBuffer();
+const { size, ...metadata } = await sharp(data).metadata();
+void size;
 assert.deepStrictEqual(metadata, {
 autoOrient: {
 height: 13,
-width: 32
+width: 32,
 },
 channels: 3,
 compression: 'av1',
@@ -98,7 +128,7 @@ describe('AVIF', () => {
 pagePrimary: 0,
 pages: 1,
 space: 'srgb',
-width: 32
+width: 32,
 });
 });
@@ -112,7 +142,7 @@ describe('AVIF', () => {
 assert.deepStrictEqual(metadata, {
 autoOrient: {
 height: 300,
-width: 10
+width: 10,
 },
 channels: 4,
 compression: 'av1',
@@ -127,7 +157,7 @@ describe('AVIF', () => {
 pagePrimary: 0,
 pages: 1,
 space: 'srgb',
-width: 10
+width: 10,
 });
 });
@@ -142,7 +172,7 @@ describe('AVIF', () => {
 assert.deepStrictEqual(metadata, {
 autoOrient: {
 height: 26,
-width: 32
+width: 32,
 },
 channels: 3,
 compression: 'av1',
@@ -157,30 +187,37 @@ describe('AVIF', () => {
 pagePrimary: 0,
 pages: 1,
 space: 'srgb',
-width: 32
+width: 32,
 });
 });
 it('Invalid width - too large', async () =>
 assert.rejects(
-() => sharp({ create: { width: 16385, height: 16, channels: 3, background: 'red' } }).avif().toBuffer(),
-/Processed image is too large for the HEIF format/
-)
-);
+() =>
+sharp({
+create: { width: 16385, height: 16, channels: 3, background: 'red' },
+})
+.avif()
+.toBuffer(),
+/Processed image is too large for the HEIF format/,
+));
 it('Invalid height - too large', async () =>
 assert.rejects(
-() => sharp({ create: { width: 16, height: 16385, channels: 3, background: 'red' } }).avif().toBuffer(),
-/Processed image is too large for the HEIF format/
-)
-);
+() =>
+sharp({
+create: { width: 16, height: 16385, channels: 3, background: 'red' },
+})
+.avif()
+.toBuffer(),
+/Processed image is too large for the HEIF format/,
+));
 it('Invalid bitdepth value throws error', () =>
 assert.throws(
 () => sharp().avif({ bitdepth: 11 }),
-/Expected 8, 10 or 12 for bitdepth but received 11 of type number/
-)
-);
+/Expected 8, 10 or 12 for bitdepth but received 11 of type number/,
+));
 it('Different tune options result in different file sizes', async () => {
 const ssim = await sharp(inputJpg)
@@ -192,5 +229,47 @@ describe('AVIF', () => {
 .avif({ tune: 'iq', effort: 0 })
 .toBuffer();
 assert(ssim.length < iq.length);
-})
+});
+it('AVIF with non-zero primary item uses it as default page', async () => {
+const { exif, ...metadata } = await sharp(inputAvifWithPitmBox).metadata();
+void exif;
+assert.deepStrictEqual(metadata, {
+format: 'heif',
+width: 4096,
+height: 800,
+space: 'srgb',
+channels: 3,
+depth: 'uchar',
+isProgressive: false,
+isPalette: false,
+bitsPerSample: 8,
+pages: 5,
+pagePrimary: 4,
+compression: 'av1',
+resolutionUnit: 'cm',
+hasProfile: false,
+hasAlpha: false,
+autoOrient: { width: 4096, height: 800 },
+});
+const data = await sharp(inputAvifWithPitmBox)
+.png({ compressionLevel: 0 })
+.toBuffer();
+const { size, ...pngMetadata } = await sharp(data).metadata();
+assert.deepStrictEqual(pngMetadata, {
+format: 'png',
+width: 4096,
+height: 800,
+space: 'srgb',
+channels: 3,
+depth: 'uchar',
+isProgressive: false,
+isPalette: false,
+bitsPerSample: 8,
+hasProfile: false,
+hasAlpha: false,
+autoOrient: { width: 4096, height: 800 },
+});
+});
 });

View File

@@ -642,7 +642,7 @@ describe('Image metadata', () => {
 });
 it('keep existing ICC profile', async () => {
-const data = await sharp(fixtures.inputJpgWithExif)
+const data = await sharp(fixtures.inputJpgWithExif, { failOn: 'error' })
 .keepIccProfile()
 .toBuffer();
@@ -675,7 +675,7 @@ describe('Image metadata', () => {
 });
 it('keep existing ICC profile, avoid colour transform', async () => {
-const [r, g, b] = await sharp(fixtures.inputPngWithProPhotoProfile)
+const [r, g, b] = await sharp(fixtures.inputPngWithProPhotoProfile, { failOn: 'error' })
 .keepIccProfile()
 .raw()
 .toBuffer();
@@ -721,7 +721,7 @@ describe('Image metadata', () => {
 });
 it('transform to invalid ICC profile emits warning', async () => {
-const img = sharp({ create })
+const img = sharp({ create, failOn: 'error' })
 .png()
 .withIccProfile(fixtures.path('invalid-illuminant.icc'));

View File

@@ -122,7 +122,6 @@ describe('TIFF', () => {
 sharp(fixtures.inputTiff8BitDepth)
 .toColourspace('b-w') // can only squash 1 band uchar images
 .tiff({
-bitdepth: 8,
 compression: 'none',
 predictor: 'none'
 })
@@ -154,7 +153,7 @@ describe('TIFF', () => {
 it('Invalid TIFF bitdepth value throws error', () => {
 assert.throws(() => {
 sharp().tiff({ bitdepth: 3 });
-}, /Error: Expected 1, 2, 4 or 8 for bitdepth but received 3 of type number/);
+}, /Error: Expected 1, 2 or 4 for bitdepth but received 3 of type number/);
 });
 it('TIFF setting xres and yres on file', () =>