Compare commits

...

35 Commits

Author SHA1 Message Date
Lovell Fuller
1ff84b20b7 Release v0.29.3 2021-11-14 11:40:19 +00:00
Lovell Fuller
97655d2dfd Bump deps 2021-11-14 09:17:44 +00:00
Michael B. Klein
d10d7b02d4 Docs: remove duplicate entry for mbklein (#2971) 2021-11-11 19:10:44 +00:00
Lovell Fuller
2ffdae2914 Docs: changelog and credit for #2952 2021-11-08 19:43:49 +00:00
Michael B. Klein
342de36973 Impute TIFF xres/yres from withMetadata({density}) 2021-11-08 19:43:42 +00:00
Lovell Fuller
b33231d4bd Ensure correct dimensions when contain 1px image #2951 2021-11-07 16:35:30 +00:00
Lovell Fuller
319db21f29 Release v0.29.2 2021-10-21 09:15:21 +01:00
Lovell Fuller
d359331426 Remove animation props from single page images #2890 2021-10-18 20:27:10 +01:00
Lovell Fuller
7ae151362b Bump devDeps 2021-10-17 15:17:50 +01:00
Lovell Fuller
648a1e05da Throw error rather than exit for invalid binaries #2931 2021-10-17 15:14:40 +01:00
Lovell Fuller
b9f211fe34 Docs: changelog for #2918 2021-10-17 15:11:38 +01:00
Dmitri Pyatkov
e475d9e47f Improve error message on Windows for version conflict (#2918) 2021-10-17 14:10:28 +01:00
Lovell Fuller
f37ca8249a Bump deps 2021-09-22 11:41:22 +01:00
Lovell Fuller
1dd4be670d Add timeout function to limit processing time 2021-09-22 10:33:59 +01:00
Lovell Fuller
197d4cf835 Docs: changelog and credit for #2893 2021-09-22 10:31:12 +01:00
Lovell Fuller
83eed86b53 Docs: clarify prebuilt libc support on ARMv6/v7 2021-09-22 10:08:52 +01:00
Lovell Fuller
bbf612cb9e Replace use of deprecated util.inherits 2021-09-22 10:08:44 +01:00
Erlend
2679bb567b Allow use of 'tif' to select TIFF output (#2893) 2021-09-16 18:49:14 +01:00
Lovell Fuller
481e350f39 Ensure 'versions' is populated from vendored libvips 2021-09-07 11:21:00 +01:00
Lovell Fuller
50c7a08754 Release v0.29.1 2021-09-07 10:23:50 +01:00
Lovell Fuller
9a0bb60737 Bump deps 2021-09-07 10:21:51 +01:00
Lovell Fuller
deb5d81221 Docs: changelog entries for #2878 #2879 2021-09-06 16:30:31 +01:00
Espen Hovlandsdal
916b04dbac Allow using speed 9 for AVIF/HEIC encoding (#2879) 2021-09-06 16:23:02 +01:00
Espen Hovlandsdal
52307fad5d Resolve paths before comparing input/output destination (#2878)
This fixes an issue where if you try to write to the same destination as the
input file but you are not using absolute (or the same relative path) for both
the input and output, sharp/vips might produce errors such as:

someFile.jpg: unable to open for write
unix error: No such file or directory
2021-09-06 16:21:43 +01:00
Lovell Fuller
afb21135c2 Docs: add changelog entry for #2868 2021-09-05 09:35:46 +01:00
Zaruike
b7fbffb3f7 Add support for libvips compiled with OpenJPEG 2021-09-05 09:32:02 +01:00
Lovell Fuller
5d98bcd8d8 Remove unsupported animation props from AVIF #2870 2021-09-05 08:46:15 +01:00
Lovell Fuller
e044788f63 Docs: changelog and credit for #2846 2021-08-30 20:31:10 +01:00
Tenpi
4a9267ce12 Add lightness option to modulate operation 2021-08-30 20:22:41 +01:00
Lovell Fuller
104464c2e0 Ensure images with P3 profiles retain full gamut #2862 2021-08-30 17:15:17 +01:00
Lovell Fuller
60adc110f5 Ensure background is premultiplied when compositing #2858 2021-08-29 16:40:40 +01:00
Paul Straw
2031d7d112 Ensure compatibility with ImageMagick 7 (#2865) 2021-08-28 20:17:44 +01:00
Lovell Fuller
3402656ec5 Set PNG bitdepth based on number of colours #2855
Removes use of deprecated libvips API
2021-08-26 22:05:29 +01:00
Lovell Fuller
4e84f743e4 Docs: toFile expects directory structure to exist 2021-08-20 09:22:22 +01:00
Lovell Fuller
17e50de5f0 Docs: serve docute from same hostname
Cheapo corporate web proxies ignore CSP and rewrite HTML
2021-08-19 18:58:17 +01:00
35 changed files with 864 additions and 77 deletions

View File

@@ -400,7 +400,9 @@ Returns **Sharp**
## modulate
Transforms the image using brightness, saturation and hue rotation.
Transforms the image using brightness, saturation, hue rotation, and lightness.
Brightness and lightness both operate on luminance, with the difference being that
brightness is multiplicative whereas lightness is additive.
### Parameters
@@ -409,13 +411,14 @@ Transforms the image using brightness, saturation and hue rotation.
* `options.brightness` **[number][1]?** Brightness multiplier
* `options.saturation` **[number][1]?** Saturation multiplier
* `options.hue` **[number][1]?** Degrees for hue rotation
* `options.lightness` **[number][1]?** Lightness addend
### Examples
```javascript
sharp(input)
.modulate({
brightness: 2 // increase lightness by a factor of 2
brightness: 2 // increase brightness by a factor of 2
});
sharp(input)
@@ -423,6 +426,11 @@ sharp(input)
hue: 180 // hue-rotate by 180 degrees
});
sharp(input)
.modulate({
lightness: 50 // increase lightness by +50
});
// decrease brightness and saturation while also hue-rotating by 90 degrees
sharp(input)
.modulate({

View File

@@ -11,6 +11,8 @@ Note that raw pixel data is only supported for buffer output.
By default all metadata will be removed, which includes EXIF-based orientation.
See [withMetadata][1] for control over this.
The caller is responsible for ensuring directory structures and permissions exist.
A `Promise` is returned when `callback` is not provided.
### Parameters
@@ -328,10 +330,57 @@ The prebuilt binaries do not include this - see
Returns **Sharp**
## jp2
Use these JP2 options for output image.
Requires libvips compiled with support for OpenJPEG.
The prebuilt binaries do not include this - see
[installing a custom libvips][11].
### Parameters
* `options` **[Object][6]?** output options
* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `80`)
* `options.lossless` **[boolean][7]** use lossless compression mode (optional, default `false`)
* `options.tileWidth` **[number][9]** horizontal tile size (optional, default `512`)
* `options.tileHeight` **[number][9]** vertical tile size (optional, default `512`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
### Examples
```javascript
// Convert any input to lossless JP2 output
const data = await sharp(input)
.jp2({ lossless: true })
.toBuffer();
```
```javascript
// Convert any input to very high quality JP2 output
const data = await sharp(input)
.jp2({
quality: 100,
chromaSubsampling: '4:4:4'
})
.toBuffer();
```
* Throws **[Error][4]** Invalid options
Returns **Sharp**
**Meta**
* **since**: 0.29.1
## tiff
Use these TIFF options for output image.
The `density` can be set in pixels/inch via [withMetadata][1] instead of providing `xres` and `yres` in pixels/mm.
### Parameters
* `options` **[Object][6]?** output options
@@ -372,13 +421,15 @@ Use these AVIF options for output image.
Whilst it is possible to create AVIF images smaller than 16x16 pixels,
most web browsers do not display these properly.
AVIF image sequences are not supported.
### Parameters
* `options` **[Object][6]?** output options
* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
* `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 9 (fastest/largest) (optional, default `5`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
<!---->
@@ -405,7 +456,7 @@ globally-installed libvips compiled with support for libheif, libde265 and x265.
* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
* `options.compression` **[string][2]** compression format: av1, hevc (optional, default `'av1'`)
* `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 9 (fastest/largest) (optional, default `5`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
<!---->
@@ -493,6 +544,26 @@ sharp('input.tiff')
Returns **Sharp**
## timeout
Set a timeout for processing, in seconds.
Use a value of zero to continue processing indefinitely, the default behaviour.
The clock starts when libvips opens an input image for processing.
Time spent waiting for a libuv thread to become available is not included.
### Parameters
* `options` **[Object][6]**
* `options.seconds` **[number][9]** Number of seconds after which processing will be stopped
Returns **Sharp**
**Meta**
* **since**: 0.29.2
[1]: #withmetadata
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

View File

@@ -4,6 +4,63 @@
Requires libvips v8.11.3
### v0.29.3 - 14th November 2021
* Ensure correct dimensions when containing image resized to 1px.
[#2951](https://github.com/lovell/sharp/issues/2951)
* Impute TIFF `xres`/`yres` from `density` provided to `withMetadata`.
[#2952](https://github.com/lovell/sharp/pull/2952)
[@mbklein](https://github.com/mbklein)
### v0.29.2 - 21st October 2021
* Add `timeout` function to limit processing time.
* Ensure `sharp.versions` is populated from vendored libvips.
* Remove animation properties from single page images.
[#2890](https://github.com/lovell/sharp/issues/2890)
* Allow use of 'tif' to select TIFF output.
[#2893](https://github.com/lovell/sharp/pull/2893)
[@erf](https://github.com/erf)
* Improve error message on Windows for version conflict.
[#2918](https://github.com/lovell/sharp/pull/2918)
[@dkrnl](https://github.com/dkrnl)
* Throw error rather than exit when invalid binaries detected.
[#2931](https://github.com/lovell/sharp/issues/2931)
### v0.29.1 - 7th September 2021
* Add `lightness` option to `modulate` operation.
[#2846](https://github.com/lovell/sharp/pull/2846)
* Ensure correct PNG bitdepth is set based on number of colours.
[#2855](https://github.com/lovell/sharp/issues/2855)
* Ensure background is always premultiplied when compositing.
[#2858](https://github.com/lovell/sharp/issues/2858)
* Ensure images with P3 profiles retain full gamut.
[#2862](https://github.com/lovell/sharp/issues/2862)
* Add support for libvips compiled with OpenJPEG.
[#2868](https://github.com/lovell/sharp/pull/2868)
* Remove unsupported animation properties from AVIF output.
[#2870](https://github.com/lovell/sharp/issues/2870)
* Resolve paths before comparing input/output filenames.
[#2878](https://github.com/lovell/sharp/pull/2878)
[@rexxars](https://github.com/rexxars)
* Allow use of speed 9 (fastest) for HEIF encoding.
[#2879](https://github.com/lovell/sharp/pull/2879)
[@rexxars](https://github.com/rexxars)
### v0.29.0 - 17th August 2021
* Drop support for Node.js 10, now requires Node.js >= 12.13.0.

1
docs/docute.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -218,3 +218,12 @@ GitHub: https://github.com/Daiz
Name: Mart Jansink
GitHub: https://github.com/mart-jansink
Name: Tenpi
GitHub: https://github.com/Tenpi
Name: Zaruike
GitHub: https://github.com/Zaruike
Name: Erlend F
GitHub: https://github.com/erf

File diff suppressed because one or more lines are too long

View File

@@ -31,8 +31,8 @@ JPEG, PNG, WebP, AVIF, TIFF, GIF (input) and SVG (input) image formats.
The following platforms have prebuilt libvips but not sharp:
* Linux ARMv6
* Linux ARMv7 (glibc >= 2.28)
* Linux ARMv6 (glibc >= 2.28)
* Windows ARM64
The following platforms require compilation of both libvips and sharp from source:
@@ -40,6 +40,8 @@ The following platforms require compilation of both libvips and sharp from sourc
* Linux x86
* Linux x64 (glibc <= 2.16, includes RHEL/CentOS 6)
* Linux ARM64 (glibc <= 2.28)
* Linux ARMv7 (glibc <= 2.27, musl)
* Linux ARMv6 (glibc <= 2.27, musl)
* Linux PowerPC
* FreeBSD
* OpenBSD

File diff suppressed because one or more lines are too long

View File

@@ -199,6 +199,7 @@ const Sharp = function (input, options) {
brightness: 1,
saturation: 1,
hue: 0,
lightness: 0,
booleanBufferIn: null,
booleanFileIn: '',
joinChannelIn: [],
@@ -232,8 +233,13 @@ const Sharp = function (input, options) {
pngAdaptiveFiltering: false,
pngPalette: false,
pngQuality: 100,
pngColours: 256,
pngBitdepth: 8,
pngDither: 1,
jp2Quality: 80,
jp2TileHeight: 512,
jp2TileWidth: 512,
jp2Lossless: false,
jp2ChromaSubsampling: '4:4:4',
webpQuality: 80,
webpAlphaQuality: 100,
webpLossless: false,
@@ -267,6 +273,7 @@ const Sharp = function (input, options) {
tileBackground: [255, 255, 255, 255],
tileCentre: false,
tileId: 'https://example.com/iiif',
timeoutSeconds: 0,
linearA: 1,
linearB: 0,
// Function to notify of libvips warnings
@@ -282,7 +289,8 @@ const Sharp = function (input, options) {
this.options.input = this._createInputDescriptor(input, options, { allowStream: true });
return this;
};
util.inherits(Sharp, stream.Duplex);
Object.setPrototypeOf(Sharp.prototype, stream.Duplex.prototype);
Object.setPrototypeOf(Sharp, stream.Duplex);
/**
* Take a "snapshot" of the Sharp instance, returning a new instance.

View File

@@ -570,14 +570,16 @@ function recomb (inputMatrix) {
}
/**
* Transforms the image using brightness, saturation and hue rotation.
* Transforms the image using brightness, saturation, hue rotation, and lightness.
* Brightness and lightness both operate on luminance, with the difference being that
* brightness is multiplicative whereas lightness is additive.
*
* @since 0.22.1
*
* @example
* sharp(input)
* .modulate({
* brightness: 2 // increase lightness by a factor of 2
* brightness: 2 // increase brightness by a factor of 2
* });
*
* sharp(input)
@@ -585,6 +587,11 @@ function recomb (inputMatrix) {
* hue: 180 // hue-rotate by 180 degrees
* });
*
* sharp(input)
* .modulate({
* lightness: 50 // increase lightness by +50
* });
*
 * // decrease brightness and saturation while also hue-rotating by 90 degrees
* sharp(input)
* .modulate({
@@ -597,6 +604,7 @@ function recomb (inputMatrix) {
* @param {number} [options.brightness] Brightness multiplier
* @param {number} [options.saturation] Saturation multiplier
* @param {number} [options.hue] Degrees for hue rotation
* @param {number} [options.lightness] Lightness addend
* @returns {Sharp}
*/
function modulate (options) {
@@ -624,6 +632,13 @@ function modulate (options) {
throw is.invalidParameterError('hue', 'number', options.hue);
}
}
if ('lightness' in options) {
if (is.number(options.lightness)) {
this.options.lightness = options.lightness;
} else {
throw is.invalidParameterError('lightness', 'number', options.lightness);
}
}
return this;
}

View File

@@ -1,5 +1,6 @@
'use strict';
const path = require('path');
const is = require('./is');
const sharp = require('./sharp');
@@ -12,11 +13,17 @@ const formats = new Map([
['png', 'png'],
['raw', 'raw'],
['tiff', 'tiff'],
['tif', 'tiff'],
['webp', 'webp'],
['gif', 'gif']
['gif', 'gif'],
['jp2', 'jp2'],
['jpx', 'jp2'],
['j2k', 'jp2'],
['j2c', 'jp2']
]);
const errMagickSave = new Error('GIF output requires libvips with support for ImageMagick');
const errJp2Save = new Error('JP2 output requires libvips with support for OpenJPEG');
/**
* Write output image data to a file.
@@ -28,6 +35,8 @@ const errMagickSave = new Error('GIF output requires libvips with support for Im
* By default all metadata will be removed, which includes EXIF-based orientation.
* See {@link withMetadata} for control over this.
*
* The caller is responsible for ensuring directory structures and permissions exist.
*
* A `Promise` is returned when `callback` is not provided.
*
* @example
@@ -52,7 +61,7 @@ function toFile (fileOut, callback) {
let err;
if (!is.string(fileOut)) {
err = new Error('Missing output file path');
} else if (this.options.input.file === fileOut) {
} else if (is.string(this.options.input.file) && path.resolve(this.options.input.file) === path.resolve(fileOut)) {
err = new Error('Cannot use same file for input and output');
} else if (this.options.formatOut === 'input' && fileOut.toLowerCase().endsWith('.gif') && !this.constructor.format.magick.output.file) {
err = errMagickSave;
@@ -403,7 +412,7 @@ function png (options) {
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
this.options.pngColours = colours;
this.options.pngBitdepth = 1 << 31 - Math.clz32(Math.ceil(Math.log2(colours)));
} else {
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
}
@@ -509,6 +518,84 @@ function gif (options) {
return this._updateFormatOut('gif', options);
}
/**
* Use these JP2 options for output image.
*
* Requires libvips compiled with support for OpenJPEG.
* The prebuilt binaries do not include this - see
* {@link https://sharp.pixelplumbing.com/install#custom-libvips installing a custom libvips}.
*
* @example
* // Convert any input to lossless JP2 output
* const data = await sharp(input)
* .jp2({ lossless: true })
* .toBuffer();
*
* @example
* // Convert any input to very high quality JP2 output
* const data = await sharp(input)
* .jp2({
* quality: 100,
* chromaSubsampling: '4:4:4'
* })
* .toBuffer();
*
* @since 0.29.1
*
* @param {Object} [options] - output options
* @param {number} [options.quality=80] - quality, integer 1-100
* @param {boolean} [options.lossless=false] - use lossless compression mode
* @param {number} [options.tileWidth=512] - horizontal tile size
* @param {number} [options.tileHeight=512] - vertical tile size
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
* @returns {Sharp}
* @throws {Error} Invalid options
*/
/* istanbul ignore next */
function jp2 (options) {
if (!this.constructor.format.jp2k.output.buffer) {
throw errJp2Save;
}
if (is.object(options)) {
if (is.defined(options.quality)) {
if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
this.options.jp2Quality = options.quality;
} else {
throw is.invalidParameterError('quality', 'integer between 1 and 100', options.quality);
}
}
if (is.defined(options.lossless)) {
if (is.bool(options.lossless)) {
this.options.jp2Lossless = options.lossless;
} else {
throw is.invalidParameterError('lossless', 'boolean', options.lossless);
}
}
if (is.defined(options.tileWidth)) {
if (is.integer(options.tileWidth) && is.inRange(options.tileWidth, 1, 32768)) {
this.options.jp2TileWidth = options.tileWidth;
} else {
throw is.invalidParameterError('tileWidth', 'integer between 1 and 32768', options.tileWidth);
}
}
if (is.defined(options.tileHeight)) {
if (is.integer(options.tileHeight) && is.inRange(options.tileHeight, 1, 32768)) {
this.options.jp2TileHeight = options.tileHeight;
} else {
throw is.invalidParameterError('tileHeight', 'integer between 1 and 32768', options.tileHeight);
}
}
if (is.defined(options.chromaSubsampling)) {
if (is.string(options.chromaSubsampling) && is.inArray(options.chromaSubsampling, ['4:2:0', '4:4:4'])) {
this.options.heifChromaSubsampling = options.chromaSubsampling;
} else {
throw is.invalidParameterError('chromaSubsampling', 'one of: 4:2:0, 4:4:4', options.chromaSubsampling);
}
}
}
return this._updateFormatOut('jp2', options);
}
/**
* Set animation options if available.
* @private
@@ -550,6 +637,8 @@ function trySetAnimationOptions (source, target) {
/**
* Use these TIFF options for output image.
*
* The `density` can be set in pixels/inch via {@link withMetadata} instead of providing `xres` and `yres` in pixels/mm.
*
* @example
* // Convert SVG input to LZW-compressed, 1 bit per pixel TIFF output
* sharp('input.svg')
@@ -654,12 +743,14 @@ function tiff (options) {
* Whilst it is possible to create AVIF images smaller than 16x16 pixels,
* most web browsers do not display these properly.
*
* AVIF image sequences are not supported.
*
* @since 0.27.0
*
* @param {Object} [options] - output options
* @param {number} [options.quality=50] - quality, integer 1-100
* @param {boolean} [options.lossless=false] - use lossless compression
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 9 (fastest/largest)
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
* @returns {Sharp}
* @throws {Error} Invalid options
@@ -680,7 +771,7 @@ function avif (options) {
* @param {number} [options.quality=50] - quality, integer 1-100
* @param {string} [options.compression='av1'] - compression format: av1, hevc
* @param {boolean} [options.lossless=false] - use lossless compression
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 9 (fastest/largest)
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
* @returns {Sharp}
* @throws {Error} Invalid options
@@ -709,10 +800,10 @@ function heif (options) {
}
}
if (is.defined(options.speed)) {
if (is.integer(options.speed) && is.inRange(options.speed, 0, 8)) {
if (is.integer(options.speed) && is.inRange(options.speed, 0, 9)) {
this.options.heifSpeed = options.speed;
} else {
throw is.invalidParameterError('speed', 'integer between 0 and 8', options.speed);
throw is.invalidParameterError('speed', 'integer between 0 and 9', options.speed);
}
}
if (is.defined(options.chromaSubsampling)) {
@@ -886,6 +977,31 @@ function tile (options) {
return this._updateFormatOut('dz');
}
/**
* Set a timeout for processing, in seconds.
* Use a value of zero to continue processing indefinitely, the default behaviour.
*
* The clock starts when libvips opens an input image for processing.
* Time spent waiting for a libuv thread to become available is not included.
*
* @since 0.29.2
*
* @param {Object} options
* @param {number} options.seconds - Number of seconds after which processing will be stopped
* @returns {Sharp}
*/
function timeout (options) {
if (!is.plainObject(options)) {
throw is.invalidParameterError('options', 'object', options);
}
if (is.integer(options.seconds) && is.inRange(options.seconds, 0, 3600)) {
this.options.timeoutSeconds = options.seconds;
} else {
throw is.invalidParameterError('seconds', 'integer between 0 and 3600', options.seconds);
}
return this;
}
/**
* Update the output format unless options.force is false,
* in which case revert to input format.
@@ -1031,6 +1147,7 @@ module.exports = function (Sharp) {
withMetadata,
toFormat,
jpeg,
jp2,
png,
webp,
tiff,
@@ -1039,6 +1156,7 @@ module.exports = function (Sharp) {
gif,
raw,
tile,
timeout,
// Private
_updateFormatOut,
_setBooleanOption,

View File

@@ -19,6 +19,13 @@ try {
help.push(
'- Consult the installation documentation: https://sharp.pixelplumbing.com/install'
);
console.error(help.join('\n'));
process.exit(1);
// Check loaded
if (process.platform === 'win32') {
const loadedModule = Object.keys(require.cache).find((i) => /[\\/]build[\\/]Release[\\/]sharp(.*)\.node$/.test(i));
if (loadedModule) {
const [, loadedPackage] = loadedModule.match(/node_modules[\\/]([^\\/]+)[\\/]/);
help.push(`- Ensure the version of sharp aligns with the ${loadedPackage} package: "npm ls sharp"`);
}
}
throw new Error(help.join('\n'));
}

View File

@@ -4,6 +4,7 @@ const events = require('events');
const detectLibc = require('detect-libc');
const is = require('./is');
const platformAndArch = require('./platform')();
const sharp = require('./sharp');
/**
@@ -45,7 +46,7 @@ let versions = {
vips: sharp.libvipsVersion()
};
try {
versions = require(`../vendor/${versions.vips}/versions.json`);
versions = require(`../vendor/${versions.vips}/${platformAndArch}/versions.json`);
} catch (err) {}
/**

View File

@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, AVIF and TIFF images",
"version": "0.29.0",
"version": "0.29.3",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -78,7 +78,8 @@
"Jacob Smith <jacob@frende.me>",
"Michael Nutt <michael@nutt.im>",
"Brad Parham <baparham@gmail.com>",
"Taneli Vatanen <taneli.vatanen@gmail.com>"
"Taneli Vatanen <taneli.vatanen@gmail.com>",
"Joris Dugué <zaruike10@gmail.com>"
],
"scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node install/can-compile && node-gyp rebuild && node install/dll-copy)",
@@ -112,6 +113,7 @@
"tiff",
"gif",
"svg",
"jp2",
"dzi",
"image",
"resize",
@@ -124,25 +126,25 @@
"dependencies": {
"color": "^4.0.1",
"detect-libc": "^1.0.3",
"node-addon-api": "^4.0.0",
"prebuild-install": "^6.1.4",
"node-addon-api": "^4.2.0",
"prebuild-install": "^7.0.0",
"semver": "^7.3.5",
"simple-get": "^3.1.0",
"simple-get": "^4.0.0",
"tar-fs": "^2.1.1",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
"async": "^3.2.1",
"async": "^3.2.2",
"cc": "^3.0.1",
"decompress-zip": "^0.3.3",
"documentation": "^13.2.5",
"exif-reader": "^1.0.3",
"icc": "^2.0.0",
"license-checker": "^25.0.1",
"mocha": "^9.0.3",
"mock-fs": "^5.0.0",
"mocha": "^9.1.3",
"mock-fs": "^5.1.2",
"nyc": "^15.1.0",
"prebuild": "^10.0.1",
"prebuild": "^11.0.0",
"rimraf": "^3.0.2",
"semistandard": "^16.0.1"
},

View File

@@ -157,6 +157,10 @@ namespace sharp {
bool IsGif(std::string const &str) {
return EndsWith(str, ".gif") || EndsWith(str, ".GIF");
}
bool IsJp2(std::string const &str) {
return EndsWith(str, ".jp2") || EndsWith(str, ".jpx") || EndsWith(str, ".j2k") || EndsWith(str, ".j2c")
|| EndsWith(str, ".JP2") || EndsWith(str, ".JPX") || EndsWith(str, ".J2K") || EndsWith(str, ".J2C");
}
bool IsTiff(std::string const &str) {
return EndsWith(str, ".tif") || EndsWith(str, ".tiff") || EndsWith(str, ".TIF") || EndsWith(str, ".TIFF");
}
@@ -190,6 +194,7 @@ namespace sharp {
case ImageType::WEBP: id = "webp"; break;
case ImageType::TIFF: id = "tiff"; break;
case ImageType::GIF: id = "gif"; break;
case ImageType::JP2: id = "jp2"; break;
case ImageType::SVG: id = "svg"; break;
case ImageType::HEIF: id = "heif"; break;
case ImageType::PDF: id = "pdf"; break;
@@ -226,6 +231,8 @@ namespace sharp {
{ "VipsForeignLoadGifBuffer", ImageType::GIF },
{ "VipsForeignLoadNsgifFile", ImageType::GIF },
{ "VipsForeignLoadNsgifBuffer", ImageType::GIF },
{ "VipsForeignLoadJp2kBuffer", ImageType::JP2 },
{ "VipsForeignLoadJp2kFile", ImageType::JP2 },
{ "VipsForeignLoadSvgFile", ImageType::SVG },
{ "VipsForeignLoadSvgBuffer", ImageType::SVG },
{ "VipsForeignLoadHeifFile", ImageType::HEIF },
@@ -234,6 +241,8 @@ namespace sharp {
{ "VipsForeignLoadPdfBuffer", ImageType::PDF },
{ "VipsForeignLoadMagickFile", ImageType::MAGICK },
{ "VipsForeignLoadMagickBuffer", ImageType::MAGICK },
{ "VipsForeignLoadMagick7File", ImageType::MAGICK },
{ "VipsForeignLoadMagick7Buffer", ImageType::MAGICK },
{ "VipsForeignLoadOpenslide", ImageType::OPENSLIDE },
{ "VipsForeignLoadPpmFile", ImageType::PPM },
{ "VipsForeignLoadFits", ImageType::FITS },
@@ -285,6 +294,7 @@ namespace sharp {
imageType == ImageType::WEBP ||
imageType == ImageType::MAGICK ||
imageType == ImageType::GIF ||
imageType == ImageType::JP2 ||
imageType == ImageType::TIFF ||
imageType == ImageType::HEIF ||
imageType == ImageType::PDF;
@@ -508,6 +518,17 @@ namespace sharp {
return copy;
}
/*
Remove animation properties from image.
*/
VImage RemoveAnimationProperties(VImage image) {
VImage copy = image.copy();
copy.remove(VIPS_META_PAGE_HEIGHT);
copy.remove("delay");
copy.remove("loop");
return copy;
}
/*
Does this image have a non-default density?
*/
@@ -589,6 +610,33 @@ namespace sharp {
return warning;
}
/*
Attach an event listener for progress updates, used to detect timeout
*/
void SetTimeout(VImage image, int const seconds) {
if (seconds > 0) {
VipsImage *im = image.get_image();
if (im->progress_signal == NULL) {
int *timeout = VIPS_NEW(im, int);
*timeout = seconds;
g_signal_connect(im, "eval", G_CALLBACK(VipsProgressCallBack), timeout);
vips_image_set_progress(im, TRUE);
}
}
}
/*
Event listener for progress updates, used to detect timeout
*/
void VipsProgressCallBack(VipsImage *im, VipsProgress *progress, int *timeout) {
// printf("VipsProgressCallBack progress=%d run=%d timeout=%d\n", progress->percent, progress->run, *timeout);
if (*timeout > 0 && progress->run >= *timeout) {
vips_image_set_kill(im, TRUE);
vips_error("timeout", "%d%% complete", progress->percent);
*timeout = 0;
}
}
/*
Calculate the (left, top) coordinates of the output image
within the input image, applying the given gravity during an embed.
@@ -757,23 +805,27 @@ namespace sharp {
/*
Convert RGBA value to another colourspace
*/
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation) {
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba,
VipsInterpretation const interpretation, bool premultiply) {
int const bands = static_cast<int>(rgba.size());
if (bands < 3 || interpretation == VIPS_INTERPRETATION_sRGB || interpretation == VIPS_INTERPRETATION_RGB) {
if (bands < 3) {
return rgba;
} else {
VImage pixel = VImage::new_matrix(1, 1);
pixel.set("bands", bands);
pixel = pixel.new_from_image(rgba);
pixel = pixel.colourspace(interpretation, VImage::option()->set("source_space", VIPS_INTERPRETATION_sRGB));
return pixel(0, 0);
}
VImage pixel = VImage::new_matrix(1, 1);
pixel.set("bands", bands);
pixel = pixel
.new_from_image(rgba)
.colourspace(interpretation, VImage::option()->set("source_space", VIPS_INTERPRETATION_sRGB));
if (premultiply) {
pixel = pixel.premultiply();
}
return pixel(0, 0);
}
/*
Apply the alpha channel to a given colour
*/
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour) {
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour, bool premultiply) {
// Scale up 8-bit values to match 16-bit input image
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Create alphaColour colour
@@ -797,7 +849,7 @@ namespace sharp {
alphaColour.push_back(colour[3] * multiplier);
}
// Ensure alphaColour colour uses correct colourspace
alphaColour = sharp::GetRgbaAsColourspace(alphaColour, image.interpretation());
alphaColour = sharp::GetRgbaAsColourspace(alphaColour, image.interpretation(), premultiply);
// Add non-transparent alpha channel, if required
if (colour[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
@@ -827,4 +879,5 @@ namespace sharp {
}
return image;
}
} // namespace sharp

View File

@@ -116,6 +116,7 @@ namespace sharp {
JPEG,
PNG,
WEBP,
JP2,
TIFF,
GIF,
SVG,
@@ -142,6 +143,7 @@ namespace sharp {
bool IsJpeg(std::string const &str);
bool IsPng(std::string const &str);
bool IsWebp(std::string const &str);
bool IsJp2(std::string const &str);
bool IsGif(std::string const &str);
bool IsTiff(std::string const &str);
bool IsHeic(std::string const &str);
@@ -208,6 +210,11 @@ namespace sharp {
*/
VImage SetAnimationProperties(VImage image, int pageHeight, std::vector<int> delay, int loop);
/*
Remove animation properties from image.
*/
VImage RemoveAnimationProperties(VImage image);
/*
Does this image have a non-default density?
*/
@@ -243,6 +250,16 @@ namespace sharp {
*/
std::string VipsWarningPop();
/*
Attach an event listener for progress updates, used to detect timeout
*/
void SetTimeout(VImage image, int const timeoutSeconds);
/*
Event listener for progress updates, used to detect timeout
*/
void VipsProgressCallBack(VipsImage *image, VipsProgress *progress, int *timeoutSeconds);
/*
Calculate the (left, top) coordinates of the output image
within the input image, applying the given gravity during an embed.
@@ -288,12 +305,13 @@ namespace sharp {
/*
Convert RGBA value to another colourspace
*/
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation);
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba,
VipsInterpretation const interpretation, bool premultiply);
/*
Apply the alpha channel to a given colour
*/
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour);
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour, bool premultiply);
/*
Removes alpha channel, if any.

View File

@@ -182,7 +182,8 @@ namespace sharp {
0.0, 0.0, 0.0, 1.0));
}
VImage Modulate(VImage image, double const brightness, double const saturation, int const hue) {
VImage Modulate(VImage image, double const brightness, double const saturation,
int const hue, double const lightness) {
if (HasAlpha(image)) {
// Separate alpha channel
VImage alpha = image[image.bands() - 1];
@@ -190,7 +191,7 @@ namespace sharp {
.colourspace(VIPS_INTERPRETATION_LCH)
.linear(
{ brightness, saturation, 1},
{ 0.0, 0.0, static_cast<double>(hue) }
{ lightness, 0.0, static_cast<double>(hue) }
)
.colourspace(VIPS_INTERPRETATION_sRGB)
.bandjoin(alpha);
@@ -199,7 +200,7 @@ namespace sharp {
.colourspace(VIPS_INTERPRETATION_LCH)
.linear(
{ brightness, saturation, 1 },
{ 0.0, 0.0, static_cast<double>(hue) }
{ lightness, 0.0, static_cast<double>(hue) }
)
.colourspace(VIPS_INTERPRETATION_sRGB);
}

View File

@@ -98,9 +98,10 @@ namespace sharp {
VImage Recomb(VImage image, std::unique_ptr<double[]> const &matrix);
/*
* Modulate brightness, saturation and hue
* Modulate brightness, saturation, hue and lightness
*/
VImage Modulate(VImage image, double const brightness, double const saturation, int const hue);
VImage Modulate(VImage image, double const brightness, double const saturation,
int const hue, double const lightness);
/*
* Ensure the image is in a given colourspace

View File

@@ -90,7 +90,7 @@ class PipelineWorker : public Napi::AsyncWorker {
}
if (baton->rotationAngle != 0.0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground, FALSE);
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
}
}
@@ -289,16 +289,21 @@ class PipelineWorker : public Napi::AsyncWorker {
yfactor = static_cast<double>(shrunkOnLoadHeight) / static_cast<double>(targetResizeHeight);
}
}
// Remove animation properties from single page images
if (baton->input->pages == 1) {
image = sharp::RemoveAnimationProperties(image);
}
// Ensure we're using a device-independent colour space
char const *processingProfile = image.interpretation() == VIPS_INTERPRETATION_RGB16 ? "p3" : "srgb";
if (
sharp::HasProfile(image) &&
image.interpretation() != VIPS_INTERPRETATION_LABS &&
image.interpretation() != VIPS_INTERPRETATION_GREY16
) {
// Convert to sRGB using embedded profile
// Convert to sRGB/P3 using embedded profile
try {
image = image.icc_transform("srgb", VImage::option()
image = image.icc_transform(processingProfile, VImage::option()
->set("embedded", TRUE)
->set("depth", image.interpretation() == VIPS_INTERPRETATION_RGB16 ? 16 : 8)
->set("intent", VIPS_INTENT_PERCEPTUAL));
@@ -306,7 +311,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Ignore failure of embedded profile
}
} else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
image = image.icc_transform("srgb", VImage::option()
image = image.icc_transform(processingProfile, VImage::option()
->set("input_profile", "cmyk")
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
@@ -346,7 +351,8 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 || baton->hue != 0.0;
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 ||
baton->hue != 0.0 || baton->lightness != 0.0;
bool const shouldApplyClahe = baton->claheWidth != 0 && baton->claheHeight != 0;
if (shouldComposite && !sharp::HasAlpha(image)) {
@@ -376,11 +382,15 @@ class PipelineWorker : public Napi::AsyncWorker {
// Ensure shortest edge is at least 1 pixel
if (image.width() / xfactor < 0.5) {
xfactor = 2 * image.width();
baton->width = 1;
if (baton->canvas != Canvas::EMBED) {
baton->width = 1;
}
}
if (image.height() / yfactor < 0.5) {
yfactor = 2 * image.height();
baton->height = 1;
if (baton->canvas != Canvas::EMBED) {
baton->height = 1;
}
}
image = image.resize(1.0 / xfactor, VImage::option()
->set("vscale", 1.0 / yfactor)
@@ -423,7 +433,7 @@ class PipelineWorker : public Napi::AsyncWorker {
if (image.width() != baton->width || image.height() != baton->height) {
if (baton->canvas == Canvas::EMBED) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground, shouldPremultiplyAlpha);
// Embed
@@ -480,7 +490,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Rotate post-extract non-90 angle
if (!baton->rotateBeforePreExtract && baton->rotationAngle != 0.0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground, shouldPremultiplyAlpha);
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
}
@@ -493,7 +503,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Affine transform
if (baton->affineMatrix.size() > 0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground, shouldPremultiplyAlpha);
image = image.affine(baton->affineMatrix, VImage::option()->set("background", background)
->set("idx", baton->affineIdx)
->set("idy", baton->affineIdy)
@@ -505,7 +515,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Extend edges
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground, shouldPremultiplyAlpha);
// Embed
baton->width = image.width() + baton->extendLeft + baton->extendRight;
@@ -542,7 +552,7 @@ class PipelineWorker : public Napi::AsyncWorker {
}
if (shouldModulate) {
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue);
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue, baton->lightness);
}
// Sharpen
@@ -715,9 +725,10 @@ class PipelineWorker : public Napi::AsyncWorker {
// Convert colourspace, pass the current known interpretation so libvips doesn't have to guess
image = image.colourspace(baton->colourspace, VImage::option()->set("source_space", image.interpretation()));
// Transform colours from embedded profile to output profile
if (baton->withMetadata && sharp::HasProfile(image)) {
image = image.icc_transform(vips_enum_nick(VIPS_TYPE_INTERPRETATION, baton->colourspace),
VImage::option()->set("embedded", TRUE));
if (baton->withMetadata && sharp::HasProfile(image) && baton->withMetadataIcc.empty()) {
image = image.icc_transform("srgb", VImage::option()
->set("embedded", TRUE)
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
}
@@ -726,7 +737,8 @@ class PipelineWorker : public Napi::AsyncWorker {
image = image.icc_transform(
const_cast<char*>(baton->withMetadataIcc.data()),
VImage::option()
->set("input_profile", "srgb")
->set("input_profile", processingProfile)
->set("embedded", TRUE)
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
// Override EXIF Orientation tag
@@ -760,6 +772,7 @@ class PipelineWorker : public Napi::AsyncWorker {
baton->loop);
// Output
sharp::SetTimeout(image, baton->timeoutSeconds);
if (baton->fileOut.empty()) {
// Buffer output
if (baton->formatOut == "jpeg" || (baton->formatOut == "input" && inputImageType == sharp::ImageType::JPEG)) {
@@ -787,6 +800,22 @@ class PipelineWorker : public Napi::AsyncWorker {
} else {
baton->channels = std::min(baton->channels, 3);
}
} else if (baton->formatOut == "jp2" || (baton->formatOut == "input"
&& inputImageType == sharp::ImageType::JP2)) {
// Write JP2 to Buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JP2);
VipsArea *area = reinterpret_cast<VipsArea*>(image.jp2ksave_buffer(VImage::option()
->set("Q", baton->jp2Quality)
->set("lossless", baton->jp2Lossless)
->set("subsample_mode", baton->jp2ChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("tile_height", baton->jp2TileHeight)
->set("tile_width", baton->jp2TileWidth)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "jp2";
} else if (baton->formatOut == "png" || (baton->formatOut == "input" &&
(inputImageType == sharp::ImageType::PNG || (inputImageType == sharp::ImageType::GIF && !supportsGifOutput) ||
inputImageType == sharp::ImageType::SVG))) {
@@ -799,7 +828,7 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("bitdepth", baton->pngBitdepth)
->set("dither", baton->pngDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
@@ -868,6 +897,7 @@ class PipelineWorker : public Napi::AsyncWorker {
} else if (baton->formatOut == "heif" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::HEIF)) {
// Write HEIF to buffer
image = sharp::RemoveAnimationProperties(image);
VipsArea *area = reinterpret_cast<VipsArea*>(image.heifsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->heifQuality)
@@ -917,13 +947,14 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const isWebp = sharp::IsWebp(baton->fileOut);
bool const isGif = sharp::IsGif(baton->fileOut);
bool const isTiff = sharp::IsTiff(baton->fileOut);
bool const isJp2 = sharp::IsJp2(baton->fileOut);
bool const isHeif = sharp::IsHeif(baton->fileOut);
bool const isDz = sharp::IsDz(baton->fileOut);
bool const isDzZip = sharp::IsDzZip(baton->fileOut);
bool const isV = sharp::IsV(baton->fileOut);
bool const mightMatchInput = baton->formatOut == "input";
bool const willMatchInput = mightMatchInput &&
!(isJpeg || isPng || isWebp || isGif || isTiff || isHeif || isDz || isDzZip || isV);
!(isJpeg || isPng || isWebp || isGif || isTiff || isJp2 || isHeif || isDz || isDzZip || isV);
if (baton->formatOut == "jpeg" || (mightMatchInput && isJpeg) ||
(willMatchInput && inputImageType == sharp::ImageType::JPEG)) {
@@ -943,6 +974,18 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("optimize_coding", baton->jpegOptimiseCoding));
baton->formatOut = "jpeg";
baton->channels = std::min(baton->channels, 3);
} else if (baton->formatOut == "jp2" || (mightMatchInput && isJp2) ||
(willMatchInput && (inputImageType == sharp::ImageType::JP2))) {
// Write JP2 to file
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JP2);
image.jp2ksave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("Q", baton->jp2Quality)
->set("lossless", baton->jp2Lossless)
->set("subsample_mode", baton->jp2ChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("tile_height", baton->jp2TileHeight)
->set("tile_width", baton->jp2TileWidth));
baton->formatOut = "jp2";
} else if (baton->formatOut == "png" || (mightMatchInput && isPng) || (willMatchInput &&
(inputImageType == sharp::ImageType::PNG || (inputImageType == sharp::ImageType::GIF && !supportsGifOutput) ||
inputImageType == sharp::ImageType::SVG))) {
@@ -955,7 +998,7 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("bitdepth", baton->pngBitdepth)
->set("dither", baton->pngDither));
baton->formatOut = "png";
} else if (baton->formatOut == "webp" || (mightMatchInput && isWebp) ||
@@ -1008,6 +1051,7 @@ class PipelineWorker : public Napi::AsyncWorker {
} else if (baton->formatOut == "heif" || (mightMatchInput && isHeif) ||
(willMatchInput && inputImageType == sharp::ImageType::HEIF)) {
// Write HEIF to file
image = sharp::RemoveAnimationProperties(image);
image.heifsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->heifQuality)
@@ -1328,6 +1372,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->brightness = sharp::AttrAsDouble(options, "brightness");
baton->saturation = sharp::AttrAsDouble(options, "saturation");
baton->hue = sharp::AttrAsInt32(options, "hue");
baton->lightness = sharp::AttrAsDouble(options, "lightness");
baton->medianSize = sharp::AttrAsUint32(options, "medianSize");
baton->sharpenSigma = sharp::AttrAsDouble(options, "sharpenSigma");
baton->sharpenFlat = sharp::AttrAsDouble(options, "sharpenFlat");
@@ -1415,6 +1460,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
std::string k = sharp::AttrAsStr(mdStrKeys, i);
baton->withMetadataStrs.insert(std::make_pair(k, sharp::AttrAsStr(mdStrs, k)));
}
baton->timeoutSeconds = sharp::AttrAsUint32(options, "timeoutSeconds");
// Format-specific
baton->jpegQuality = sharp::AttrAsUint32(options, "jpegQuality");
baton->jpegProgressive = sharp::AttrAsBool(options, "jpegProgressive");
@@ -1429,8 +1475,13 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->pngAdaptiveFiltering = sharp::AttrAsBool(options, "pngAdaptiveFiltering");
baton->pngPalette = sharp::AttrAsBool(options, "pngPalette");
baton->pngQuality = sharp::AttrAsUint32(options, "pngQuality");
baton->pngColours = sharp::AttrAsUint32(options, "pngColours");
baton->pngBitdepth = sharp::AttrAsUint32(options, "pngBitdepth");
baton->pngDither = sharp::AttrAsDouble(options, "pngDither");
baton->jp2Quality = sharp::AttrAsUint32(options, "jp2Quality");
baton->jp2Lossless = sharp::AttrAsBool(options, "jp2Lossless");
baton->jp2TileHeight = sharp::AttrAsUint32(options, "jp2TileHeight");
baton->jp2TileWidth = sharp::AttrAsUint32(options, "jp2TileWidth");
baton->jp2ChromaSubsampling = sharp::AttrAsStr(options, "jp2ChromaSubsampling");
baton->webpQuality = sharp::AttrAsUint32(options, "webpQuality");
baton->webpAlphaQuality = sharp::AttrAsUint32(options, "webpAlphaQuality");
baton->webpLossless = sharp::AttrAsBool(options, "webpLossless");
@@ -1445,6 +1496,9 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->tiffTileHeight = sharp::AttrAsUint32(options, "tiffTileHeight");
baton->tiffXres = sharp::AttrAsDouble(options, "tiffXres");
baton->tiffYres = sharp::AttrAsDouble(options, "tiffYres");
if (baton->tiffXres == 1.0 && baton->tiffYres == 1.0 && baton->withMetadataDensity > 0) {
baton->tiffXres = baton->tiffYres = baton->withMetadataDensity / 25.4;
}
// tiff compression options
baton->tiffCompression = static_cast<VipsForeignTiffCompression>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_COMPRESSION,

View File

@@ -95,6 +95,7 @@ struct PipelineBaton {
double brightness;
double saturation;
int hue;
double lightness;
int medianSize;
double sharpenSigma;
double sharpenFlat;
@@ -146,8 +147,13 @@ struct PipelineBaton {
bool pngAdaptiveFiltering;
bool pngPalette;
int pngQuality;
int pngColours;
int pngBitdepth;
double pngDither;
int jp2Quality;
bool jp2Lossless;
int jp2TileHeight;
int jp2TileWidth;
std::string jp2ChromaSubsampling;
int webpQuality;
int webpAlphaQuality;
bool webpNearLossless;
@@ -176,6 +182,7 @@ struct PipelineBaton {
double withMetadataDensity;
std::string withMetadataIcc;
std::unordered_map<std::string, std::string> withMetadataStrs;
int timeoutSeconds;
std::unique_ptr<double[]> convKernel;
int convKernelWidth;
int convKernelHeight;
@@ -227,6 +234,7 @@ struct PipelineBaton {
brightness(1.0),
saturation(1.0),
hue(0),
lightness(0),
medianSize(0),
sharpenSigma(0.0),
sharpenFlat(1.0),
@@ -276,8 +284,13 @@ struct PipelineBaton {
pngAdaptiveFiltering(false),
pngPalette(false),
pngQuality(100),
pngColours(256),
pngBitdepth(8),
pngDither(1.0),
jp2Quality(80),
jp2Lossless(false),
jp2TileHeight(512),
jp2TileWidth(512),
jp2ChromaSubsampling("4:4:4"),
webpQuality(80),
webpAlphaQuality(100),
webpNearLossless(false),
@@ -303,6 +316,7 @@ struct PipelineBaton {
withMetadata(false),
withMetadataOrientation(-1),
withMetadataDensity(0.0),
timeoutSeconds(0),
convKernelWidth(0),
convKernelHeight(0),
convKernelScale(0.0),

View File

@@ -115,7 +115,7 @@ Napi::Value format(const Napi::CallbackInfo& info) {
Napi::Object format = Napi::Object::New(env);
for (std::string const f : {
"jpeg", "png", "webp", "tiff", "magick", "openslide", "dz",
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips"
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips", "jp2k"
}) {
// Input
Napi::Boolean hasInputFile =

View File

@@ -92,6 +92,7 @@ module.exports = {
inputPngRGBWithAlpha: getPath('2569067123_aca715a2ee_o.png'), // http://www.flickr.com/photos/grizdave/2569067123/ (same as inputJpg)
inputPngImageInAlpha: getPath('image-in-alpha.png'), // https://github.com/lovell/sharp/issues/1597
inputPngSolidAlpha: getPath('with-alpha.png'), // https://github.com/lovell/sharp/issues/1599
inputPngP3: getPath('p3.png'), // https://github.com/lovell/sharp/issues/2862
inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
inputWebPWithTransparency: getPath('5_webp_a.webp'), // http://www.gstatic.com/webp/gallery3/5_webp_a.webp
@@ -104,6 +105,8 @@ module.exports = {
inputTiffUncompressed: getPath('uncompressed_tiff.tiff'), // https://code.google.com/archive/p/imagetestsuite/wikis/TIFFTestSuite.wiki file: 0c84d07e1b22b76f24cccc70d8788e4a.tif
inputTiff8BitDepth: getPath('8bit_depth.tiff'),
inputTifftagPhotoshop: getPath('tifftag-photoshop.tiff'), // https://github.com/lovell/sharp/issues/1600
inputJp2: getPath('relax.jp2'), // https://www.fnordware.com/j2k/relax.jp2
inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
inputGifGreyPlusAlpha: getPath('grey-plus-alpha.gif'), // http://i.imgur.com/gZ5jlmE.gif
inputGifAnimated: getPath('rotating-squares.gif'), // CC0 https://loading.io/spinner/blocks/-rotating-squares-preloader-gif

BIN
test/fixtures/p3.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 610 B

BIN
test/fixtures/relax.jp2 vendored Normal file

Binary file not shown.

View File

@@ -3,7 +3,7 @@
const assert = require('assert');
const sharp = require('../../');
const { inputAvif, inputJpg } = require('../fixtures');
const { inputAvif, inputJpg, inputGifAnimated } = require('../fixtures');
describe('AVIF', () => {
it('called without options does not throw an error', () => {
@@ -81,4 +81,29 @@ describe('AVIF', () => {
width: 32
});
});
it('can convert animated GIF to non-animated AVIF', async () => {
const data = await sharp(inputGifAnimated, { animated: true })
.resize(10)
.avif({ speed: 8 })
.toBuffer();
const metadata = await sharp(data)
.metadata();
const { size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
channels: 4,
compression: 'av1',
depth: 'uchar',
format: 'heif',
hasAlpha: true,
hasProfile: false,
height: 300,
isProgressive: false,
pageHeight: 300,
pagePrimary: 0,
pages: 1,
space: 'srgb',
width: 10
});
});
});

View File

@@ -105,6 +105,25 @@ describe('Colour space conversion', function () {
});
});
it('Convert P3 to sRGB', async () => {
const [r, g, b] = await sharp(fixtures.inputPngP3)
.raw()
.toBuffer();
assert.strictEqual(r, 255);
assert.strictEqual(g, 0);
assert.strictEqual(b, 0);
});
it('Passthrough P3', async () => {
const [r, g, b] = await sharp(fixtures.inputPngP3)
.withMetadata({ icc: 'p3' })
.raw()
.toBuffer();
assert.strictEqual(r, 234);
assert.strictEqual(g, 51);
assert.strictEqual(b, 34);
});
it('Invalid pipelineColourspace input', function () {
assert.throws(function () {
sharp(fixtures.inputJpg)

View File

@@ -124,4 +124,30 @@ describe('Extend', function () {
fixtures.assertSimilar(fixtures.expected('extend-2channel.png'), data, done);
});
});
it('Premultiply background when compositing', async () => {
const background = '#bf1942cc';
const data = await sharp({
create: {
width: 1, height: 1, channels: 4, background: '#fff0'
}
})
.composite([{
input: {
create: {
width: 1, height: 1, channels: 4, background
}
}
}])
.extend({
left: 1, background
})
.raw()
.toBuffer();
const [r1, g1, b1, a1, r2, g2, b2, a2] = data;
assert.strictEqual(true, Math.abs(r2 - r1) < 2);
assert.strictEqual(true, Math.abs(g2 - g1) < 2);
assert.strictEqual(true, Math.abs(b2 - b1) < 2);
assert.strictEqual(true, Math.abs(a2 - a1) < 2);
});
});

View File

@@ -57,7 +57,7 @@ describe('HEIF', () => {
});
it('out of range speed should throw an error', () => {
assert.throws(() => {
sharp().heif({ speed: 9 });
sharp().heif({ speed: 10 });
});
});
it('invalid speed should throw an error', () => {

View File

@@ -1,6 +1,7 @@
'use strict';
const fs = require('fs');
const path = require('path');
const assert = require('assert');
const rimraf = require('rimraf');
@@ -297,6 +298,21 @@ describe('Input/output', function () {
});
});
it('Support output to tif format', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 240)
.toFormat('tif')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Fail when output File is input File', function (done) {
sharp(fixtures.inputJpg).toFile(fixtures.inputJpg, function (err) {
assert(err instanceof Error);
@@ -316,6 +332,48 @@ describe('Input/output', function () {
});
});
it('Fail when output File is input File (relative output, absolute input)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(fixtures.inputJpg).toFile(relativePath, function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});
it('Fail when output File is input File via Promise (relative output, absolute input)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(fixtures.inputJpg).toFile(relativePath).then(function (data) {
assert(false);
done();
}).catch(function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});
it('Fail when output File is input File (relative input, absolute output)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(relativePath).toFile(fixtures.inputJpg, function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});
it('Fail when output File is input File via Promise (relative input, absolute output)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(relativePath).toFile(fixtures.inputJpg).then(function (data) {
assert(false);
done();
}).catch(function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});
it('Fail when output File is empty', function (done) {
sharp(fixtures.inputJpg).toFile('', function (err) {
assert(err instanceof Error);

99
test/unit/jp2.js Normal file
View File

@@ -0,0 +1,99 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('JP2 output', () => {
if (!sharp.format.jp2k.input.buffer) {
it('JP2 output should fail due to missing OpenJPEG', () => {
assert.rejects(() =>
sharp(fixtures.inputJpg)
.jp2()
.toBuffer(),
/JP2 output requires libvips with support for OpenJPEG/
);
});
it('JP2 file output should fail due to missing OpenJPEG', () => {
assert.rejects(async () => await sharp().toFile('test.jp2'),
/JP2 output requires libvips with support for OpenJPEG/
);
});
} else {
it('JP2 Buffer to PNG Buffer', () => {
sharp(fs.readFileSync(fixtures.inputJp2))
.resize(8, 15)
.png()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(15, info.height);
assert.strictEqual(4, info.channels);
});
});
it('JP2 quality', function (done) {
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ quality: 70 })
.toBuffer(function (err, buffer70) {
if (err) throw err;
sharp(fixtures.inputJp2)
.resize(320, 240)
.toBuffer(function (err, buffer80) {
if (err) throw err;
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ quality: 90 })
.toBuffer(function (err, buffer90) {
if (err) throw err;
assert(buffer70.length < buffer80.length);
assert(buffer80.length < buffer90.length);
done();
});
});
});
});
it('Without chroma subsampling generates larger file', function (done) {
// First generate with chroma subsampling (default)
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ chromaSubsampling: '4:2:0' })
.toBuffer(function (err, withChromaSubsamplingData, withChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withChromaSubsamplingData.length > 0);
assert.strictEqual(withChromaSubsamplingData.length, withChromaSubsamplingInfo.size);
assert.strictEqual('jp2', withChromaSubsamplingInfo.format);
assert.strictEqual(320, withChromaSubsamplingInfo.width);
assert.strictEqual(240, withChromaSubsamplingInfo.height);
// Then generate without
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, withoutChromaSubsamplingData, withoutChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withoutChromaSubsamplingData.length > 0);
assert.strictEqual(withoutChromaSubsamplingData.length, withoutChromaSubsamplingInfo.size);
assert.strictEqual('jp2', withoutChromaSubsamplingInfo.format);
assert.strictEqual(320, withoutChromaSubsamplingInfo.width);
assert.strictEqual(240, withoutChromaSubsamplingInfo.height);
assert.strictEqual(true, withChromaSubsamplingData.length <= withoutChromaSubsamplingData.length);
done();
});
});
});
it('Invalid JP2 chromaSubsampling value throws error', function () {
assert.throws(function () {
sharp().jpeg({ chromaSubsampling: '4:2:2' });
});
});
}
});

View File

@@ -18,7 +18,9 @@ describe('Modulate', function () {
{ saturation: null },
{ hue: '50deg' },
{ hue: 1.5 },
{ hue: null }
{ hue: null },
{ lightness: '+50' },
{ lightness: null }
].forEach(function (options) {
it('should throw', function () {
assert.throws(function () {
@@ -108,6 +110,22 @@ describe('Modulate', function () {
assert.deepStrictEqual({ r: 127, g: 83, b: 81 }, { r, g, b });
});
it('should be able to lighten', async () => {
const [r, g, b] = await sharp({
create: {
width: 1,
height: 1,
channels: 3,
background: { r: 153, g: 68, b: 68 }
}
})
.modulate({ lightness: 10 })
.raw()
.toBuffer();
assert.deepStrictEqual({ r: 182, g: 93, b: 92 }, { r, g, b });
});
it('should be able to modulate all channels', async () => {
const [r, g, b] = await sharp({
create: {

View File

@@ -605,6 +605,40 @@ describe('Resize dimensions', function () {
});
});
it('Ensure embedded shortest edge (height) is at least 1 pixel', function () {
return sharp({
create: {
width: 200,
height: 1,
channels: 3,
background: 'red'
}
})
.resize({ width: 50, height: 50, fit: sharp.fit.contain })
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(50, output.info.width);
assert.strictEqual(50, output.info.height);
});
});
it('Ensure embedded shortest edge (width) is at least 1 pixel', function () {
return sharp({
create: {
width: 1,
height: 200,
channels: 3,
background: 'red'
}
})
.resize({ width: 50, height: 50, fit: sharp.fit.contain })
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(50, output.info.width);
assert.strictEqual(50, output.info.height);
});
});
it('Skip shrink-on-load where one dimension <4px', async () => {
const jpeg = await sharp({
create: {

View File

@@ -188,6 +188,26 @@ describe('TIFF', function () {
)
);
it('TIFF imputes xres and yres from withMetadataDensity if not explicitly provided', async () => {
const data = await sharp(fixtures.inputTiff)
.resize(8, 8)
.tiff()
.withMetadata({ density: 600 })
.toBuffer();
const { density } = await sharp(data).metadata();
assert.strictEqual(600, density);
});
it('TIFF uses xres and yres over withMetadataDensity if explicitly provided', async () => {
const data = await sharp(fixtures.inputTiff)
.resize(8, 8)
.tiff({ xres: 1000, yres: 1000 })
.withMetadata({ density: 600 })
.toBuffer();
const { density } = await sharp(data).metadata();
assert.strictEqual(25400, density);
});
it('TIFF invalid xres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ xres: '1000.0' });

26
test/unit/timeout.js Normal file
View File

@@ -0,0 +1,26 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Timeout', function () {
it('Will timeout after 1s when performing slow blur operation', () => assert.rejects(
() => sharp(fixtures.inputJpg)
.blur(100)
.timeout({ seconds: 1 })
.toBuffer(),
/timeout: [0-9]+% complete/
));
it('invalid object', () => assert.throws(
() => sharp().timeout('fail'),
/Expected object for options but received fail of type string/
));
it('invalid seconds', () => assert.throws(
() => sharp().timeout({ seconds: 'fail' }),
/Expected integer between 0 and 3600 for seconds but received fail of type string/
));
});

View File

@@ -209,4 +209,24 @@ describe('WebP', function () {
fixtures.assertSimilar(fixtures.inputWebPAnimated, data, done);
});
});
it('should remove animation properties when loading single page', async () => {
const data = await sharp(fixtures.inputGifAnimatedLoop3)
.resize({ height: 570 })
.webp({ reductionEffort: 0 })
.toBuffer();
const metadata = await sharp(data).metadata();
assert.deepStrictEqual(metadata, {
format: 'webp',
size: 2580,
width: 740,
height: 570,
space: 'srgb',
channels: 3,
depth: 'uchar',
isProgressive: false,
hasProfile: false,
hasAlpha: false
});
});
});