Mirror of https://github.com/lovell/sharp.git (synced 2026-02-04 13:46:19 +01:00)

Compare commits
48 Commits
16ea04fe80, 9c547dc321, 5522060e9e, d2f0fa855b, 2bb3ea8170, 3434eef5b9, 2f67823c3d, 38c760cdd7,
14c3346800, 0da55bab7e, cfb659f576, cc5ac5385f, 93fafb0c18, 41e3c8ca09, da61ea0199, 7e6a70af44,
f5845c7e61, eb1e53db83, 3340120aea, de0fc07092, dc4b39f73f, e873978e53, 5255964c79, dea319daf6,
a2ca678854, e98993a6e2, 90abd927c9, 4d7957a043, bf9bb56367, 8408e99aa3, a39f959dcc, d08baa20e6,
391018ad3d, afed876f90, d6b60a60c6, 5f8646d937, b763801d68, 2e0f789c9b, a8645f0f38, 7b58ad9360,
9ebbcc3701, e87204b92c, a4c6eba7d4, b9c3851515, 97cf69c26a, d5be024bfd, de01fc44e7, ca102ebd6c
.github/SECURITY.md (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
# Security Policy

## Supported Versions

The latest version of `sharp` as published to npm
and reported by `npm view sharp dist-tags.latest`
is supported with security updates.

## Reporting a Vulnerability

Please use
[e-mail](https://github.com/lovell/sharp/blob/main/package.json#L5)
to report a vulnerability.

You can expect a response within 48 hours
if you are a human reporting a genuine issue.

Thank you in advance.
.github/workflows/ci.yml (vendored, 1 change)
@@ -66,6 +66,7 @@ jobs:
if: contains(matrix.container, 'centos')
run: |
curl -sL https://rpm.nodesource.com/setup_${{ matrix.nodejs_version }}.x | bash -
yum install -y https://packages.endpointdev.com/rhel/7/os/x86_64/endpoint-repo.x86_64.rpm
yum install -y centos-release-scl
yum install -y devtoolset-11-gcc-c++ make git python3 nodejs fontconfig google-noto-sans-fonts
echo "/opt/rh/devtoolset-11/root/usr/bin" >> $GITHUB_PATH
@@ -98,8 +98,6 @@ readableStream
A [guide for contributors](https://github.com/lovell/sharp/blob/main/.github/CONTRIBUTING.md)
covers reporting bugs, requesting features and submitting code changes.

[](https://nodejs.org/dist/latest/docs/api/n-api.html#n_api_n_api_version_matrix)

## Licensing

Copyright 2013 Lovell Fuller and others.
@@ -70,7 +70,9 @@
}, {
'target_name': 'sharp-<(platform_and_arch)',
'defines': [
'NAPI_VERSION=7'
'NAPI_VERSION=7',
'NODE_ADDON_API_DISABLE_DEPRECATED',
'NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS'
],
'dependencies': [
'<!(node -p "require(\'node-addon-api\').gyp")',
@@ -55,7 +55,8 @@ Alternative spelling of `greyscale`.
Set the pipeline colourspace.

The input image will be converted to the provided colourspace at the start of the pipeline.
All operations will use this colourspace before converting to the output colourspace, as defined by [toColourspace](#toColourspace).
All operations will use this colourspace before converting to the output colourspace,
as defined by [toColourspace](#tocolourspace).

This feature is experimental and has not yet been fully-tested with all operations.
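For context, a minimal usage sketch of the pipeline colourspace behaviour described above (not part of the diff; the input file name is hypothetical):

```js
// Convert to 16-bit RGB at the start of the pipeline, process there,
// then convert to sRGB for PNG output
const data = await sharp('input.jpg')
  .pipelineColourspace('rgb16')
  .resize(320)
  .toColourspace('srgb')
  .png()
  .toBuffer();
```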
@@ -4,7 +4,7 @@ Composite image(s) over the processed (resized, extracted etc.) image.
The images to composite must be the same size or smaller than the processed image.
If both `top` and `left` options are provided, they take precedence over `gravity`.

Any resize or rotate operations in the same processing pipeline
Any resize, rotate or extract operations in the same processing pipeline
will always be applied to the input image before composition.

The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
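A short sketch of the composite behaviour described above (not part of the diff; the file names are hypothetical):

```js
// top/left take precedence over gravity; the resize is applied
// to the base image before the overlay is composited
const data = await sharp('background.png')
  .resize(640, 480)
  .composite([{ input: 'overlay.png', top: 10, left: 10, blend: 'over' }])
  .toBuffer();
```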
@@ -51,9 +51,9 @@ Implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_st
| [options.text.text] | <code>string</code> | | text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`. |
| [options.text.font] | <code>string</code> | | font name to render with. |
| [options.text.fontfile] | <code>string</code> | | absolute filesystem path to a font file that can be used by `font`. |
| [options.text.width] | <code>number</code> | <code>0</code> | integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. |
| [options.text.height] | <code>number</code> | <code>0</code> | integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. |
| [options.text.align] | <code>string</code> | <code>"'left'"</code> | text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). |
| [options.text.width] | <code>number</code> | <code>0</code> | Integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. |
| [options.text.height] | <code>number</code> | <code>0</code> | Maximum integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. |
| [options.text.align] | <code>string</code> | <code>"'left'"</code> | Alignment style for multi-line text (`'left'`, `'centre'`, `'center'`, `'right'`). |
| [options.text.justify] | <code>boolean</code> | <code>false</code> | set this to true to apply justification to the text. |
| [options.text.dpi] | <code>number</code> | <code>72</code> | the resolution (size) at which to render the text. Does not take effect if `height` is specified. |
| [options.text.rgba] | <code>boolean</code> | <code>false</code> | set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. |
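For reference, a minimal sketch using the text options above (not part of the diff; values are illustrative):

```js
// Render wrapped, centre-aligned Pango markup to an RGBA PNG
const { data, info } = await sharp({
  text: {
    text: '<span foreground="red">Red!</span> and plain text',
    width: 400,
    height: 100,
    align: 'centre',
    rgba: true
  }
})
  .png()
  .toBuffer({ resolveWithObject: true });
```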
@@ -57,9 +57,13 @@ const resizeThenRotate = await sharp(input)


## flip
Flip the image about the vertical Y axis. This always occurs before rotation, if any.
Mirror the image vertically (up-down) about the x-axis.
This always occurs before rotation, if any.

The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.

This operation does not work correctly with multi-page images.



| Param | Type | Default |
@@ -73,7 +77,9 @@ const output = await sharp(input).flip().toBuffer();


## flop
Flop the image about the horizontal X axis. This always occurs before rotation, if any.
Mirror the image horizontally (left-right) about the y-axis.
This always occurs before rotation, if any.

The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.

@@ -93,7 +99,7 @@ Perform an affine transform on an image. This operation will always occur after

You must provide an array of length 4 or a 2x2 affine transformation matrix.
By default, new pixels are filled with a black background. You can provide a background color with the `background` option.
A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolator` Object e.g. `sharp.interpolator.nohalo`.
A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolators` Object e.g. `sharp.interpolators.nohalo`.

In the case of a 2x2 matrix, the transform is:
- X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
@@ -127,7 +133,7 @@ where:
const pipeline = sharp()
.affine([[1, 0.3], [0.1, 0.7]], {
background: 'white',
interpolate: sharp.interpolators.nohalo
interpolator: sharp.interpolators.nohalo
})
.toBuffer((err, outputBuffer, info) => {
// outputBuffer contains the transformed image
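A small sketch of the flat-array form mentioned above (not part of the diff; `input` is a hypothetical Buffer or file path):

```js
// Equivalent 4-element form of the 2x2 matrix [[1, 0.3], [0.1, 0.7]]
const output = await sharp(input)
  .affine([1, 0.3, 0.1, 0.7], {
    background: 'white',
    interpolator: sharp.interpolators.nohalo
  })
  .toBuffer();
```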
@@ -265,6 +271,31 @@ await sharp(rgbaInput)
```


## unflatten
Ensure the image has an alpha channel
with all white pixel values made fully transparent.

Existing alpha channel values for non-white pixels remain unchanged.

This feature is experimental and the API may change.


**Since**: 0.32.1
**Example**
```js
await sharp(rgbInput)
.unflatten()
.toBuffer();
```
**Example**
```js
await sharp(rgbInput)
.threshold(128, { grayscale: false }) // convert bright pixels to white
.unflatten()
.toBuffer();
```


## gamma
Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of `1/gamma`
then increasing the encoding (brighten) post-resize at a factor of `gamma`.
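A usage sketch for the gamma behaviour described above (not part of the diff; the input file name is hypothetical):

```js
// Resize with gamma correction to better preserve perceived brightness
const data = await sharp('input.jpg')
  .resize(320, 240)
  .gamma(2.2)
  .toBuffer();
```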
@@ -6,7 +6,7 @@ with JPEG, PNG, WebP, AVIF, TIFF, GIF, DZI, and libvips' V format supported.
Note that raw pixel data is only supported for buffer output.

By default all metadata will be removed, which includes EXIF-based orientation.
See [withMetadata](#withMetadata) for control over this.
See [withMetadata](#withmetadata) for control over this.

The caller is responsible for ensuring directory structures and permissions exist.

@@ -42,12 +42,12 @@ sharp(input)
Write output to a Buffer.
JPEG, PNG, WebP, AVIF, TIFF, GIF and raw pixel data output are supported.

Use [toFormat](#toFormat) or one of the format-specific functions such as [jpeg](#jpeg), [png](#png) etc. to set the output format.
Use [toFormat](#toformat) or one of the format-specific functions such as [jpeg](#jpeg), [png](#png) etc. to set the output format.

If no explicit format is set, the output format will match the input image, except SVG input which becomes PNG output.

By default all metadata will be removed, which includes EXIF-based orientation.
See [withMetadata](#withMetadata) for control over this.
See [withMetadata](#withmetadata) for control over this.

`callback`, if present, gets three arguments `(err, data, info)` where:
- `err` is an error, if any.
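A short sketch of the Buffer output flow and the `info` object described above (not part of the diff; `input` is hypothetical):

```js
// Promise-based variant of toBuffer, resolving with both data and info
const { data, info } = await sharp(input)
  .png()
  .toBuffer({ resolveWithObject: true });
console.log(info.format, info.width, info.height, info.size);
```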
@@ -140,12 +140,18 @@ sharp('input.jpg')
```
**Example**
```js
// Set "IFD0-Copyright" in output EXIF metadata
// Set output EXIF metadata
const data = await sharp(input)
.withMetadata({
exif: {
IFD0: {
Copyright: 'Wernham Hogg'
Copyright: 'The National Gallery'
},
IFD3: {
GPSLatitudeRef: 'N',
GPSLatitude: '51/1 30/1 3230/100',
GPSLongitudeRef: 'W',
GPSLongitude: '0/1 7/1 4366/100'
}
}
})
@@ -370,10 +376,53 @@ await sharp('in.gif', { animated: true })
```


## jp2
Use these JP2 options for output image.

Requires libvips compiled with support for OpenJPEG.
The prebuilt binaries do not include this - see
[installing a custom libvips](https://sharp.pixelplumbing.com/install#custom-libvips).


**Throws**:

- <code>Error</code> Invalid options

**Since**: 0.29.1

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| [options] | <code>Object</code> | | output options |
| [options.quality] | <code>number</code> | <code>80</code> | quality, integer 1-100 |
| [options.lossless] | <code>boolean</code> | <code>false</code> | use lossless compression mode |
| [options.tileWidth] | <code>number</code> | <code>512</code> | horizontal tile size |
| [options.tileHeight] | <code>number</code> | <code>512</code> | vertical tile size |
| [options.chromaSubsampling] | <code>string</code> | <code>"'4:4:4'"</code> | set to '4:2:0' to use chroma subsampling |

**Example**
```js
// Convert any input to lossless JP2 output
const data = await sharp(input)
.jp2({ lossless: true })
.toBuffer();
```
**Example**
```js
// Convert any input to very high quality JP2 output
const data = await sharp(input)
.jp2({
quality: 100,
chromaSubsampling: '4:4:4'
})
.toBuffer();
```


## tiff
Use these TIFF options for output image.

The `density` can be set in pixels/inch via [withMetadata](#withMetadata) instead of providing `xres` and `yres` in pixels/mm.
The `density` can be set in pixels/inch via [withMetadata](#withmetadata)
instead of providing `xres` and `yres` in pixels/mm.


**Throws**:
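A sketch of the density option mentioned above (not part of the diff; values are illustrative):

```js
// Set 300 pixels/inch via withMetadata rather than tiff xres/yres
const data = await sharp(input)
  .withMetadata({ density: 300 })
  .tiff({ compression: 'lzw' })
  .toBuffer();
```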
@@ -29,8 +29,8 @@ const jsdoc2md = require('jsdoc-to-markdown');
});

const cleanMarkdown = markdown
.replace(/(## [A-Za-z]+)[^\n]*/g, '$1') // simplify headings to match those of documentationjs, ensures existing URLs work
.replace(/<a name="[A-Za-z+]+"><\/a>/g, '') // remove anchors, let docute add these (at markdown to HTML render time)
.replace(/(## [A-Za-z0-9]+)[^\n]*/g, '$1') // simplify headings to match those of documentationjs, ensures existing URLs work
.replace(/<a name="[A-Za-z0-9+]+"><\/a>/g, '') // remove anchors, let docute add these (at markdown to HTML render time)
.replace(/\*\*Kind\*\*: global[^\n]+/g, '') // remove all "global" Kind labels (requires JSDoc refactoring)
.trim();
@@ -4,6 +4,57 @@

Requires libvips v8.14.2

### v0.32.2 - 11th July 2023

* Limit HEIF output dimensions to 16384x16384, matches libvips.

* Ensure exceptions are not thrown when terminating.
[#3569](https://github.com/lovell/sharp/issues/3569)

* Ensure the same access method is used for all inputs (regression in 0.32.0).
[#3669](https://github.com/lovell/sharp/issues/3669)

* Improve detection of jp2 filename extensions.
[#3674](https://github.com/lovell/sharp/pull/3674)
[@bianjunjie1981](https://github.com/bianjunjie1981)

* Guard use of smartcrop premultiplied option to prevent warning (regression in 0.32.1).
[#3710](https://github.com/lovell/sharp/issues/3710)

* Prevent over-compute in affine-based rotate before resize.
[#3722](https://github.com/lovell/sharp/issues/3722)

* Allow sequential read for EXIF-based auto-orientation.
[#3725](https://github.com/lovell/sharp/issues/3725)

### v0.32.1 - 27th April 2023

* Add experimental `unflatten` operation.
[#3461](https://github.com/lovell/sharp/pull/3461)
[@antonmarsden](https://github.com/antonmarsden)

* Ensure use of `flip` operation forces random access read (regression in 0.32.0).
[#3600](https://github.com/lovell/sharp/issues/3600)

* Ensure `linear` operation works with 16-bit input (regression in 0.31.3).
[#3605](https://github.com/lovell/sharp/issues/3605)

* Install: ensure proxy URLs are logged correctly.
[#3615](https://github.com/lovell/sharp/pull/3615)
[@TomWis97](https://github.com/TomWis97)

* Ensure profile-less CMYK to CMYK roundtrip skips colourspace conversion.
[#3620](https://github.com/lovell/sharp/issues/3620)

* Add support for `modulate` operation when using non-sRGB pipeline colourspace.
[#3620](https://github.com/lovell/sharp/issues/3620)

* Ensure `trim` operation works with CMYK images (regression in 0.31.0).
[#3636](https://github.com/lovell/sharp/issues/3636)

* Install: coerce libc version to semver.
[#3641](https://github.com/lovell/sharp/issues/3641)

### v0.32.0 - 24th March 2023

* Default to using sequential rather than random access read where possible.
@@ -272,3 +272,6 @@ GitHub: https://github.com/janaz

Name: Lachlan Newman
GitHub: https://github.com/LachlanNewman

Name: BJJ
GitHub: https://github.com/bianjunjie1981
@@ -98,7 +98,7 @@ Note: jimp does not support premultiply/unpremultiply.
| jimp | buffer | buffer | 7.62 | 19.1 |
| imagemagick | file | file | 7.96 | 19.9 |
| sharp | file | file | 12.97 | 32.4 |
| sharp | buffer | buffer | 13.12 | 45.0 |
| sharp | buffer | buffer | 13.12 | 32.8 |

## Running the benchmark test
File diff suppressed because one or more lines are too long
@@ -11,6 +11,7 @@ const zlib = require('zlib');
const { createHash } = require('crypto');

const detectLibc = require('detect-libc');
const semverCoerce = require('semver/functions/coerce');
const semverLessThan = require('semver/functions/lt');
const semverSatisfies = require('semver/functions/satisfies');
const simpleGet = require('simple-get');
@@ -77,7 +78,11 @@ const verifyIntegrity = function (platformAndArch) {
flush: function (done) {
const digest = `sha512-${hash.digest('base64')}`;
if (expected !== digest) {
libvips.removeVendoredLibvips();
try {
libvips.removeVendoredLibvips();
} catch (err) {
libvips.log(err.message);
}
libvips.log(`Integrity expected: ${expected}`);
libvips.log(`Integrity received: ${digest}`);
done(new Error(`Integrity check failed for ${platformAndArch}`));
@@ -135,17 +140,19 @@ try {
throw new Error(`BSD/SunOS systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
// Linux libc version check
const libcFamily = detectLibc.familySync();
const libcVersion = detectLibc.versionSync();
if (libcFamily === detectLibc.GLIBC && libcVersion && minimumGlibcVersionByArch[arch]) {
const libcVersionWithoutPatch = libcVersion.split('.').slice(0, 2).join('.');
if (semverLessThan(`${libcVersionWithoutPatch}.0`, `${minimumGlibcVersionByArch[arch]}.0`)) {
handleError(new Error(`Use with glibc ${libcVersion} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
const libcVersionRaw = detectLibc.versionSync();
if (libcVersionRaw) {
const libcFamily = detectLibc.familySync();
const libcVersion = semverCoerce(libcVersionRaw).version;
if (libcFamily === detectLibc.GLIBC && minimumGlibcVersionByArch[arch]) {
if (semverLessThan(libcVersion, semverCoerce(minimumGlibcVersionByArch[arch]).version)) {
handleError(new Error(`Use with glibc ${libcVersionRaw} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
}
}
}
if (libcFamily === detectLibc.MUSL && libcVersion) {
if (semverLessThan(libcVersion, '1.1.24')) {
handleError(new Error(`Use with musl ${libcVersion} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
if (libcFamily === detectLibc.MUSL) {
if (semverLessThan(libcVersion, '1.1.24')) {
handleError(new Error(`Use with musl ${libcVersionRaw} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
}
}
}
// Node.js minimum version check
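A quick sketch of why the coercion above helps (not part of the diff; version strings are illustrative):

```js
const semverCoerce = require('semver/functions/coerce');
const semverLessThan = require('semver/functions/lt');

// detect-libc can report a two-part glibc version such as '2.31',
// which is not valid semver until coerced to '2.31.0'
console.log(semverCoerce('2.31').version); // '2.31.0'
console.log(semverLessThan(semverCoerce('2.31').version, semverCoerce('2.17').version)); // false
```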
@@ -30,7 +30,7 @@ module.exports = function (log) {
const proxyAuth = proxy.username && proxy.password
? `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`
: null;
log(`Via proxy ${proxy.protocol}://${proxy.hostname}:${proxy.port} ${proxyAuth ? 'with' : 'no'} credentials`);
log(`Via proxy ${proxy.protocol}//${proxy.hostname}:${proxy.port} ${proxyAuth ? 'with' : 'no'} credentials`);
return tunnel({
proxy: {
port: Number(proxy.port),
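For context, a small sketch of why the extra colon was removed (not part of the diff; the proxy URL is illustrative):

```js
// The WHATWG URL protocol property already ends with a colon
const proxy = new URL('https://user:pass@secure:123');
console.log(proxy.protocol); // 'https:'
console.log(`${proxy.protocol}//${proxy.hostname}:${proxy.port}`); // 'https://secure:123'
```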
@@ -70,7 +70,8 @@ function grayscale (grayscale) {
 * Set the pipeline colourspace.
 *
 * The input image will be converted to the provided colourspace at the start of the pipeline.
 * All operations will use this colourspace before converting to the output colourspace, as defined by {@link toColourspace}.
 * All operations will use this colourspace before converting to the output colourspace,
 * as defined by {@link #tocolourspace|toColourspace}.
 *
 * This feature is experimental and has not yet been fully-tested with all operations.
 *
@@ -46,7 +46,7 @@ const blend = {
 * The images to composite must be the same size or smaller than the processed image.
 * If both `top` and `left` options are provided, they take precedence over `gravity`.
 *
 * Any resize or rotate operations in the same processing pipeline
 * Any resize, rotate or extract operations in the same processing pipeline
 * will always be applied to the input image before composition.
 *
 * The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
@@ -154,9 +154,9 @@ const debuglog = util.debuglog('sharp');
 * @param {string} [options.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
 * @param {string} [options.text.font] - font name to render with.
 * @param {string} [options.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
 * @param {number} [options.text.width=0] - integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
 * @param {number} [options.text.height=0] - integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
 * @param {string} [options.text.align='left'] - text alignment (`'left'`, `'centre'`, `'center'`, `'right'`).
 * @param {number} [options.text.width=0] - Integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
 * @param {number} [options.text.height=0] - Maximum integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
 * @param {string} [options.text.align='left'] - Alignment style for multi-line text (`'left'`, `'centre'`, `'center'`, `'right'`).
 * @param {boolean} [options.text.justify=false] - set this to true to apply justification to the text.
 * @param {number} [options.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
 * @param {boolean} [options.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
@@ -217,6 +217,7 @@ const Sharp = function (input, options) {
tintB: 128,
flatten: false,
flattenBackground: [0, 0, 0],
unflatten: false,
negate: false,
negateAlpha: true,
medianSize: 0,
lib/index.d.ts (vendored, 30 changes)
@@ -356,7 +356,7 @@ declare namespace sharp {
 * Perform an affine transform on an image. This operation will always occur after resizing, extraction and rotation, if any.
 * You must provide an array of length 4 or a 2x2 affine transformation matrix.
 * By default, new pixels are filled with a black background. You can provide a background color with the `background` option.
 * A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolator` Object e.g. `sharp.interpolator.nohalo`.
 * A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolators` Object e.g. `sharp.interpolators.nohalo`.
 *
 * In the case of a 2x2 matrix, the transform is:
 * X = matrix[0, 0] * (x + idx) + matrix[0, 1] * (y + idy) + odx
@@ -427,6 +427,13 @@ declare namespace sharp {
 */
flatten(flatten?: boolean | FlattenOptions): Sharp;

/**
 * Ensure the image has an alpha channel with all white pixel values made fully transparent.
 * Existing alpha channel values for non-white pixels remain unchanged.
 * @returns A sharp instance that can be used to chain operations
 */
unflatten(): Sharp;

/**
 * Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of 1/gamma then increasing the encoding (brighten) post-resize at a factor of gamma.
 * This can improve the perceived brightness of a resized image in non-linear colour spaces.
@@ -1343,9 +1350,9 @@ declare namespace sharp {
grayscale?: boolean | undefined;
}

interface OverlayOptions {
interface OverlayOptions extends SharpOptions {
/** Buffer containing image data, String containing the path to an image file, or Create object */
input?: string | Buffer | { create: Create } | { text: CreateText } | undefined;
input?: string | Buffer | { create: Create } | { text: CreateText } | { raw: CreateRaw } | undefined;
/** how to blend this image with the image below. (optional, default `'over'`) */
blend?: Blend | undefined;
/** gravity at which to place the overlay. (optional, default 'centre') */
@@ -1356,25 +1363,8 @@ declare namespace sharp {
left?: number | undefined;
/** set to true to repeat the overlay image across the entire image with the given gravity. (optional, default false) */
tile?: boolean | undefined;
/** number representing the DPI for vector overlay image. (optional, default 72) */
density?: number | undefined;
/** describes overlay when using raw pixel data. */
raw?: Raw | undefined;
/** Set to true to avoid premultipling the image below. Equivalent to the --premultiplied vips option. */
premultiplied?: boolean | undefined;
/** Set to true to read all frames/pages of an animated image. (optional, default false). */
animated?: boolean | undefined;
/**
 * When to abort processing of invalid pixel data, one of (in order of sensitivity):
 * 'none' (least), 'truncated', 'error' or 'warning' (most), highers level imply lower levels, invalid metadata will always abort. (optional, default 'warning')
 */
failOn?: FailOnOptions | undefined;
/**
 * Do not process input images where the number of pixels (width x height) exceeds this limit.
 * Assumes image dimensions contained in the input metadata can be trusted.
 * An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). (optional, default 268402689)
 */
limitInputPixels?: number | boolean | undefined;
}

interface TileOptions {
@@ -88,8 +88,7 @@ const hasVendoredLibvips = function () {

/* istanbul ignore next */
const removeVendoredLibvips = function () {
const rm = fs.rmSync ? fs.rmSync : fs.rmdirSync;
rm(vendorPath, { recursive: true, maxRetries: 3, force: true });
fs.rmSync(vendorPath, { recursive: true, maxRetries: 3, force: true });
};

/* istanbul ignore next */
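A brief sketch of the simplification above (not part of the diff; the directory path is hypothetical): `fs.rmSync` has been available since Node.js 14.14, so the `rmSync`/`rmdirSync` feature test is no longer needed.

```js
const fs = require('fs');

// force: true suppresses errors when the path does not exist
fs.rmSync('/tmp/example-vendor-dir', { recursive: true, maxRetries: 3, force: true });
```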
@@ -80,9 +80,13 @@ function rotate (angle, options) {
}

/**
 * Flip the image about the vertical Y axis. This always occurs before rotation, if any.
 * Mirror the image vertically (up-down) about the x-axis.
 * This always occurs before rotation, if any.
 *
 * The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
 *
 * This operation does not work correctly with multi-page images.
 *
 * @example
 * const output = await sharp(input).flip().toBuffer();
 *
@@ -95,7 +99,9 @@ function flip (flip) {
}

/**
 * Flop the image about the horizontal X axis. This always occurs before rotation, if any.
 * Mirror the image horizontally (left-right) about the y-axis.
 * This always occurs before rotation, if any.
 *
 * The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
 *
 * @example
@@ -114,7 +120,7 @@ function flop (flop) {
 *
 * You must provide an array of length 4 or a 2x2 affine transformation matrix.
 * By default, new pixels are filled with a black background. You can provide a background color with the `background` option.
 * A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolator` Object e.g. `sharp.interpolator.nohalo`.
 * A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolators` Object e.g. `sharp.interpolators.nohalo`.
 *
 * In the case of a 2x2 matrix, the transform is:
 * - X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
@@ -131,7 +137,7 @@ function flop (flop) {
 * const pipeline = sharp()
 * .affine([[1, 0.3], [0.1, 0.7]], {
 * background: 'white',
 * interpolate: sharp.interpolators.nohalo
 * interpolator: sharp.interpolators.nohalo
 * })
 * .toBuffer((err, outputBuffer, info) => {
 * // outputBuffer contains the transformed image
@@ -405,6 +411,32 @@ function flatten (options) {
return this;
}

/**
 * Ensure the image has an alpha channel
 * with all white pixel values made fully transparent.
 *
 * Existing alpha channel values for non-white pixels remain unchanged.
 *
 * This feature is experimental and the API may change.
 *
 * @since 0.32.1
 *
 * @example
 * await sharp(rgbInput)
 * .unflatten()
 * .toBuffer();
 *
 * @example
 * await sharp(rgbInput)
 * .threshold(128, { grayscale: false }) // convert bright pixels to white
 * .unflatten()
 * .toBuffer();
 */
function unflatten () {
this.options.unflatten = true;
return this;
}

/**
 * Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of `1/gamma`
 * then increasing the encoding (brighten) post-resize at a factor of `gamma`.
@@ -875,6 +907,7 @@ module.exports = function (Sharp) {
median,
blur,
flatten,
unflatten,
gamma,
negate,
normalise,
@@ -29,7 +29,7 @@ const formats = new Map([
['jxl', 'jxl']
]);

const jp2Regex = /\.jp[2x]|j2[kc]$/i;
const jp2Regex = /\.(jp[2x]|j2[kc])$/i;

const errJp2Save = () => new Error('JP2 output requires libvips with support for OpenJPEG');
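A small sketch of the difference between the two expressions above (not part of the diff; the file name comes from the test added later in this changeset):

```js
const oldRegex = /\.jp[2x]|j2[kc]$/i;
const newRegex = /\.(jp[2x]|j2[kc])$/i;

// The unanchored alternation in the old pattern matched any name ending in j2k/j2c
console.log(oldRegex.test('output.failj2c')); // true  (false positive)
console.log(newRegex.test('output.failj2c')); // false (requires a .jp2/.jpx/.j2k/.j2c extension)
```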
@@ -43,7 +43,7 @@ const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math
|
||||
* Note that raw pixel data is only supported for buffer output.
|
||||
*
|
||||
* By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
* See {@link withMetadata} for control over this.
|
||||
* See {@link #withmetadata|withMetadata} for control over this.
|
||||
*
|
||||
* The caller is responsible for ensuring directory structures and permissions exist.
|
||||
*
|
||||
@@ -75,7 +75,7 @@ function toFile (fileOut, callback) {
|
||||
err = new Error('Missing output file path');
|
||||
} else if (is.string(this.options.input.file) && path.resolve(this.options.input.file) === path.resolve(fileOut)) {
|
||||
err = new Error('Cannot use same file for input and output');
|
||||
} else if (jp2Regex.test(fileOut) && !this.constructor.format.jp2k.output.file) {
|
||||
} else if (jp2Regex.test(path.extname(fileOut)) && !this.constructor.format.jp2k.output.file) {
|
||||
err = errJp2Save();
|
||||
}
|
||||
if (err) {
|
||||
@@ -95,12 +95,12 @@ function toFile (fileOut, callback) {
|
||||
* Write output to a Buffer.
|
||||
* JPEG, PNG, WebP, AVIF, TIFF, GIF and raw pixel data output are supported.
|
||||
*
|
||||
* Use {@link toFormat} or one of the format-specific functions such as {@link jpeg}, {@link png} etc. to set the output format.
|
||||
* Use {@link #toformat|toFormat} or one of the format-specific functions such as {@link jpeg}, {@link png} etc. to set the output format.
|
||||
*
|
||||
* If no explicit format is set, the output format will match the input image, except SVG input which becomes PNG output.
|
||||
*
|
||||
* By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
* See {@link withMetadata} for control over this.
|
||||
* See {@link #withmetadata|withMetadata} for control over this.
|
||||
*
|
||||
* `callback`, if present, gets three arguments `(err, data, info)` where:
|
||||
* - `err` is an error, if any.
|
||||
@@ -177,12 +177,18 @@ function toBuffer (options, callback) {
|
||||
* .then(info => { ... });
|
||||
*
|
||||
* @example
|
||||
* // Set "IFD0-Copyright" in output EXIF metadata
|
||||
* // Set output EXIF metadata
|
||||
* const data = await sharp(input)
|
||||
* .withMetadata({
|
||||
* exif: {
|
||||
* IFD0: {
|
||||
* Copyright: 'Wernham Hogg'
|
||||
* Copyright: 'The National Gallery'
|
||||
* },
|
||||
* IFD3: {
|
||||
* GPSLatitudeRef: 'N',
|
||||
* GPSLatitude: '51/1 30/1 3230/100',
|
||||
* GPSLongitudeRef: 'W',
|
||||
* GPSLongitude: '0/1 7/1 4366/100'
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
@@ -626,6 +632,7 @@ function gif (options) {
|
||||
return this._updateFormatOut('gif', options);
|
||||
}
|
||||
|
||||
/* istanbul ignore next */
|
||||
/**
|
||||
* Use these JP2 options for output image.
|
||||
*
|
||||
@@ -659,7 +666,6 @@ function gif (options) {
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
*/
|
||||
/* istanbul ignore next */
|
||||
function jp2 (options) {
|
||||
if (!this.constructor.format.jp2k.output.buffer) {
|
||||
throw errJp2Save();
|
||||
@@ -740,7 +746,8 @@ function trySetAnimationOptions (source, target) {
|
||||
/**
|
||||
* Use these TIFF options for output image.
|
||||
*
|
||||
* The `density` can be set in pixels/inch via {@link withMetadata} instead of providing `xres` and `yres` in pixels/mm.
|
||||
* The `density` can be set in pixels/inch via {@link #withmetadata|withMetadata}
|
||||
* instead of providing `xres` and `yres` in pixels/mm.
|
||||
*
|
||||
* @example
|
||||
* // Convert SVG input to LZW-compressed, 1 bit per pixel TIFF output
|
||||
|
||||
package.json (14 changes)
@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, GIF, AVIF and TIFF images",
"version": "0.32.0",
"version": "0.32.2",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -134,11 +134,11 @@
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.1",
"node-addon-api": "^6.0.0",
"node-addon-api": "^6.1.0",
"prebuild-install": "^7.1.1",
"semver": "^7.3.8",
"semver": "^7.5.4",
"simple-get": "^4.0.1",
"tar-fs": "^2.1.1",
"tar-fs": "^3.0.4",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
@@ -147,15 +147,15 @@
"cc": "^3.0.1",
"exif-reader": "^1.2.0",
"extract-zip": "^2.0.1",
"icc": "^2.0.0",
"icc": "^3.0.0",
"jsdoc-to-markdown": "^8.0.0",
"license-checker": "^25.0.1",
"mocha": "^10.2.0",
"mock-fs": "^5.2.0",
"nyc": "^15.1.0",
"prebuild": "^11.0.4",
"prebuild": "lovell/prebuild#add-nodejs-20-drop-nodejs-10-and-12",
"semistandard": "^16.0.1",
"tsd": "^0.28.0"
"tsd": "^0.28.1"
},
"license": "Apache-2.0",
"config": {
@@ -65,16 +65,6 @@ namespace sharp {
|
||||
}
|
||||
return vector;
|
||||
}
|
||||
Napi::Buffer<char> NewOrCopyBuffer(Napi::Env env, char* data, size_t len) {
|
||||
try {
|
||||
return Napi::Buffer<char>::New(env, data, len, FreeCallback);
|
||||
} catch (Napi::Error const &err) {
|
||||
static_cast<void>(err);
|
||||
}
|
||||
Napi::Buffer<char> buf = Napi::Buffer<char>::Copy(env, data, len);
|
||||
FreeCallback(nullptr, data);
|
||||
return buf;
|
||||
}
|
||||
|
||||
// Create an InputDescriptor instance from a Napi::Object describing an input image
|
||||
InputDescriptor* CreateInputDescriptor(Napi::Object input) {
|
||||
@@ -679,6 +669,10 @@ namespace sharp {
|
||||
if (image.width() > 65535 || height > 65535) {
|
||||
throw vips::VError("Processed image is too large for the GIF format");
|
||||
}
|
||||
} else if (imageType == ImageType::HEIF) {
|
||||
if (image.width() > 16384 || height > 16384) {
|
||||
throw vips::VError("Processed image is too large for the HEIF format");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -126,7 +126,6 @@ namespace sharp {
|
||||
return static_cast<T>(
|
||||
vips_enum_from_nick(nullptr, type, AttrAsStr(obj, attr).data()));
|
||||
}
|
||||
Napi::Buffer<char> NewOrCopyBuffer(Napi::Env env, char* data, size_t len);
|
||||
|
||||
// Create an InputDescriptor instance from a Napi::Object describing an input image
|
||||
InputDescriptor* CreateInputDescriptor(Napi::Object input);
|
||||
|
||||
@@ -230,20 +230,21 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
info.Set("orientation", baton->orientation);
|
||||
}
|
||||
if (baton->exifLength > 0) {
|
||||
info.Set("exif", sharp::NewOrCopyBuffer(env, baton->exif, baton->exifLength));
|
||||
info.Set("exif", Napi::Buffer<char>::NewOrCopy(env, baton->exif, baton->exifLength, sharp::FreeCallback));
|
||||
}
|
||||
if (baton->iccLength > 0) {
|
||||
info.Set("icc", sharp::NewOrCopyBuffer(env, baton->icc, baton->iccLength));
|
||||
info.Set("icc", Napi::Buffer<char>::NewOrCopy(env, baton->icc, baton->iccLength, sharp::FreeCallback));
|
||||
}
|
||||
if (baton->iptcLength > 0) {
|
||||
info.Set("iptc", sharp::NewOrCopyBuffer(env, baton->iptc, baton->iptcLength));
|
||||
info.Set("iptc", Napi::Buffer<char>::NewOrCopy(env, baton->iptc, baton->iptcLength, sharp::FreeCallback));
|
||||
}
|
||||
if (baton->xmpLength > 0) {
|
||||
info.Set("xmp", sharp::NewOrCopyBuffer(env, baton->xmp, baton->xmpLength));
|
||||
info.Set("xmp", Napi::Buffer<char>::NewOrCopy(env, baton->xmp, baton->xmpLength, sharp::FreeCallback));
|
||||
}
|
||||
if (baton->tifftagPhotoshopLength > 0) {
|
||||
info.Set("tifftagPhotoshop",
|
||||
sharp::NewOrCopyBuffer(env, baton->tifftagPhotoshop, baton->tifftagPhotoshopLength));
|
||||
Napi::Buffer<char>::NewOrCopy(env, baton->tifftagPhotoshop,
|
||||
baton->tifftagPhotoshopLength, sharp::FreeCallback));
|
||||
}
|
||||
Callback().MakeCallback(Receiver().Value(), { env.Null(), info });
|
||||
} else {
|
||||
|
||||
@@ -186,6 +186,7 @@ namespace sharp {
|
||||
|
||||
VImage Modulate(VImage image, double const brightness, double const saturation,
|
||||
int const hue, double const lightness) {
|
||||
VipsInterpretation colourspaceBeforeModulate = image.interpretation();
|
||||
if (HasAlpha(image)) {
|
||||
// Separate alpha channel
|
||||
VImage alpha = image[image.bands() - 1];
|
||||
@@ -195,7 +196,7 @@ namespace sharp {
|
||||
{ brightness, saturation, 1},
|
||||
{ lightness, 0.0, static_cast<double>(hue) }
|
||||
)
|
||||
.colourspace(VIPS_INTERPRETATION_sRGB)
|
||||
.colourspace(colourspaceBeforeModulate)
|
||||
.bandjoin(alpha);
|
||||
} else {
|
||||
return image
|
||||
@@ -204,7 +205,7 @@ namespace sharp {
|
||||
{ brightness, saturation, 1 },
|
||||
{ lightness, 0.0, static_cast<double>(hue) }
|
||||
)
|
||||
.colourspace(VIPS_INTERPRETATION_sRGB);
|
||||
.colourspace(colourspaceBeforeModulate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,30 +269,20 @@ namespace sharp {
|
||||
if (image.width() < 3 && image.height() < 3) {
|
||||
throw VError("Image to trim must be at least 3x3 pixels");
|
||||
}
|
||||
|
||||
// Scale up 8-bit values to match 16-bit input image
|
||||
double multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
threshold *= multiplier;
|
||||
|
||||
std::vector<double> backgroundAlpha(1);
|
||||
if (background.size() == 0) {
|
||||
// Top-left pixel provides the default background colour if none is given
|
||||
background = image.extract_area(0, 0, 1, 1)(0, 0);
|
||||
multiplier = 1.0;
|
||||
} else if (sharp::Is16Bit(image.interpretation())) {
|
||||
for (size_t i = 0; i < background.size(); i++) {
|
||||
background[i] *= 256.0;
|
||||
}
|
||||
threshold *= 256.0;
|
||||
}
|
||||
if (HasAlpha(image) && background.size() == 4) {
|
||||
// Just discard the alpha because flattening the background colour with
|
||||
// itself (effectively what find_trim() does) gives the same result
|
||||
backgroundAlpha[0] = background[3] * multiplier;
|
||||
}
|
||||
if (image.bands() > 2) {
|
||||
background = {
|
||||
background[0] * multiplier,
|
||||
background[1] * multiplier,
|
||||
background[2] * multiplier
|
||||
};
|
||||
std::vector<double> backgroundAlpha({ background.back() });
|
||||
if (HasAlpha(image)) {
|
||||
background.pop_back();
|
||||
} else {
|
||||
background[0] = background[0] * multiplier;
|
||||
background.resize(image.bands());
|
||||
}
|
||||
int left, top, width, height;
|
||||
left = image.find_trim(&top, &width, &height, VImage::option()
|
||||
@@ -332,12 +323,26 @@ namespace sharp {
|
||||
if (a.size() > bands) {
|
||||
throw VError("Band expansion using linear is unsupported");
|
||||
}
|
||||
bool const uchar = !Is16Bit(image.interpretation());
|
||||
if (HasAlpha(image) && a.size() != bands && (a.size() == 1 || a.size() == bands - 1 || bands - 1 == 1)) {
|
||||
// Separate alpha channel
|
||||
VImage alpha = image[bands - 1];
|
||||
return RemoveAlpha(image).linear(a, b, VImage::option()->set("uchar", TRUE)).bandjoin(alpha);
|
||||
return RemoveAlpha(image).linear(a, b, VImage::option()->set("uchar", uchar)).bandjoin(alpha);
|
||||
} else {
|
||||
return image.linear(a, b, VImage::option()->set("uchar", TRUE));
|
||||
return image.linear(a, b, VImage::option()->set("uchar", uchar));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Unflatten
|
||||
*/
|
||||
VImage Unflatten(VImage image) {
|
||||
if (HasAlpha(image)) {
|
||||
VImage alpha = image[image.bands() - 1];
|
||||
VImage noAlpha = RemoveAlpha(image);
|
||||
return noAlpha.bandjoin(alpha & (noAlpha.colourspace(VIPS_INTERPRETATION_B_W) < 255));
|
||||
} else {
|
||||
return image.bandjoin(image.colourspace(VIPS_INTERPRETATION_B_W) < 255);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -86,6 +86,11 @@ namespace sharp {
|
||||
*/
|
||||
VImage Linear(VImage image, std::vector<double> const a, std::vector<double> const b);
|
||||
|
||||
/*
|
||||
* Unflatten
|
||||
*/
|
||||
VImage Unflatten(VImage image);
|
||||
|
||||
/*
|
||||
* Recomb with a Matrix of the given bands/channel size.
|
||||
* Eg. RGB will be a 3x3 matrix.
|
||||
|
||||
@@ -79,6 +79,9 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
// Rotate and flip image according to Exif orientation
|
||||
std::tie(autoRotation, autoFlip, autoFlop) = CalculateExifRotationAndFlip(sharp::ExifOrientation(image));
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
if (baton->input->access == VIPS_ACCESS_SEQUENTIAL && (autoRotation != VIPS_ANGLE_D0 || autoFlip)) {
|
||||
image = image.copy_memory();
|
||||
}
|
||||
} else {
|
||||
rotation = CalculateAngleRotation(baton->angle);
|
||||
}
|
||||
@@ -116,7 +119,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
MultiPageUnsupported(nPages, "Rotate");
|
||||
std::vector<double> background;
|
||||
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground, FALSE);
|
||||
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
|
||||
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background)).copy_memory();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -322,7 +325,10 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
} catch(...) {
|
||||
sharp::VipsWarningCallback(nullptr, G_LOG_LEVEL_WARNING, "Invalid embedded profile", nullptr);
|
||||
}
|
||||
} else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
|
||||
} else if (
|
||||
image.interpretation() == VIPS_INTERPRETATION_CMYK &&
|
||||
baton->colourspaceInput != VIPS_INTERPRETATION_CMYK
|
||||
) {
|
||||
image = image.icc_transform(processingProfile, VImage::option()
|
||||
->set("input_profile", "cmyk")
|
||||
->set("intent", VIPS_INTENT_PERCEPTUAL));
|
||||
@@ -377,11 +383,11 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
if (autoRotation != VIPS_ANGLE_D0) {
|
||||
image = image.rot(autoRotation);
|
||||
}
|
||||
// Flip (mirror about Y axis)
|
||||
// Mirror vertically (up-down) about the x-axis
|
||||
if (baton->flip || autoFlip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
}
|
||||
// Flop (mirror about X axis)
|
||||
// Mirror horizontally (left-right) about the y-axis
|
||||
if (baton->flop || autoFlop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
}
|
||||
@@ -396,6 +402,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
sharp::ImageType joinImageType = sharp::ImageType::UNKNOWN;
|
||||
|
||||
for (unsigned int i = 0; i < baton->joinChannelIn.size(); i++) {
|
||||
baton->joinChannelIn[i]->access = baton->input->access;
|
||||
std::tie(joinImage, joinImageType) = sharp::OpenInput(baton->joinChannelIn[i]);
|
||||
joinImage = sharp::EnsureColourspace(joinImage, baton->colourspaceInput);
|
||||
image = image.bandjoin(joinImage);
|
||||
@@ -470,6 +477,9 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
|
||||
image = image.smartcrop(baton->width, baton->height, VImage::option()
|
||||
->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION)
|
||||
#if (VIPS_MAJOR_VERSION >= 8 && VIPS_MINOR_VERSION >= 15)
|
||||
->set("premultiplied", shouldPremultiplyAlpha)
|
||||
#endif
|
||||
->set("attention_x", &attention_x)
|
||||
->set("attention_y", &attention_y));
|
||||
baton->hasCropOffset = true;
|
||||
@@ -550,7 +560,9 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
if (baton->medianSize > 0) {
|
||||
image = image.median(baton->medianSize);
|
||||
}
|
||||
|
||||
// Threshold - must happen before blurring, due to the utility of blurring after thresholding
|
||||
// Threshold - must happen before unflatten to enable non-white unflattening
|
||||
if (baton->threshold != 0) {
|
||||
image = sharp::Threshold(image, baton->threshold, baton->thresholdGrayscale);
|
||||
}
|
||||
@@ -560,6 +572,11 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
image = sharp::Blur(image, baton->blurSigma);
|
||||
}
|
||||
|
||||
// Unflatten the image
|
||||
if (baton->unflatten) {
|
||||
image = sharp::Unflatten(image);
|
||||
}
|
||||
|
||||
// Convolve
|
||||
if (shouldConv) {
|
||||
image = sharp::Convolve(image,
|
||||
@@ -597,6 +614,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
for (Composite *composite : baton->composite) {
|
||||
VImage compositeImage;
|
||||
sharp::ImageType compositeImageType = sharp::ImageType::UNKNOWN;
|
||||
composite->input->access = baton->input->access;
|
||||
std::tie(compositeImage, compositeImageType) = sharp::OpenInput(composite->input);
|
||||
compositeImage = sharp::EnsureColourspace(compositeImage, baton->colourspaceInput);
|
||||
// Verify within current dimensions
|
||||
@@ -695,6 +713,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
if (baton->boolean != nullptr) {
|
||||
VImage booleanImage;
|
||||
sharp::ImageType booleanImageType = sharp::ImageType::UNKNOWN;
|
||||
baton->boolean->access = baton->input->access;
|
||||
std::tie(booleanImage, booleanImageType) = sharp::OpenInput(baton->boolean);
|
||||
booleanImage = sharp::EnsureColourspace(booleanImage, baton->colourspaceInput);
|
||||
image = sharp::Boolean(image, booleanImage, baton->booleanOp);
|
||||
@@ -922,6 +941,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
} else if (baton->formatOut == "heif" ||
|
||||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::HEIF)) {
|
||||
// Write HEIF to buffer
|
||||
sharp::AssertImageTypeDimensions(image, sharp::ImageType::HEIF);
|
||||
image = sharp::RemoveAnimationProperties(image).cast(VIPS_FORMAT_UCHAR);
|
||||
VipsArea *area = reinterpret_cast<VipsArea*>(image.heifsave_buffer(VImage::option()
|
||||
->set("strip", !baton->withMetadata)
|
||||
@@ -1111,6 +1131,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
} else if (baton->formatOut == "heif" || (mightMatchInput && isHeif) ||
|
||||
(willMatchInput && inputImageType == sharp::ImageType::HEIF)) {
|
||||
// Write HEIF to file
|
||||
sharp::AssertImageTypeDimensions(image, sharp::ImageType::HEIF);
|
||||
image = sharp::RemoveAnimationProperties(image).cast(VIPS_FORMAT_UCHAR);
|
||||
image.heifsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
|
||||
->set("strip", !baton->withMetadata)
|
||||
@@ -1222,8 +1243,8 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
// Add buffer size to info
|
||||
info.Set("size", static_cast<uint32_t>(baton->bufferOutLength));
|
||||
// Pass ownership of output data to Buffer instance
|
||||
Napi::Buffer<char> data = sharp::NewOrCopyBuffer(env, static_cast<char*>(baton->bufferOut),
|
||||
baton->bufferOutLength);
|
||||
Napi::Buffer<char> data = Napi::Buffer<char>::NewOrCopy(env, static_cast<char*>(baton->bufferOut),
|
||||
baton->bufferOutLength, sharp::FreeCallback);
|
||||
Callback().MakeCallback(Receiver().Value(), { env.Null(), data, info });
|
||||
} else {
|
||||
// Add file size to info
|
||||
@@ -1460,6 +1481,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
|
||||
// Operators
|
||||
baton->flatten = sharp::AttrAsBool(options, "flatten");
|
||||
baton->flattenBackground = sharp::AttrAsVectorOfDouble(options, "flattenBackground");
|
||||
baton->unflatten = sharp::AttrAsBool(options, "unflatten");
|
||||
baton->negate = sharp::AttrAsBool(options, "negate");
|
||||
baton->negateAlpha = sharp::AttrAsBool(options, "negateAlpha");
|
||||
baton->blurSigma = sharp::AttrAsDouble(options, "blurSigma");
|
||||
@@ -1661,7 +1683,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
|
||||
baton->angle != 0 ||
|
||||
baton->rotationAngle != 0.0 ||
|
||||
baton->tileAngle != 0 ||
|
||||
baton->useExifOrientation ||
|
||||
baton->flip ||
|
||||
baton->claheWidth != 0 ||
|
||||
!baton->affineMatrix.empty()
|
||||
) {
|
||||
|
||||
@@ -73,6 +73,7 @@ struct PipelineBaton {
|
||||
double tintB;
|
||||
bool flatten;
|
||||
std::vector<double> flattenBackground;
|
||||
bool unflatten;
|
||||
bool negate;
|
||||
bool negateAlpha;
|
||||
double blurSigma;
|
||||
@@ -239,6 +240,7 @@ struct PipelineBaton {
|
||||
tintB(128.0),
|
||||
flatten(false),
|
||||
flattenBackground{ 0.0, 0.0, 0.0 },
|
||||
unflatten(false),
|
||||
negate(false),
|
||||
negateAlpha(true),
|
||||
blurSigma(0.0),
|
||||
|
||||
@@ -14,8 +14,7 @@
|
||||
"benchmark": "2.1.4",
|
||||
"gm": "1.25.0",
|
||||
"imagemagick": "0.1.3",
|
||||
"jimp": "0.22.7",
|
||||
"semver": "7.3.8"
|
||||
"jimp": "0.22.7"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@tensorflow/tfjs-node": "4.2.0",
|
||||
|
||||
New binary test fixtures (vendored, binary contents not shown):
test/fixtures/expected/linear-16bit.png (1.1 KiB)
test/fixtures/expected/unflatten-flag-white-transparent.png (12 KiB)
test/fixtures/expected/unflatten-swiss.png (3.4 KiB)
test/fixtures/expected/unflatten-white-transparent.png (225 KiB)
@@ -637,3 +637,17 @@ sharp('input.png').composite([
|
||||
sharp('input.png').tile({
|
||||
basename: 'output.dz.tiles',
|
||||
});
|
||||
|
||||
// https://github.com/lovell/sharp/issues/3669
|
||||
sharp(input).composite([
|
||||
{
|
||||
raw: {
|
||||
width: 1,
|
||||
height: 1,
|
||||
channels: 1,
|
||||
premultiplied: false,
|
||||
},
|
||||
sequentialRead: false,
|
||||
unlimited: true,
|
||||
}
|
||||
]);
|
||||
|
||||
@@ -21,7 +21,7 @@ describe('HTTP agent', function () {
|
||||
assert.strictEqual(123, proxy.options.proxy.port);
|
||||
assert.strictEqual('user:pass', proxy.options.proxy.proxyAuth);
|
||||
assert.strictEqual(443, proxy.defaultPort);
|
||||
assert.strictEqual(logMsg, 'Via proxy https:://secure:123 with credentials');
|
||||
assert.strictEqual(logMsg, 'Via proxy https://secure:123 with credentials');
|
||||
});
|
||||
|
||||
it('HTTPS proxy with auth from HTTPS_PROXY using credentials containing special characters', function () {
|
||||
@@ -34,7 +34,7 @@ describe('HTTP agent', function () {
|
||||
assert.strictEqual(789, proxy.options.proxy.port);
|
||||
assert.strictEqual('user,:pass=', proxy.options.proxy.proxyAuth);
|
||||
assert.strictEqual(443, proxy.defaultPort);
|
||||
assert.strictEqual(logMsg, 'Via proxy https:://secure:789 with credentials');
|
||||
assert.strictEqual(logMsg, 'Via proxy https://secure:789 with credentials');
|
||||
});
|
||||
|
||||
it('HTTP proxy without auth from npm_config_proxy', function () {
|
||||
@@ -47,6 +47,6 @@ describe('HTTP agent', function () {
|
||||
assert.strictEqual(456, proxy.options.proxy.port);
|
||||
assert.strictEqual(null, proxy.options.proxy.proxyAuth);
|
||||
assert.strictEqual(443, proxy.defaultPort);
|
||||
assert.strictEqual(logMsg, 'Via proxy http:://plaintext:456 no credentials');
|
||||
assert.strictEqual(logMsg, 'Via proxy http://plaintext:456 no credentials');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -130,4 +130,18 @@ describe('AVIF', () => {
|
||||
width: 32
|
||||
});
|
||||
});
|
||||
|
||||
it('Invalid width - too large', async () =>
|
||||
assert.rejects(
|
||||
() => sharp({ create: { width: 16385, height: 16, channels: 3, background: 'red' } }).avif().toBuffer(),
|
||||
/Processed image is too large for the HEIF format/
|
||||
)
|
||||
);
|
||||
|
||||
it('Invalid height - too large', async () =>
|
||||
assert.rejects(
|
||||
() => sharp({ create: { width: 16, height: 16385, channels: 3, background: 'red' } }).avif().toBuffer(),
|
||||
/Processed image is too large for the HEIF format/
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
@@ -93,6 +93,19 @@ describe('Colour space conversion', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('Profile-less CMYK roundtrip', async () => {
|
||||
const [c, m, y, k] = await sharp(fixtures.inputJpgWithCmykNoProfile)
|
||||
.pipelineColourspace('cmyk')
|
||||
.toColourspace('cmyk')
|
||||
.raw()
|
||||
.toBuffer();
|
||||
|
||||
assert.deepStrictEqual(
|
||||
{ c, m, y, k },
|
||||
{ c: 55, m: 27, y: 0, k: 0 }
|
||||
);
|
||||
});
|
||||
|
||||
it('From sRGB with RGB16 pipeline, resize with gamma, to sRGB', function (done) {
|
||||
sharp(fixtures.inputPngGradients)
|
||||
.pipelineColourspace('rgb16')
|
||||
|
||||
@@ -25,6 +25,13 @@ describe('JP2 output', () => {
|
||||
/JP2 output requires libvips with support for OpenJPEG/
|
||||
)
|
||||
);
|
||||
|
||||
it('File with JP2-like suffix should not fail due to missing OpenJPEG', () => {
|
||||
const output = fixtures.path('output.failj2c');
|
||||
return assert.doesNotReject(
|
||||
async () => sharp(fixtures.inputPngWithOneColor).toFile(output)
|
||||
);
|
||||
});
|
||||
} else {
|
||||
it('JP2 Buffer to PNG Buffer', () => {
|
||||
sharp(fs.readFileSync(fixtures.inputJp2))
|
||||
|
||||
@@ -51,6 +51,16 @@ describe('Linear adjustment', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('applies linear levels adjustment to 16-bit w alpha ch', function (done) {
|
||||
sharp(fixtures.inputPngWithTransparency16bit)
|
||||
.linear(a, b)
|
||||
.png({ compressionLevel: 0 })
|
||||
.toBuffer(function (err, data) {
|
||||
if (err) throw err;
|
||||
fixtures.assertSimilar(fixtures.expected('linear-16bit.png'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('applies slope level adjustment w alpha ch', function (done) {
|
||||
sharp(fixtures.inputPngOverlayLayer1)
|
||||
.resize(240)
|
||||
|
||||
@@ -473,4 +473,20 @@ describe('Rotation', function () {
|
||||
assert.strictEqual(g, 64);
|
||||
assert.strictEqual(b, 30);
|
||||
});
|
||||
|
||||
it('Resize after affine-based rotation does not overcompute', async () =>
|
||||
sharp({
|
||||
create: {
|
||||
width: 4640,
|
||||
height: 2610,
|
||||
channels: 3,
|
||||
background: 'black'
|
||||
}
|
||||
})
|
||||
.rotate(28)
|
||||
.resize({ width: 640, height: 360 })
|
||||
.raw()
|
||||
.timeout({ seconds: 5 })
|
||||
.toBuffer()
|
||||
);
|
||||
});
|
||||
|
||||
@@ -8,7 +8,9 @@ const assert = require('assert');
|
||||
const sharp = require('../../');
|
||||
const fixtures = require('../fixtures');
|
||||
|
||||
describe('Text to image', () => {
|
||||
describe('Text to image', function () {
|
||||
this.retries(3);
|
||||
|
||||
it('text with default values', async () => {
|
||||
const output = fixtures.path('output.text-default.png');
|
||||
const text = sharp({
|
||||
|
||||
@@ -153,6 +153,32 @@ describe('Trim borders', function () {
|
||||
assert.strictEqual(trimOffsetLeft, -12);
|
||||
});
|
||||
|
||||
it('Ensure CMYK image can be trimmed', async () => {
|
||||
const cmyk = await sharp({
|
||||
create: {
|
||||
width: 16,
|
||||
height: 8,
|
||||
channels: 3,
|
||||
background: 'red'
|
||||
}
|
||||
})
|
||||
.extend({ left: 12, right: 24, background: 'blue' })
|
||||
.toColourspace('cmyk')
|
||||
.jpeg()
|
||||
.toBuffer();
|
||||
|
||||
const { info } = await sharp(cmyk)
|
||||
.trim()
|
||||
.raw()
|
||||
.toBuffer({ resolveWithObject: true });
|
||||
|
||||
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
|
||||
assert.strictEqual(width, 16);
|
||||
assert.strictEqual(height, 8);
|
||||
assert.strictEqual(trimOffsetTop, 0);
|
||||
assert.strictEqual(trimOffsetLeft, -12);
|
||||
});
|
||||
|
||||
it('Ensure trim of image with all pixels same is no-op', async () => {
|
||||
const { info } = await sharp({
|
||||
create: {
|
||||
|
||||
test/unit/unflatten.js (new file, 31 lines)
@@ -0,0 +1,31 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const sharp = require('../../');
const fixtures = require('../fixtures');

describe('Unflatten', function () {
it('unflatten white background', function (done) {
sharp(fixtures.inputPng).unflatten()
.toBuffer(function (err, data) {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('unflatten-white-transparent.png'), data, { threshold: 0 }, done);
});
});
it('unflatten transparent image', function (done) {
sharp(fixtures.inputPngTrimSpecificColourIncludeAlpha).unflatten()
.toBuffer(function (err, data) {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('unflatten-flag-white-transparent.png'), data, { threshold: 0 }, done);
});
});
it('unflatten using threshold', function (done) {
sharp(fixtures.inputPngPalette).unflatten().threshold(128, { grayscale: false })
.toBuffer(function (err, data) {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('unflatten-swiss.png'), data, { threshold: 1 }, done);
});
});
});