Compare commits

..

68 Commits

Author SHA1 Message Date
Lovell Fuller
f7b29d7b59 CI: ensure prebuilds use Node API version 7 2022-09-05 10:14:40 +01:00
Lovell Fuller
0b806187fc Release v0.31.0 2022-09-05 09:55:27 +01:00
Lovell Fuller
c1393daa70 Expose unlimited option for HEIF input 2022-09-05 09:19:53 +01:00
Lovell Fuller
31c1cfb049 Docs: add note about GIF optimisation 2022-09-05 09:19:53 +01:00
Lovell Fuller
afc35c2208 Docs: update benchmark results for libvips v8.13.1 2022-09-05 09:19:53 +01:00
Lovell Fuller
6eb2add3bf Docs: refresh for 5cdb2b8 2022-09-04 13:17:02 +01:00
Lovell Fuller
5cdb2b83d5 Upgrade to libvips v8.13.1 2022-09-04 12:42:27 +01:00
Lovell Fuller
1eb66c0944 Tests: relax truncated PNG test assertion 2022-09-04 11:19:01 +01:00
Lovell Fuller
55c4d8807c Improve normalise op with use of histogram #200 2022-09-04 10:31:43 +01:00
Lovell Fuller
9a54a034e1 Tests: ensure truncated PNG test handles possible race 2022-09-04 10:28:24 +01:00
Lovell Fuller
f5109560d6 Standardise approach for string to enum conversion 2022-09-02 12:20:09 +01:00
Lovell Fuller
953a94885b Tests: run benchmarks in container via Docker 2022-09-01 14:58:39 +01:00
Lovell Fuller
0e3bd46ca3 Docs: clarify that metadata respects page/pages 2022-09-01 09:58:17 +01:00
Lovell Fuller
4b38f56d02 Docs: add avif and heif examples 2022-09-01 09:57:50 +01:00
Lovell Fuller
0fe857c5ac Docs: move serverless-esbuild to bundlers section 2022-08-24 17:42:56 +01:00
A. Sayef Reyadh
1bf06bd5b4 Docs: add info for serverless-esbuild users (#3235) 2022-08-24 17:37:58 +01:00
Lovell Fuller
6e3f4c3c92 Docs: changelog for #3332 2022-08-23 13:00:02 +01:00
Lovell Fuller
8583eb1235 Tests: update leak suppressions for latest versions 2022-08-23 12:32:39 +01:00
Mart
c3a852eecf Add trim option to provide a specific background colour (#3332)
Co-authored-by: Mart Jansink <mart@cinemait.nl>
2022-08-23 12:28:02 +01:00
Lovell Fuller
3a44748f49 Ensure PNG bitdepth can be set for non-palette output #3322 2022-08-22 14:57:12 +01:00
Lovell Fuller
e1bc8674fd Docs: clarify composite operation ordering 2022-08-21 19:53:04 +01:00
Lovell Fuller
a618ce7a15 Ensure image is unpremultiplied before composite #3334 2022-08-21 17:51:05 +01:00
Lovell Fuller
a44168c8c7 Docs: changelog and credit for #3303 2022-08-20 10:27:31 +01:00
Anton Marsden
74e3f73934 Expand linear operation to allow use of per-channel arrays #3303 2022-08-20 10:27:04 +01:00
Lovell Fuller
b9261c243c Bump devDeps, requires doc refresh 2022-08-20 09:23:24 +01:00
Lovell Fuller
cc9f91f37c Docs: note about macos fontconfig vs coretext 2022-08-18 16:41:33 +01:00
Lovell Fuller
212a6e7519 Ensure op ordering is respected where possible #3319
Emit warnings when previous ops might be ignored
Flip and flop now occur before rotate, if any
2022-08-18 16:41:33 +01:00
Kid
e547eaa180 Docs: fix broken link to Got stream documentation (#3323) 2022-08-07 14:25:36 +01:00
Lionel Tzatzkin
9a0d9eed74 Docs: correct syntax in sharpen example (#3310) 2022-07-29 12:11:16 +01:00
Lovell Fuller
dd56a9699e Docs: changelog and credit for #3252 2022-07-25 12:19:25 +01:00
brahima
ea7cf2a2ef Expose vips_text to create an image containing rendered text (#3252) 2022-07-25 11:32:10 +01:00
Lovell Fuller
76c4c51e2a Remove previously-deprecated reductionEffort and speed options 2022-07-24 11:18:16 +01:00
Lovell Fuller
b46ab510da Add Buffer and Stream support to tile output #2238 2022-07-24 11:06:41 +01:00
Lovell Fuller
3e327a586c Docs: add section about font discovery 2022-07-23 16:31:00 +01:00
Lovell Fuller
974fab946e Bump devDeps 2022-07-23 16:21:42 +01:00
Lovell Fuller
f998a8f249 Upgrade to libvips v8.13.0 2022-07-23 10:04:14 +01:00
Lovell Fuller
be331e958e CI: upgrade to macOS 11 2022-07-22 23:20:33 +01:00
Lovell Fuller
254944f8ab CI: install Noto font via package manager 2022-07-18 13:35:22 +01:00
Lovell Fuller
f1e640d231 Tests: catch potential failures during tile unzip 2022-07-14 11:13:59 +01:00
Lovell Fuller
c295f06a6f Ensure only props owned by EXIF Object are parsed #3292 2022-07-13 21:33:06 +01:00
Lovell Fuller
6288c7bced Expose reoptimise palette option for GIF output 2022-07-12 21:12:31 +01:00
Lovell Fuller
d247c02762 Add mixed and minSize animation options for WebP output 2022-07-12 14:51:03 +01:00
Lovell Fuller
1b84ccbbe9 Ensure fileSuffix supports suffix-less loaders 2022-07-11 20:56:19 +01:00
Lovell Fuller
e4160c684d Docs: add timeout example 2022-07-11 11:12:32 +01:00
Lovell Fuller
905518fab0 Add input fileSuffix and output alias to format #2642 2022-07-11 10:45:19 +01:00
Lovell Fuller
8ff33763ce Ensure OpenSlide+FITS input works with custom libvips #3226 2022-07-08 22:25:39 +01:00
Lovell Fuller
cbf741cac7 Ensure trim is no-op when it would reduce to nothing #3223 2022-07-08 21:06:58 +01:00
Lovell Fuller
6c2e2be41d CI: upgrade Linux arm64 environment to Node.js 14/18 2022-07-05 19:42:05 +01:00
Lovell Fuller
e0d3c6e05d Use bounding box of alpha+non-alpha for trim op #2166 2022-07-05 18:19:17 +01:00
Lovell Fuller
e3cab7f10f CI: fix path to gcc toolset 2022-06-29 12:40:04 +01:00
Lovell Fuller
204463ffbb CI: ensure gcc 11 toolset is available on the PATH 2022-06-29 12:26:16 +01:00
Lovell Fuller
1bcd3700c5 CI: Node.js 18 does not support CentOS 7, upgrade to Rocky 8 2022-06-29 12:18:41 +01:00
Lovell Fuller
c99a11cff5 CI: add Node.js 18, remove 12 2022-06-29 11:52:41 +01:00
Lovell Fuller
81c74f57e0 Re-introduce support for greyscale ICC profiles #3114 2022-06-29 11:41:36 +01:00
Lovell Fuller
7a8ab452c5 Add support for WebP and PackBits compression with TIFF output #3198 2022-06-29 11:35:58 +01:00
Lovell Fuller
bb91912883 Drop support for Node.js 12, now requires >= 14.15.0 2022-06-29 10:32:25 +01:00
Kleis Auke Wolthuizen
afc4c5bf79 Upgrade to libvips v8.13.0-rc1 (#3230)
* Switch from decompress-zip to extract-zip

The former seems to hang when unzipping a ZIP64 file that uses
the general purpose bit flag 3 as file entry.

See: https://github.com/thejoshwolfe/yauzl#no-streaming-unzip-api

* Prefer to call via static member instead

Makes it clearer that a static method is being called.

* `flatten-orange.jpg`: save without chroma subsampling

To ensure no down-scaling of the Cr/Cb channels.
2022-06-26 22:39:29 +01:00
Lovell Fuller
e40a881ab4 Release v0.30.7 2022-06-22 16:44:16 +01:00
Lovell Fuller
c1b13adac3 Bump deps 2022-06-22 11:53:22 +01:00
Lovell Fuller
29e09898f7 Docs: add examples of custom binary locations 2022-06-22 11:50:30 +01:00
Lovell Fuller
853a20358e Install: add help for possible worker thread problem #3268 2022-06-21 08:22:05 +01:00
Lovell Fuller
8bb30d7801 Docs: changelog and credit #3261 #3267 2022-06-21 07:35:28 +01:00
Blayne Chard
a333b87f5d Prevent upsampling via libwebp (#3267) 2022-06-20 10:49:53 +01:00
AlexanderTheGrey
4662527a17 Allow WebP encoding effort of 0 (#3261) 2022-06-17 08:22:51 +01:00
Lovell Fuller
b10d8f89ca Docs: add example of multi-arch within same install tree 2022-06-10 12:51:44 +01:00
Oleg Andreyev
f903e1465e Docs: clarify wording of resize background option 2022-06-08 12:56:29 +01:00
Lovell Fuller
a75718565c Ensure composite can tile with outside resize #3227 2022-06-08 12:39:00 +01:00
Fonger
4d82331bf6 docs(input): correct getNormalSize with EXIF orientation example (#3241) 2022-05-31 08:59:15 +01:00
79 changed files with 2245 additions and 660 deletions

View File

@@ -3,76 +3,76 @@ version: 2.1
workflows:
build:
jobs:
- linux-arm64-glibc-node-12:
- linux-arm64-glibc-node-14:
filters:
tags:
only: /^v.*/
- linux-arm64-musl-node-12:
- linux-arm64-musl-node-14:
filters:
tags:
only: /^v.*/
- linux-arm64-glibc-node-16:
- linux-arm64-glibc-node-18:
filters:
tags:
only: /^v.*/
- linux-arm64-musl-node-16:
- linux-arm64-musl-node-18:
filters:
tags:
only: /^v.*/
jobs:
linux-arm64-glibc-node-12:
linux-arm64-glibc-node-14:
resource_class: arm.medium
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:current
steps:
- checkout
- run: |
sudo docker run -dit --name sharp --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp arm64v8/debian:bullseye
sudo docker exec sharp sh -c "apt-get update && apt-get install -y build-essential git python3 curl"
sudo docker exec sharp sh -c "apt-get update && apt-get install -y build-essential git python3 curl fonts-noto-core"
sudo docker exec sharp sh -c "curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -"
sudo docker exec sharp sh -c "echo 'deb https://deb.nodesource.com/node_12.x sid main' >/etc/apt/sources.list.d/nodesource.list"
sudo docker exec sharp sh -c "echo 'deb https://deb.nodesource.com/node_14.x sid main' >/etc/apt/sources.list.d/nodesource.list"
sudo docker exec sharp sh -c "apt-get update && apt-get install -y nodejs"
- run: sudo docker exec sharp sh -c "npm install --build-from-source --unsafe-perm"
- run: sudo docker exec sharp sh -c "npm test"
- run: "[[ -n $CIRCLE_TAG ]] && sudo docker exec --env prebuild_upload sharp sh -c \"npx prebuild --runtime napi --target 5 --upload=$prebuild_upload\" || true"
linux-arm64-glibc-node-16:
- run: "[[ -n $CIRCLE_TAG ]] && sudo docker exec --env prebuild_upload sharp sh -c \"npx prebuild --runtime napi --target 7 --upload=$prebuild_upload\" || true"
linux-arm64-glibc-node-18:
resource_class: arm.medium
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:current
steps:
- checkout
- run: |
sudo docker run -dit --name sharp --workdir /mnt/sharp arm64v8/debian:bullseye
sudo docker exec sharp sh -c "apt-get update && apt-get install -y build-essential git python3 curl"
sudo docker exec sharp sh -c "apt-get update && apt-get install -y build-essential git python3 curl fonts-noto-core"
sudo docker exec sharp sh -c "curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -"
sudo docker exec sharp sh -c "echo 'deb https://deb.nodesource.com/node_16.x sid main' >/etc/apt/sources.list.d/nodesource.list"
sudo docker exec sharp sh -c "echo 'deb https://deb.nodesource.com/node_18.x sid main' >/etc/apt/sources.list.d/nodesource.list"
sudo docker exec sharp sh -c "apt-get update && apt-get install -y nodejs"
sudo docker exec sharp sh -c "mkdir -p /mnt/sharp"
sudo docker cp . sharp:/mnt/sharp/.
- run: sudo docker exec sharp sh -c "npm install --build-from-source"
- run: sudo docker exec sharp sh -c "npm test"
linux-arm64-musl-node-12:
linux-arm64-musl-node-14:
resource_class: arm.medium
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:current
steps:
- checkout
- run: |
sudo docker run -dit --name sharp --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:12-alpine3.11
sudo docker exec sharp sh -c "apk add build-base git python3 --update-cache"
sudo docker run -dit --name sharp --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:14-alpine3.12
sudo docker exec sharp sh -c "apk add build-base git python3 font-noto --update-cache"
- run: sudo docker exec sharp sh -c "npm install --build-from-source --unsafe-perm"
- run: sudo docker exec sharp sh -c "npm test"
- run: "[[ -n $CIRCLE_TAG ]] && sudo docker exec --env prebuild_upload sharp sh -c \"npx prebuild --runtime napi --target 5 --upload=$prebuild_upload\" || true"
linux-arm64-musl-node-16:
- run: "[[ -n $CIRCLE_TAG ]] && sudo docker exec --env prebuild_upload sharp sh -c \"npx prebuild --runtime napi --target 7 --upload=$prebuild_upload\" || true"
linux-arm64-musl-node-18:
resource_class: arm.medium
machine:
image: ubuntu-2004:202101-01
image: ubuntu-2004:current
steps:
- checkout
- run: |
sudo docker run -dit --name sharp --workdir /mnt/sharp node:16-alpine3.11
sudo docker exec sharp sh -c "apk add build-base git python3 --update-cache"
sudo docker run -dit --name sharp --workdir /mnt/sharp node:18-alpine3.14
sudo docker exec sharp sh -c "apk add build-base git python3 font-noto --update-cache"
sudo docker exec sharp sh -c "mkdir -p /mnt/sharp"
sudo docker cp . sharp:/mnt/sharp/.
- run: sudo docker exec sharp sh -c "npm install --build-from-source"

View File

@@ -9,9 +9,9 @@ jobs:
fail-fast: false
matrix:
include:
- nodejs_version: 12
- nodejs_version: 14
nodejs_architecture: x64
- nodejs_version: 16
- nodejs_version: 18
nodejs_architecture: arm64
prebuild: true
defaults:
@@ -33,4 +33,4 @@ jobs:
if: matrix.prebuild && startsWith(github.ref, 'refs/tags/')
env:
prebuild_upload: ${{ secrets.GITHUB_TOKEN }}
run: npx prebuild --runtime napi --target 5
run: npx prebuild --runtime napi --target 7

View File

@@ -12,65 +12,69 @@ jobs:
include:
- os: ubuntu-20.04
container: centos:7
nodejs_version: 12
nodejs_version: 14
coverage: true
prebuild: true
- os: ubuntu-20.04
container: centos:7
nodejs_version: 14
- os: ubuntu-20.04
container: centos:7
nodejs_version: 16
- os: ubuntu-20.04
container: node:12-alpine3.11
container: rockylinux:8
nodejs_version: 18
- os: ubuntu-20.04
container: node:14-alpine3.12
prebuild: true
- os: ubuntu-20.04
container: node:14-alpine3.11
container: node:16-alpine3.12
- os: ubuntu-20.04
container: node:14-alpine3.13
- os: ubuntu-20.04
container: node:16-alpine3.11
- os: macos-10.15
nodejs_version: 12
prebuild: true
nodejs_arch: x64
- os: macos-10.15
container: node:18-alpine3.14
- os: macos-11
nodejs_version: 14
prebuild: true
nodejs_arch: x64
- os: macos-10.15
- os: macos-11
nodejs_version: 16
nodejs_arch: x64
- os: windows-2019
nodejs_version: 12
nodejs_arch: x86
prebuild: true
- os: macos-11
nodejs_version: 18
nodejs_arch: x64
- os: windows-2019
nodejs_version: 14
nodejs_arch: x86
prebuild: true
- os: windows-2019
nodejs_version: 16
nodejs_arch: x86
- os: windows-2019
nodejs_version: 12
nodejs_arch: x64
prebuild: true
nodejs_version: 18
nodejs_arch: x86
- os: windows-2019
nodejs_version: 14
nodejs_arch: x64
prebuild: true
- os: windows-2019
nodejs_version: 16
nodejs_arch: x64
- os: windows-2019
nodejs_version: 18
nodejs_arch: x64
steps:
- name: Dependencies (Linux glibc)
if: contains(matrix.container, 'centos')
run: |
curl -sL https://rpm.nodesource.com/setup_${{ matrix.nodejs_version }}.x | bash -
yum install -y centos-release-scl
yum install -y devtoolset-10-gcc-c++ make git python3 nodejs
echo "/opt/rh/devtoolset-10/root/usr/bin" >> $GITHUB_PATH
yum install -y devtoolset-11-gcc-c++ make git python3 nodejs fontconfig google-noto-sans-fonts
echo "/opt/rh/devtoolset-11/root/usr/bin" >> $GITHUB_PATH
- name: Dependencies (Rocky Linux glibc)
if: contains(matrix.container, 'rockylinux')
run: |
curl -sL https://rpm.nodesource.com/setup_${{ matrix.nodejs_version }}.x | bash -
dnf install -y gcc-toolset-11-gcc-c++ make git python3 nodejs fontconfig google-noto-sans-fonts
echo "/opt/rh/gcc-toolset-11/root/usr/bin" >> $GITHUB_PATH
- name: Dependencies (Linux musl)
if: contains(matrix.container, 'alpine')
run: apk add build-base git python3 --update-cache
run: apk add build-base git python3 font-noto --update-cache
- name: Dependencies (macOS, Windows)
if: contains(matrix.os, 'macos') || contains(matrix.os, 'windows')
uses: actions/setup-node@v3
@@ -95,4 +99,4 @@ jobs:
if: matrix.prebuild && startsWith(github.ref, 'refs/tags/')
env:
prebuild_upload: ${{ secrets.GITHUB_TOKEN }}
run: npx prebuild --runtime napi --target 5
run: npx prebuild --runtime napi --target 7

View File

@@ -16,7 +16,7 @@ Lanczos resampling ensures quality is not sacrificed for speed.
As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern macOS, Windows and Linux systems running Node.js >= 12.13.0
Most modern macOS, Windows and Linux systems running Node.js >= 14.15.0
do not require any additional install or runtime dependencies.
## Documentation

View File

@@ -15,10 +15,11 @@
'_ALLOW_KEYWORD_MACROS'
],
'sources': [
'src/libvips/cplusplus/VError.cpp',
'src/libvips/cplusplus/VConnection.cpp',
'src/libvips/cplusplus/VError.cpp',
'src/libvips/cplusplus/VImage.cpp',
'src/libvips/cplusplus/VInterpolate.cpp',
'src/libvips/cplusplus/VImage.cpp'
'src/libvips/cplusplus/VRegion.cpp'
],
'include_dirs': [
'<(sharp_vendor_dir)/include',
@@ -69,7 +70,7 @@
}, {
'target_name': 'sharp-<(platform_and_arch)',
'defines': [
'NAPI_VERSION=5'
'NAPI_VERSION=7'
],
'dependencies': [
'<!(node -p "require(\'node-addon-api\').gyp")',

View File

@@ -16,7 +16,7 @@ Lanczos resampling ensures quality is not sacrificed for speed.
As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern macOS, Windows and Linux systems running Node.js >= 12.13.0
Most modern macOS, Windows and Linux systems running Node.js >= 14.15.0
do not require any additional install or runtime dependencies.
### Formats
@@ -63,6 +63,10 @@ PNG filtering is disabled by default,
which for diagrams and line art often produces the same result
as [pngcrush](https://pmt.sourceforge.io/pngcrush/).
The file size of animated GIF output is optimised
without having to use separate command line tools such as
[gifsicle](https://www.lcdf.org/gifsicle/).
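As a quick illustration of the optimisation described above, here is a minimal sketch (not part of the diff) that re-encodes an animated GIF with sharp; `in.gif` and `out.gif` are placeholder filenames.
```javascript
// Minimal sketch: re-encode an animated GIF; the input palette is re-used
// where possible, so no external tool such as gifsicle is needed.
await sharp('in.gif', { animated: true })
  .gif()
  .toFile('out.gif');
```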
### Contributing
A [guide for contributors](https://github.com/lovell/sharp/blob/main/.github/CONTRIBUTING.md)

View File

@@ -16,7 +16,7 @@ sharp('rgba.png')
});
```
Returns **Sharp**
Returns **Sharp**&#x20;
## ensureAlpha
@@ -47,7 +47,7 @@ const rgba = await sharp(rgb)
* Throws **[Error][3]** Invalid alpha transparency level
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -80,7 +80,7 @@ const [red1, red2, ...] = await sharp(input)
* Throws **[Error][3]** Invalid channel
Returns **Sharp**
Returns **Sharp**&#x20;
## joinChannel
@@ -104,7 +104,7 @@ For raw pixel input, the `options` object should contain a `raw` attribute, whic
* Throws **[Error][3]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## bandbool
@@ -128,7 +128,7 @@ sharp('3-channel-rgb-input.png')
* Throws **[Error][3]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
[1]: /api-operation#flatten

View File

@@ -19,7 +19,7 @@ const output = await sharp(input)
* Throws **[Error][4]** Invalid parameter
Returns **Sharp**
Returns **Sharp**&#x20;
## greyscale
@@ -40,7 +40,7 @@ An alpha channel may be present, and will be unchanged by the operation.
const output = await sharp(input).greyscale().toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## grayscale
@@ -50,7 +50,7 @@ Alternative spelling of `greyscale`.
* `grayscale` **[Boolean][5]** (optional, default `true`)
Returns **Sharp**
Returns **Sharp**&#x20;
## pipelineColourspace
@@ -77,7 +77,7 @@ await sharp(input)
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -95,7 +95,7 @@ Alternative spelling of `pipelineColourspace`.
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## toColourspace
@@ -117,7 +117,7 @@ await sharp(input)
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## toColorspace
@@ -131,7 +131,7 @@ Alternative spelling of `toColourspace`.
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

View File

@@ -7,6 +7,9 @@ Composite image(s) over the processed (resized, extracted etc.) image.
The images to composite must be the same size or smaller than the processed image.
If both `top` and `left` options are provided, they take precedence over `gravity`.
Any resize or rotate operations in the same processing pipeline
will always be applied to the input image before composition.
The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
`dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
`xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
@@ -25,10 +28,22 @@ and [https://www.cairographics.org/operators/][2]
* `images[].input.create` **[Object][4]?** describes a blank overlay to be created.
* `images[].input.create.width` **[Number][7]?**
* `images[].input.create.height` **[Number][7]?**
* `images[].input.create.width` **[Number][7]?**&#x20;
* `images[].input.create.height` **[Number][7]?**&#x20;
* `images[].input.create.channels` **[Number][7]?** 3-4
* `images[].input.create.background` **([String][6] | [Object][4])?** parsed by the [color][8] module to extract values for red, green, blue and alpha.
* `images[].input.text` **[Object][4]?** describes a new text image to be created.
* `images[].input.text.text` **[string][6]?** text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
* `images[].input.text.font` **[string][6]?** font name to render with.
* `images[].input.text.fontfile` **[string][6]?** absolute filesystem path to a font file that can be used by `font`.
* `images[].input.text.width` **[number][7]** integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. (optional, default `0`)
* `images[].input.text.height` **[number][7]** integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. (optional, default `0`)
* `images[].input.text.align` **[string][6]** text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). (optional, default `'left'`)
* `images[].input.text.justify` **[boolean][9]** set this to true to apply justification to the text. (optional, default `false`)
* `images[].input.text.dpi` **[number][7]** the resolution (size) at which to render the text. Does not take effect if `height` is specified. (optional, default `72`)
* `images[].input.text.rgba` **[boolean][9]** set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. (optional, default `false`)
* `images[].input.text.spacing` **[number][7]** text line height in points. Will use the font line height if none is specified. (optional, default `0`)
* `images[].blend` **[String][6]** how to blend this image with the image below. (optional, default `'over'`)
* `images[].gravity` **[String][6]** gravity at which to place the overlay. (optional, default `'centre'`)
* `images[].top` **[Number][7]?** the pixel offset from the top edge.
@@ -38,9 +53,9 @@ and [https://www.cairographics.org/operators/][2]
* `images[].density` **[Number][7]** number representing the DPI for vector overlay image. (optional, default `72`)
* `images[].raw` **[Object][4]?** describes overlay when using raw pixel data.
* `images[].raw.width` **[Number][7]?**
* `images[].raw.height` **[Number][7]?**
* `images[].raw.channels` **[Number][7]?**
* `images[].raw.width` **[Number][7]?**&#x20;
* `images[].raw.height` **[Number][7]?**&#x20;
* `images[].raw.channels` **[Number][7]?**&#x20;
* `images[].animated` **[boolean][9]** Set to `true` to read all frames/pages of an animated image. (optional, default `false`)
* `images[].failOn` **[string][6]** @see [constructor parameters][10] (optional, default `'warning'`)
* `images[].limitInputPixels` **([number][7] | [boolean][9])** @see [constructor parameters][10] (optional, default `268402689`)
@@ -83,7 +98,7 @@ sharp('input.png')
* Throws **[Error][11]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
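To make the new text overlay input and the ordering note above concrete, here is a hedged sketch (not taken from the diff); `background.jpg`, the caption text and the option values are placeholders.
```javascript
// Sketch: the resize is applied to the input before composition (see the
// note above), then a rendered-text overlay is placed bottom-right.
const output = await sharp('background.jpg')
  .resize(800, 600)
  .composite([{
    input: {
      text: {
        text: '<span foreground="white">Caption</span>', // Pango markup
        rgba: true,
        dpi: 144
      }
    },
    gravity: 'southeast'
  }])
  .toBuffer();
```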

View File

@@ -24,7 +24,7 @@ Implements the [stream.Duplex][1] class.
* `options.limitInputPixels` **([number][14] | [boolean][15])** Do not process input images where the number of pixels
(width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). (optional, default `268402689`)
* `options.unlimited` **[boolean][15]** Set this to `true` to remove safety features that help prevent memory exhaustion (SVG, PNG). (optional, default `false`)
* `options.unlimited` **[boolean][15]** Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF). (optional, default `false`)
* `options.sequentialRead` **[boolean][15]** Set this to `true` to use sequential rather than random access where possible.
This can reduce memory usage and might improve performance on some systems. (optional, default `false`)
* `options.density` **[number][14]** number representing the DPI for vector images in the range 1 to 100000. (optional, default `72`)
@@ -51,6 +51,18 @@ Implements the [stream.Duplex][1] class.
* `options.create.noise.type` **[string][12]?** type of generated noise, currently only `gaussian` is supported.
* `options.create.noise.mean` **[number][14]?** mean of pixels in generated noise.
* `options.create.noise.sigma` **[number][14]?** standard deviation of pixels in generated noise.
* `options.text` **[Object][13]?** describes a new text image to be created.
* `options.text.text` **[string][12]?** text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
* `options.text.font` **[string][12]?** font name to render with.
* `options.text.fontfile` **[string][12]?** absolute filesystem path to a font file that can be used by `font`.
* `options.text.width` **[number][14]** integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. (optional, default `0`)
* `options.text.height` **[number][14]** integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. (optional, default `0`)
* `options.text.align` **[string][12]** text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). (optional, default `'left'`)
* `options.text.justify` **[boolean][15]** set this to true to apply justification to the text. (optional, default `false`)
* `options.text.dpi` **[number][14]** the resolution (size) at which to render the text. Does not take effect if `height` is specified. (optional, default `72`)
* `options.text.rgba` **[boolean][15]** set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. (optional, default `false`)
* `options.text.spacing` **[number][14]** text line height in points. Will use the font line height if none is specified. (optional, default `0`)
### Examples
@@ -127,9 +139,32 @@ await sharp({
}).toFile('noise.png');
```
```javascript
// Generate an image from text
await sharp({
text: {
text: 'Hello, world!',
width: 400, // max width
height: 300 // max height
}
}).toFile('text_bw.png');
```
```javascript
// Generate an rgba image from text using pango markup and font
await sharp({
text: {
text: '<span foreground="red">Red!</span><span background="cyan">blue</span>',
font: 'sans',
rgba: true,
dpi: 300
}
}).toFile('text_rgba.png');
```
* Throws **[Error][17]** Invalid parameters
Returns **[Sharp][18]**
Returns **[Sharp][18]**&#x20;
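The constructor diff above also widens the `unlimited` option to cover JPEG, PNG, SVG and HEIF input; a minimal sketch of its use follows (not from the diff; `large.heic` is a placeholder filename).
```javascript
// Sketch: opt out of the input safety limits for a very large HEIF image.
const data = await sharp('large.heic', { unlimited: true })
  .resize(1024)
  .toBuffer();
```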
## clone
@@ -180,7 +215,7 @@ promises.push(
.toFile("optimized-500.webp")
);
// https://github.com/sindresorhus/got#gotstreamurl-options
// https://github.com/sindresorhus/got/blob/main/documentation/3-streams.md
got.stream("https://www.example.com/some-file.jpg").pipe(sharpStream);
Promise.all(promises)
@@ -195,7 +230,7 @@ Promise.all(promises)
});
```
Returns **[Sharp][18]**
Returns **[Sharp][18]**&#x20;
[1]: http://nodejs.org/api/stream.html#stream_class_stream_duplex

View File

@@ -7,15 +7,18 @@ Fast access to (uncached) image metadata without decoding any compressed pixel d
This is taken from the header of the input image.
It does not include operations, such as resize, to be applied to the output image.
Dimensions in the response will respect the `page` and `pages` properties of the
[constructor parameters][1].
A `Promise` is returned when `callback` is not provided.
* `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
* `size`: Total size of image in bytes, for Stream and Buffer input only
* `width`: Number of pixels wide (EXIF orientation is not taken into consideration, see example below)
* `height`: Number of pixels high (EXIF orientation is not taken into consideration, see example below)
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][1]
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][2]
* `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
* `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...][2]
* `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...][3]
* `density`: Number of pixels per inch (DPI), if present
* `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
* `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
@@ -33,14 +36,14 @@ A `Promise` is returned when `callback` is not provided.
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* `orientation`: Number value of the EXIF Orientation header, if present
* `exif`: Buffer containing raw EXIF data, if present
* `icc`: Buffer containing raw [ICC][3] profile data, if present
* `icc`: Buffer containing raw [ICC][4] profile data, if present
* `iptc`: Buffer containing raw IPTC data, if present
* `xmp`: Buffer containing raw XMP data, if present
* `tifftagPhotoshop`: Buffer containing raw TIFFTAG_PHOTOSHOP data, if present
* `tifftagPhotoshop`: Buffer containing raw TIFFTAG\_PHOTOSHOP data, if present
### Parameters
* `callback` **[Function][4]?** called with the arguments `(err, metadata)`
* `callback` **[Function][5]?** called with the arguments `(err, metadata)`
### Examples
@@ -69,13 +72,13 @@ image
const size = getNormalSize(await sharp(input).metadata());
function getNormalSize({ width, height, orientation }) {
return orientation || 0 >= 5
return (orientation || 0) >= 5
? { width: height, height: width }
: { width, height };
}
```
Returns **([Promise][5]<[Object][6]> | Sharp)**
Returns **([Promise][6]<[Object][7]> | Sharp)**&#x20;
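A short sketch of the new page/pages note above (not from the diff; `animated.webp` is a placeholder and the comments state assumptions about multi-page reads).
```javascript
// Sketch: metadata dimensions respect the page/pages input options.
const onePage = await sharp('animated.webp', { page: 0 }).metadata();
const allPages = await sharp('animated.webp', { pages: -1 }).metadata();
// Reading all pages stacks frames vertically, so allPages.height should
// cover every frame while onePage.height is a single frame.
```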
## stats
@@ -103,7 +106,7 @@ written to a buffer in order to run `stats` on the result (see third example).
### Parameters
* `callback` **[Function][4]?** called with the arguments `(err, stats)`
* `callback` **[Function][5]?** called with the arguments `(err, stats)`
### Examples
@@ -129,16 +132,18 @@ const part = await image.extract(region).toBuffer();
const stats = await sharp(part).stats();
```
Returns **[Promise][5]<[Object][6]>**
Returns **[Promise][6]<[Object][7]>**&#x20;
[1]: https://www.libvips.org/API/current/VipsImage.html#VipsInterpretation
[1]: /api-constructor#parameters
[2]: https://www.libvips.org/API/current/VipsImage.html#VipsBandFormat
[2]: https://www.libvips.org/API/current/VipsImage.html#VipsInterpretation
[3]: https://www.npmjs.com/package/icc
[3]: https://www.libvips.org/API/current/VipsImage.html#VipsBandFormat
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function
[4]: https://www.npmjs.com/package/icc
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

View File

@@ -16,8 +16,11 @@ Mirroring is supported and may infer the use of a flip operation.
The use of `rotate` implies the removal of the EXIF `Orientation` tag, if any.
Method order is important when both rotating and extracting regions,
for example `rotate(x).extract(y)` will produce a different result to `extract(y).rotate(x)`.
Only one rotation can occur per pipeline.
Previous calls to `rotate` in the same pipeline will be ignored.
Method order is important when rotating, resizing and/or extracting regions,
for example `.rotate(x).extract(y)` will produce a different result to `.extract(y).rotate(x)`.
### Parameters
@@ -40,13 +43,24 @@ const pipeline = sharp()
readableStream.pipe(pipeline);
```
```javascript
const rotateThenResize = await sharp(input)
.rotate(90)
.resize({ width: 16, height: 8, fit: 'fill' })
.toBuffer();
const resizeThenRotate = await sharp(input)
.resize({ width: 16, height: 8, fit: 'fill' })
.rotate(90)
.toBuffer();
```
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## flip
Flip the image about the vertical Y axis. This always occurs after rotation, if any.
Flip the image about the vertical Y axis. This always occurs before rotation, if any.
The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
### Parameters
@@ -59,11 +73,11 @@ The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
const output = await sharp(input).flip().toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
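If the revised ordering note above holds, call order no longer matters for flip relative to rotate; a hedged sketch, using the docs' assumed `input` variable:
```javascript
// Sketch (assumption): flip is applied before rotation regardless of call
// order, so these two pipelines should produce the same result in v0.31.0.
const a = await sharp(input).rotate(90).flip().toBuffer();
const b = await sharp(input).flip().rotate(90).toBuffer();
```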
## flop
Flop the image about the horizontal X axis. This always occurs after rotation, if any.
Flop the image about the horizontal X axis. This always occurs before rotation, if any.
The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
### Parameters
@@ -76,7 +90,7 @@ The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
const output = await sharp(input).flop().toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## affine
@@ -128,7 +142,7 @@ inputStream
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -170,7 +184,7 @@ const data = await sharp(input).sharpen({ sigma: 2 }).toBuffer();
const data = await sharp(input)
.sharpen({
sigma: 2,
m1: 0
m1: 0,
m2: 3,
x1: 3,
y2: 15,
@@ -181,7 +195,7 @@ const data = await sharp(input)
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## median
@@ -204,7 +218,7 @@ const output = await sharp(input).median(5).toBuffer();
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## blur
@@ -234,7 +248,7 @@ const gaussianBlurred = await sharp(input)
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## flatten
@@ -244,7 +258,7 @@ See also [removeAlpha][9].
### Parameters
* `options` **[Object][2]?**
* `options` **[Object][2]?**&#x20;
* `options.background` **([string][3] | [Object][2])** background colour, parsed by the [color][4] module, defaults to black. (optional, default `{r:0,g:0,b:0}`)
@@ -256,7 +270,7 @@ await sharp(rgbaInput)
.toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## gamma
@@ -277,7 +291,7 @@ Supply a second argument to use a different output gamma value, otherwise the fi
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## negate
@@ -285,7 +299,7 @@ Produce the "negative" of the image.
### Parameters
* `options` **[Object][2]?**
* `options` **[Object][2]?**&#x20;
* `options.alpha` **[Boolean][6]** Whether or not to negate any alpha channel (optional, default `true`)
@@ -303,7 +317,7 @@ const output = await sharp(input)
.toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## normalise
@@ -319,7 +333,7 @@ Enhance output image contrast by stretching its luminance to cover the full dyna
const output = await sharp(input).normalise().toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## normalize
@@ -335,7 +349,7 @@ Alternative spelling of normalise.
const output = await sharp(input).normalize().toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
## clahe
@@ -346,7 +360,7 @@ This will, in general, enhance the clarity of the image by bringing out darker d
### Parameters
* `options` **[Object][2]**
* `options` **[Object][2]**&#x20;
* `options.width` **[number][1]** integer width of the region in pixels.
* `options.height` **[number][1]** integer height of the region in pixels.
@@ -367,7 +381,7 @@ const output = await sharp(input)
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -379,7 +393,7 @@ Convolve the image with the specified kernel.
### Parameters
* `kernel` **[Object][2]**
* `kernel` **[Object][2]**&#x20;
* `kernel.width` **[number][1]** width of the kernel in pixels.
* `kernel.height` **[number][1]** height of the kernel in pixels.
@@ -405,7 +419,7 @@ sharp(input)
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## threshold
@@ -414,7 +428,7 @@ Any pixel value greater than or equal to the threshold value will be set to 255,
### Parameters
* `threshold` **[number][1]** a value in the range 0-255 representing the level at which the threshold will be applied. (optional, default `128`)
* `options` **[Object][2]?**
* `options` **[Object][2]?**&#x20;
* `options.greyscale` **[Boolean][6]** convert to single channel greyscale. (optional, default `true`)
* `options.grayscale` **[Boolean][6]** alternative spelling for greyscale. (optional, default `true`)
@@ -423,7 +437,7 @@ Any pixel value greater than or equal to the threshold value will be set to 255,
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## boolean
@@ -436,34 +450,52 @@ the selected bitwise boolean `operation` between the corresponding pixels of the
* `operand` **([Buffer][11] | [string][3])** Buffer containing image data or string containing the path to an image file.
* `operator` **[string][3]** one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively.
* `options` **[Object][2]?**
* `options` **[Object][2]?**&#x20;
* `options.raw` **[Object][2]?** describes operand when using raw pixel data.
* `options.raw.width` **[number][1]?**
* `options.raw.height` **[number][1]?**
* `options.raw.channels` **[number][1]?**
* `options.raw.width` **[number][1]?**&#x20;
* `options.raw.height` **[number][1]?**&#x20;
* `options.raw.channels` **[number][1]?**&#x20;
<!---->
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## linear
Apply the linear formula a \* input + b to the image (levels adjustment)
Apply the linear formula `a` \* input + `b` to the image to adjust image levels.
When a single number is provided, it will be used for all image channels.
When an array of numbers is provided, the array length must match the number of channels.
### Parameters
* `a` **[number][1]** multiplier (optional, default `1.0`)
* `b` **[number][1]** offset (optional, default `0.0`)
* `a` **([number][1] | [Array][7]<[number][1]>)** multiplier (optional, default `[]`)
* `b` **([number][1] | [Array][7]<[number][1]>)** offset (optional, default `[]`)
<!---->
### Examples
```javascript
await sharp(input)
.linear(0.5, 2)
.toBuffer();
```
```javascript
await sharp(rgbInput)
.linear(
[0.25, 0.5, 0.75],
[150, 100, 50]
)
.toBuffer();
```
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## recomb
@@ -491,7 +523,7 @@ sharp(input)
* Throws **[Error][5]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -505,7 +537,7 @@ brightness is multiplicative whereas lightness is additive.
### Parameters
* `options` **[Object][2]?**
* `options` **[Object][2]?**&#x20;
* `options.brightness` **[number][1]?** Brightness multiplier
* `options.saturation` **[number][1]?** Saturation multiplier
@@ -552,7 +584,7 @@ const output = await sharp(input)
.toBuffer();
```
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**

View File

@@ -22,6 +22,7 @@ A `Promise` is returned when `callback` is not provided.
`info` contains the output image `format`, `size` (bytes), `width`, `height`,
`channels` and `premultiplied` (indicating if premultiplication was used).
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
### Examples
@@ -58,18 +59,18 @@ See [withMetadata][1] for control over this.
* `err` is an error, if any.
* `data` is the output image data.
* `info` contains the output image `format`, `size` (bytes), `width`, `height`,
`channels` and `premultiplied` (indicating if premultiplication was used).
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
`channels` and `premultiplied` (indicating if premultiplication was used).
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
A `Promise` is returned when `callback` is not provided.
### Parameters
* `options` **[Object][6]?**
* `options` **[Object][6]?**&#x20;
* `options.resolveWithObject` **[boolean][10]?** Resolve the Promise with an Object containing `data` and `info` properties instead of resolving only with `data`.
* `callback` **[Function][3]?**
* `callback` **[Function][3]?**&#x20;
### Examples
@@ -125,7 +126,7 @@ EXIF metadata is unsupported for TIFF output.
### Parameters
* `options` **[Object][6]?**
* `options` **[Object][6]?**&#x20;
* `options.orientation` **[number][12]?** value between 1 and 8, used to update the EXIF `Orientation` tag.
* `options.icc` **[string][2]?** filesystem path to output ICC profile, defaults to sRGB.
@@ -163,7 +164,7 @@ const data = await sharp(input)
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## toFormat
@@ -185,7 +186,7 @@ const data = await sharp(input)
* Throws **[Error][4]** unsupported format or options
Returns **Sharp**
Returns **Sharp**&#x20;
## jpeg
@@ -230,7 +231,7 @@ const data = await sharp(input)
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
## png
@@ -242,7 +243,7 @@ Set `palette` to `true` for slower, indexed PNG output.
### Parameters
* `options` **[Object][6]?**
* `options` **[Object][6]?**&#x20;
* `options.progressive` **[boolean][10]** use progressive (interlace) scan (optional, default `false`)
* `options.compressionLevel` **[number][12]** zlib compression level, 0 (fastest, largest) to 9 (slowest, smallest) (optional, default `6`)
@@ -273,7 +274,7 @@ const data = await sharp(input)
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
## webp
@@ -286,11 +287,13 @@ Use these WebP options for output image.
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
* `options.alphaQuality` **[number][12]** quality of alpha layer, integer 0-100 (optional, default `100`)
* `options.lossless` **[boolean][10]** use lossless compression mode (optional, default `false`)
* `options.nearLossless` **[boolean][10]** use near_lossless compression mode (optional, default `false`)
* `options.nearLossless` **[boolean][10]** use near\_lossless compression mode (optional, default `false`)
* `options.smartSubsample` **[boolean][10]** use high quality chroma subsampling (optional, default `false`)
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 6 (slowest) (optional, default `4`)
* `options.loop` **[number][12]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
* `options.delay` **([number][12] | [Array][13]<[number][12]>)?** delay(s) between animation frames (in milliseconds)
* `options.minSize` **[boolean][10]** prevent use of animation key frames to minimise file size (slow) (optional, default `false`)
* `options.mixed` **[boolean][10]** allow mixture of lossy and lossless animation frames (slow) (optional, default `false`)
* `options.force` **[boolean][10]** force WebP output, otherwise attempt to use input format (optional, default `true`)
### Examples
@@ -311,7 +314,7 @@ const outputWebp = await sharp(inputWebp, { animated: true })
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
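A hedged sketch of the new `minSize` and `mixed` animation options listed above (not part of the diff; `animated.gif` is a placeholder input and the option values are illustrative).
```javascript
// Sketch: convert an animated GIF to animated WebP, trading encode time for
// smaller output via the new options.
const smallWebp = await sharp('animated.gif', { animated: true })
  .webp({ minSize: true, mixed: true, effort: 6 })
  .toBuffer();
```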
## gif
@@ -319,10 +322,14 @@ Use these GIF options for the output image.
The first entry in the palette is reserved for transparency.
The palette of the input image will be re-used if possible.
### Parameters
* `options` **[Object][6]?** output options
* `options.reoptimise` **[boolean][10]** always generate new palettes (slow), re-use existing by default (optional, default `false`)
* `options.reoptimize` **[boolean][10]** alternative spelling of `options.reoptimise` (optional, default `false`)
* `options.colours` **[number][12]** maximum number of palette entries, including transparency, between 2 and 256 (optional, default `256`)
* `options.colors` **[number][12]** alternative spelling of `options.colours` (optional, default `256`)
* `options.effort` **[number][12]** CPU effort, between 1 (fastest) and 10 (slowest) (optional, default `7`)
@@ -356,7 +363,7 @@ const out = await sharp('in.gif', { animated: true })
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
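A minimal sketch of the new GIF `reoptimise`/`colours` options above (not from the diff; the filename and values are placeholders).
```javascript
// Sketch: force a newly-generated, smaller palette instead of re-using the
// input palette, which is the default in v0.31.0.
const data = await sharp('in.gif', { animated: true })
  .gif({ reoptimise: true, colours: 128 })
  .toBuffer();
```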
@@ -401,7 +408,7 @@ const data = await sharp(input)
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -419,7 +426,7 @@ The `density` can be set in pixels/inch via [withMetadata][1] instead of providi
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
* `options.force` **[boolean][10]** force TIFF output, otherwise attempt to use input format (optional, default `true`)
* `options.compression` **[string][2]** compression options: lzw, deflate, jpeg, ccittfax4 (optional, default `'jpeg'`)
* `options.compression` **[string][2]** compression options: none, jpeg, deflate, packbits, ccittfax4, lzw, webp, zstd, jp2k (optional, default `'jpeg'`)
* `options.predictor` **[string][2]** compression predictor options: none, horizontal, float (optional, default `'horizontal'`)
* `options.pyramid` **[boolean][10]** write an image pyramid (optional, default `false`)
* `options.tile` **[boolean][10]** write a tiled tiff (optional, default `false`)
@@ -445,7 +452,7 @@ sharp('input.svg')
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
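To illustrate the expanded TIFF `compression` list above, a minimal sketch (not from the diff; `input.png` is a placeholder):
```javascript
// Sketch: write TIFF output using the newly-supported WebP compression.
const data = await sharp('input.png')
  .tiff({ compression: 'webp' })
  .toBuffer();
```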
## avif
@@ -465,11 +472,23 @@ AVIF image sequences are not supported.
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
<!---->
### Examples
```javascript
const data = await sharp(input)
.avif({ effort: 2 })
.toBuffer();
```
```javascript
const data = await sharp(input)
.avif({ lossless: true })
.toBuffer();
```
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -479,7 +498,7 @@ Returns **Sharp**
Use these HEIF options for output image.
Support for patent-encumbered HEIC images requires the use of a
Support for patent-encumbered HEIC images using `hevc` compression requires the use of a
globally-installed libvips compiled with support for libheif, libde265 and x265.
### Parameters
@@ -492,11 +511,17 @@ globally-installed libvips compiled with support for libheif, libde265 and x265.
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
<!---->
### Examples
```javascript
const data = await sharp(input)
.heif({ compression: 'hevc' })
.toBuffer();
```
* Throws **[Error][4]** Invalid options
Returns **Sharp**
Returns **Sharp**&#x20;
**Meta**
@@ -538,12 +563,15 @@ const data = await sharp('input.png')
## tile
Use tile-based deep zoom (image pyramid) output.
Set the format and options for tile images via the `toFormat`, `jpeg`, `png` or `webp` functions.
Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed archive file format.
The container will be set to `zip` when the output is a Buffer or Stream, otherwise it will default to `fs`.
### Parameters
* `options` **[Object][6]?**
* `options` **[Object][6]?**&#x20;
* `options.size` **[number][12]** tile size in pixels, a value between 1 and 8192. (optional, default `256`)
* `options.overlap` **[number][12]** tile overlap in pixels, a value between 0 and 8192. (optional, default `0`)
@@ -556,6 +584,7 @@ Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed arc
* `options.centre` **[boolean][10]** centre image in tile. (optional, default `false`)
* `options.center` **[boolean][10]** alternative spelling of centre. (optional, default `false`)
* `options.id` **[string][2]** when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json` (optional, default `'https://example.com/iiif'`)
* `options.basename` **[string][2]?** the name of the directory within the zip file when container is `zip`.
### Examples
@@ -571,9 +600,22 @@ sharp('input.tiff')
});
```
```javascript
const zipFileWithTiles = await sharp(input)
.tile({ basename: "tiles" })
.toBuffer();
```
```javascript
const iiififier = sharp().tile({ layout: "iiif" });
readableStream
.pipe(iiififier)
.pipe(writeableStream);
```
* Throws **[Error][4]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## timeout
@@ -585,11 +627,25 @@ Time spent waiting for a libuv thread to become available is not included.
### Parameters
* `options` **[Object][6]**
* `options` **[Object][6]**&#x20;
* `options.seconds` **[number][12]** Number of seconds after which processing will be stopped
Returns **Sharp**
### Examples
```javascript
// Ensure processing takes no longer than 3 seconds
try {
const data = await sharp(input)
.blur(1000)
.timeout({ seconds: 3 })
.toBuffer();
} catch (err) {
if (err.message.includes('timeout')) { ... }
}
```
Returns **Sharp**&#x20;
**Meta**

View File

@@ -36,17 +36,20 @@ Possible interpolation kernels are:
* `lanczos2`: Use a [Lanczos kernel][7] with `a=2`.
* `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
Only one resize can occur per pipeline.
Previous calls to `resize` in the same pipeline will be ignored.
### Parameters
* `width` **[number][8]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
* `height` **[number][8]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
* `options` **[Object][9]?**
* `options` **[Object][9]?**&#x20;
* `options.width` **[String][10]?** alternative means of specifying `width`. If both are present this take priority.
* `options.height` **[String][10]?** alternative means of specifying `height`. If both are present this take priority.
* `options.fit` **[String][10]** how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`. (optional, default `'cover'`)
* `options.position` **[String][10]** position, gravity or strategy to use when `fit` is `cover` or `contain`. (optional, default `'centre'`)
* `options.background` **([String][10] | [Object][9])** background colour when using a `fit` of `contain`, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
* `options.background` **([String][10] | [Object][9])** background colour when `fit` is `contain`, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
* `options.kernel` **[String][10]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
* `options.withoutEnlargement` **[Boolean][12]** do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
* `options.withoutReduction` **[Boolean][12]** do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option. (optional, default `false`)
@@ -144,7 +147,7 @@ const scaleByHalf = await sharp(input)
* Throws **[Error][13]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
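A hedged sketch of the new single-resize rule above, using the docs' assumed `input` variable:
```javascript
// Sketch: only one resize per pipeline; the earlier call is ignored and a
// warning is emitted, so the output here should be 200px wide.
const data = await sharp(input)
  .resize(100)
  .resize(200)
  .toBuffer();
```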
## extend
@@ -190,7 +193,7 @@ sharp(input)
* Throws **[Error][13]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## extract
@@ -231,25 +234,70 @@ sharp(input)
* Throws **[Error][13]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
## trim
Trim "boring" pixels from all edges that contain values similar to the top-left pixel.
Images consisting entirely of a single colour will calculate "boring" using the alpha channel, if any.
Trim pixels from all edges that contain values similar to the given background colour, which defaults to that of the top-left pixel.
Images with an alpha channel will use the combined bounding box of alpha and non-alpha channels.
If the result of this operation would trim an image to nothing then no change is made.
The `info` response Object, obtained from callback of `.toFile()` or `.toBuffer()`,
will contain `trimOffsetLeft` and `trimOffsetTop` properties.
### Parameters
* `threshold` **[number][8]** the allowed difference from the top-left pixel, a number greater than zero. (optional, default `10`)
* `trim` **([string][10] | [number][8] | [Object][9])** the specific background colour to trim, the threshold for doing so or an Object with both.
<!---->
* `trim.background` **([string][10] | [Object][9])** background colour, parsed by the [color][11] module, defaults to that of the top-left pixel. (optional, default `'top-left pixel'`)
* `trim.threshold` **[number][8]** the allowed difference from the above colour, a positive number. (optional, default `10`)
### Examples
```javascript
// Trim pixels with a colour similar to that of the top-left pixel.
sharp(input)
.trim()
.toFile(output, function(err, info) {
...
});
```
```javascript
// Trim pixels with the exact same colour as that of the top-left pixel.
sharp(input)
.trim(0)
.toFile(output, function(err, info) {
...
});
```
```javascript
// Trim only pixels with a similar colour to red.
sharp(input)
.trim("#FF0000")
.toFile(output, function(err, info) {
...
});
```
```javascript
// Trim all "yellow-ish" pixels, being more lenient with the higher threshold.
sharp(input)
.trim({
background: "yellow",
threshold: 42,
})
.toFile(output, function(err, info) {
...
});
```
* Throws **[Error][13]** Invalid parameters
Returns **Sharp**
Returns **Sharp**&#x20;
[1]: https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit

View File

@@ -10,7 +10,7 @@ An Object containing nested boolean values representing the available input and
console.log(sharp.format);
```
Returns **[Object][1]**
Returns **[Object][1]**&#x20;
## interpolators
@@ -90,7 +90,7 @@ sharp.cache( { files: 0 } );
sharp.cache(false);
```
Returns **[Object][1]**
Returns **[Object][1]**&#x20;
## concurrency
@@ -115,7 +115,7 @@ The maximum number of images that sharp can process in parallel
is controlled by libuv's `UV_THREADPOOL_SIZE` environment variable,
which defaults to 4.
[https://nodejs.org/api/cli.html#uv_threadpool_sizesize][12]
[https://nodejs.org/api/cli.html#uv\_threadpool\_sizesize][12]
For example, by default, a machine with 8 CPU cores will process
4 images in parallel and use up to 8 threads per image,
@@ -123,7 +123,7 @@ so there will be up to 32 concurrent threads.
### Parameters
* `concurrency` **[number][11]?**
* `concurrency` **[number][11]?**&#x20;
### Examples
@@ -163,7 +163,7 @@ Provides access to internal task counters.
const counters = sharp.counters(); // { queue: 2, process: 4 }
```
Returns **[Object][1]**
Returns **[Object][1]**&#x20;
## simd
@@ -189,7 +189,7 @@ const simd = sharp.simd(false);
// prevent libvips from using liborc at runtime
```
Returns **[boolean][10]**
Returns **[boolean][10]**&#x20;
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

View File

@@ -2,7 +2,6 @@
const fs = require('fs').promises;
const path = require('path');
const documentation = require('documentation');
[
'constructor',
@@ -15,6 +14,8 @@ const documentation = require('documentation');
'output',
'utility'
].forEach(async (m) => {
const documentation = await import('documentation');
const input = path.join('lib', `${m}.js`);
const output = path.join('docs', `api-${m}.md`);

View File

@@ -1,9 +1,87 @@
# Changelog
## v0.31 - *eagle*
Requires libvips v8.13.1
### v0.31.0 - 5th September 2022
* Drop support for Node.js 12, now requires Node.js >= 14.15.0.
* GIF output now re-uses input palette if possible. Use `reoptimise` option to generate a new palette.
* Add WebP `minSize` and `mixed` options for greater control over animation frames.
* Remove previously-deprecated WebP `reductionEffort` and HEIF `speed` options. Use `effort` to control these.
* The `flip` and `flop` operations will now occur before the `rotate` operation.
* Improve `normalise` operation with use of histogram.
[#200](https://github.com/lovell/sharp/issues/200)
* Use combined bounding box of alpha and non-alpha channels for `trim` operation.
[#2166](https://github.com/lovell/sharp/issues/2166)
* Add Buffer and Stream support to tile-based output.
[#2238](https://github.com/lovell/sharp/issues/2238)
* Add input `fileSuffix` and output `alias` to `format` information.
[#2642](https://github.com/lovell/sharp/issues/2642)
* Re-introduce support for greyscale ICC profiles (temporarily removed in 0.30.2).
[#3114](https://github.com/lovell/sharp/issues/3114)
* Add support for WebP and PackBits `compression` options with TIFF output.
[#3198](https://github.com/lovell/sharp/issues/3198)
* Ensure OpenSlide and FITS input works with custom libvips.
[#3226](https://github.com/lovell/sharp/issues/3226)
* Ensure `trim` operation is a no-op when it would reduce an image to nothing.
[#3223](https://github.com/lovell/sharp/issues/3223)
* Expose `vips_text` to create an image containing rendered text.
[#3252](https://github.com/lovell/sharp/pull/3252)
[@brahima](https://github.com/brahima)
* Ensure only properties owned by the `withMetadata` EXIF Object are parsed.
[#3292](https://github.com/lovell/sharp/issues/3292)
* Expand `linear` operation to allow use of per-channel arrays.
[#3303](https://github.com/lovell/sharp/pull/3303)
[@antonmarsden](https://github.com/antonmarsden)
* Ensure the order of `rotate`, `resize` and `extend` operations is respected where possible.
Emit warnings when previous calls in the same pipeline will be ignored.
[#3319](https://github.com/lovell/sharp/issues/3319)
* Ensure PNG bitdepth can be set for non-palette output.
[#3322](https://github.com/lovell/sharp/issues/3322)
* Add trim option to provide a specific background colour.
[#3332](https://github.com/lovell/sharp/pull/3332)
[@mart-jansink](https://github.com/mart-jansink)
* Ensure resized image is unpremultiplied before composite.
[#3334](https://github.com/lovell/sharp/issues/3334)
## v0.30 - *dresser*
Requires libvips v8.12.2
### v0.30.7 - 22nd June 2022
* Ensure tiled composition always works with outside resizing.
[#3227](https://github.com/lovell/sharp/issues/3227)
* Allow WebP encoding effort of 0.
[#3261](https://github.com/lovell/sharp/pull/3261)
[@AlexanderTheGrey](https://github.com/AlexanderTheGrey)
* Prevent upsampling via libwebp.
[#3267](https://github.com/lovell/sharp/pull/3267)
[@blacha](https://github.com/blacha)
### v0.30.6 - 30th May 2022
* Allow values for `limitInputPixels` larger than 32-bit.


@@ -248,3 +248,15 @@ GitHub: https://github.com/ankurparihar
Name: Joona Heinikoski
GitHub: https://github.com/joonamo
Name: AlexanderTheGrey
GitHub: https://github.com/AlexanderTheGrey
Name: Blayne Chard
GitHub: https://github.com/blacha
Name: Brahim
GitHub: https://github.com/brahima
Name: Anton Marsden
GitHub: https://github.com/antonmarsden


@@ -10,7 +10,7 @@ yarn add sharp
## Prerequisites
* Node.js >= 12.13.0
* Node.js >= 14.15.0
## Prebuilt binaries
@@ -98,6 +98,14 @@ use the following flags:
npm install --arch=x64 --platform=linux --libc=glibc sharp
```
Multiple platforms and architectures can be supported within the same installation tree.
The following example for macOS installs x64 binaries then adds (via a rebuild) arm64 binaries:
```sh
npm install --platform=darwin --arch=x64 sharp
npm rebuild --platform=darwin --arch=arm64 sharp
```
## Custom libvips
To use a custom, globally-installed version of libvips instead of the provided binaries,
@@ -136,6 +144,16 @@ To install the prebuilt sharp binaries from a directory on the local filesystem,
set the `sharp_local_prebuilds` npm config option
or the `npm_config_sharp_local_prebuilds` environment variable.
URL example:
if `sharp_binary_host` is set to `https://hostname/path`
and the sharp version is `1.2.3` then the resultant URL will be
`https://hostname/path/sharp-v1.2.3-napi-v5-platform-arch.tar.gz`.
Filename example:
if `sharp_local_prebuilds` is set to `/path`
and the sharp version is `1.2.3` then the resultant filename will be
`/path/sharp-v1.2.3-napi-v5-platform-arch.tar.gz`.
### Prebuilt libvips binaries
To install the prebuilt libvips binaries from a custom URL,
@@ -146,10 +164,17 @@ To install the prebuilt libvips binaries from a directory on the local filesyste
set the `sharp_libvips_local_prebuilds` npm config option
or the `npm_config_sharp_libvips_local_prebuilds` environment variable.
The version subpath and file name are appended to these.
For example, if `sharp_libvips_binary_host` is set to `https://hostname/path`
and the libvips version is `1.2.3` then the resultant URL will be
`https://hostname/path/v1.2.3/libvips-1.2.3-platform-arch.tar.br`.
The version subpath and filename are appended to these.
URL example:
if `sharp_libvips_binary_host` is set to `https://hostname/path`
and the libvips version is `4.5.6` then the resultant URL will be
`https://hostname/path/v4.5.6/libvips-4.5.6-platform-arch.tar.br`.
Filename example:
if `sharp_libvips_local_prebuilds` is set to `/path`
and the libvips version is `4.5.6` then the resultant filename will be
`/path/v4.5.6/libvips-4.5.6-platform-arch.tar.br`.
See the Chinese mirror below for a further example.
@@ -263,6 +288,44 @@ buildSync({
esbuild app.js --bundle --platform=node --external:sharp
```
For `serverless-esbuild`, ensure platform-specific binaries are installed
via the `serverless.yml` configuration.
```yaml
custom:
esbuild:
external:
- sharp
packagerOptions:
scripts:
- npm install --arch=x64 --platform=linux sharp
```
## Fonts
When creating text images or rendering SVG images that contain text elements,
`fontconfig` is used to find the relevant fonts.
On Windows and macOS systems, all system fonts are available for use.
On macOS systems using Homebrew, you may need to set the
`PANGOCAIRO_BACKEND` environment variable to a value of `fontconfig`
to ensure it is used for font discovery instead of Core Text.
On Linux systems, fonts installed via the package manager that include the relevant
[`fontconfig` configuration](https://www.freedesktop.org/software/fontconfig/fontconfig-user.html)
are available for use.
If `fontconfig` configuration is not found, the following error will occur:
```
Fontconfig error: Cannot load default config file
```
In serverless environments where there is no control over font packages,
use the `FONTCONFIG_PATH` environment variable to point to a custom location.
Embedded SVG fonts are unsupported.
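A minimal sketch of that serverless approach, assuming a bundled `fonts` directory containing the required font files plus a `fonts.conf`, and a hypothetical "DejaVu Sans" font:
```javascript
// Point fontconfig at the bundled fonts before sharp first renders any text.
// The ./fonts directory and the "DejaVu Sans" font name are assumptions for this sketch.
const path = require('path');
process.env.FONTCONFIG_PATH = path.join(__dirname, 'fonts');

const sharp = require('sharp');
sharp({
  text: { text: 'Hello from a serverless function', font: 'DejaVu Sans', dpi: 150 }
})
  .toFile('hello.png')
  .catch(console.error);
```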
## Worker threads
On some platforms, including glibc-based Linux,


@@ -10,7 +10,7 @@ A test to benchmark the performance of this module relative to alternatives.
* [gm](https://www.npmjs.com/package/gm) v1.23.1 - Fully featured wrapper around GraphicsMagick's `gm` command line utility.
* [@squoosh/lib](https://www.npmjs.com/package/@squoosh/lib) v0.4.0 - Image libraries transpiled to WebAssembly, includes GPLv3 code.
* [@squoosh/cli](https://www.npmjs.com/package/@squoosh/cli) v0.7.2 - Command line wrapper around `@squoosh/lib`, avoids GPLv3 by spawning process.
* sharp v0.30.0 / libvips v8.12.2 - Caching within libvips disabled to ensure a fair comparison.
* sharp v0.31.0 / libvips v8.13.1 - Caching within libvips disabled to ensure a fair comparison.
## The task
@@ -20,24 +20,24 @@ then compress to JPEG at a "quality" setting of 80.
## Test environment
* AWS EC2 eu-west-1 [c5ad.xlarge](https://aws.amazon.com/ec2/instance-types/c5/) (4x AMD EPYC 7R32)
* Ubuntu 21.10 (ami-0258eeb71ddf238b3)
* Node.js 16.13.2
* AWS EC2 eu-west-1 [c6a.xlarge](https://aws.amazon.com/ec2/instance-types/c6a/) (4x AMD EPYC 7R13)
* Ubuntu 22.04 (ami-051f7c00cb18501ee)
* Node.js 16.17.0
## Results
| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| jimp | buffer | buffer | 0.84 | 1.0 |
| squoosh-cli | file | file | 1.08 | 1.3 |
| squoosh-lib | buffer | buffer | 1.85 | 2.2 |
| mapnik | buffer | buffer | 3.45 | 4.1 |
| gm | buffer | buffer | 8.60 | 10.2 |
| gm | file | file | 8.66 | 10.3 |
| imagemagick | file | file | 8.79 | 10.5 |
| sharp | stream | stream | 28.90 | 34.4 |
| sharp | file | file | 30.08 | 35.8 |
| sharp | buffer | buffer | 30.42 | 36.2 |
| jimp | buffer | buffer | 0.96 | 1.0 |
| squoosh-cli | file | file | 1.10 | 1.1 |
| squoosh-lib | buffer | buffer | 1.87 | 1.9 |
| mapnik | buffer | buffer | 3.48 | 3.6 |
| gm | buffer | buffer | 8.53 | 8.9 |
| gm | file | file | 8.60 | 9.0 |
| imagemagick | file | file | 9.30 | 9.7 |
| sharp | stream | stream | 32.86 | 34.2 |
| sharp | file | file | 34.82 | 36.3 |
| sharp | buffer | buffer | 35.41 | 36.9 |
Greater libvips performance can be expected with caching enabled (default)
and using 8+ core machines, especially those with larger L1/L2 CPU caches.
@@ -46,27 +46,10 @@ The I/O limits of the relevant (de)compression library will generally determine
## Running the benchmark test
Requires _ImageMagick_, _GraphicsMagick_ and _Mapnik_:
```sh
brew install imagemagick
brew install graphicsmagick
brew install mapnik
```
```sh
sudo apt-get install build-essential imagemagick libmagick++-dev graphicsmagick libmapnik-dev
```
```sh
sudo yum install ImageMagick-devel ImageMagick-c++-devel GraphicsMagick mapnik-devel
```
Requires Docker.
```sh
git clone https://github.com/lovell/sharp.git
cd sharp
npm install --build-from-source
cd test/bench
npm install
npm test
cd sharp/test/bench
./run-with-docker.sh
```

File diff suppressed because one or more lines are too long


@@ -43,6 +43,9 @@ const blend = {
* The images to composite must be the same size or smaller than the processed image.
* If both `top` and `left` options are provided, they take precedence over `gravity`.
*
* Any resize or rotate operations in the same processing pipeline
* will always be applied to the input image before composition.
*
* The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
* `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
* `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
@@ -93,6 +96,17 @@ const blend = {
* @param {Number} [images[].input.create.height]
* @param {Number} [images[].input.create.channels] - 3-4
* @param {String|Object} [images[].input.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {Object} [images[].input.text] - describes a new text image to be created.
* @param {string} [images[].input.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
* @param {string} [images[].input.text.font] - font name to render with.
* @param {string} [images[].input.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
* @param {number} [images[].input.text.width=0] - integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
* @param {number} [images[].input.text.height=0] - integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
* @param {string} [images[].input.text.align='left'] - text alignment (`'left'`, `'centre'`, `'center'`, `'right'`).
* @param {boolean} [images[].input.text.justify=false] - set this to true to apply justification to the text.
* @param {number} [images[].input.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
* @param {boolean} [images[].input.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
* @param {number} [images[].input.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
* @param {String} [images[].blend='over'] - how to blend this image with the image below.
* @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [images[].top] - the pixel offset from the top edge.
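A minimal sketch of the new text input used as a composite overlay; the file names are assumptions:
```javascript
const sharp = require('sharp');
// Render Pango markup as an RGBA overlay and place it in the bottom-right corner.
sharp('background.png')
  .composite([{
    input: { text: { text: '<b>Hello</b>', font: 'sans', rgba: true, dpi: 144 } },
    gravity: 'southeast'
  }])
  .toFile('annotated.png')
  .catch(console.error);
```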


@@ -92,6 +92,27 @@ const debuglog = util.debuglog('sharp');
* }
* }).toFile('noise.png');
*
* @example
* // Generate an image from text
* await sharp({
* text: {
* text: 'Hello, world!',
* width: 400, // max width
* height: 300 // max height
* }
* }).toFile('text_bw.png');
*
* @example
* // Generate an rgba image from text using pango markup and font
* await sharp({
* text: {
* text: '<span foreground="red">Red!</span><span background="cyan">blue</span>',
* font: 'sans',
* rgba: true,
* dpi: 300
* }
* }).toFile('text_rgba.png');
*
* @param {(Buffer|Uint8Array|Uint8ClampedArray|Int8Array|Uint16Array|Int16Array|Uint32Array|Int32Array|Float32Array|Float64Array|string)} [input] - if present, can be
* a Buffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
* a TypedArray containing raw pixel image data, or
@@ -102,7 +123,7 @@ const debuglog = util.debuglog('sharp');
* @param {number|boolean} [options.limitInputPixels=268402689] - Do not process input images where the number of pixels
* (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
* An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF).
* @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (SVG, PNG).
* @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF).
* @param {boolean} [options.sequentialRead=false] - Set this to `true` to use sequential rather than random access where possible.
* This can reduce memory usage and might improve performance on some systems.
* @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.
@@ -126,6 +147,17 @@ const debuglog = util.debuglog('sharp');
* @param {string} [options.create.noise.type] - type of generated noise, currently only `gaussian` is supported.
* @param {number} [options.create.noise.mean] - mean of pixels in generated noise.
* @param {number} [options.create.noise.sigma] - standard deviation of pixels in generated noise.
* @param {Object} [options.text] - describes a new text image to be created.
* @param {string} [options.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
* @param {string} [options.text.font] - font name to render with.
* @param {string} [options.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
* @param {number} [options.text.width=0] - integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
* @param {number} [options.text.height=0] - integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
* @param {string} [options.text.align='left'] - text alignment (`'left'`, `'centre'`, `'center'`, `'right'`).
* @param {boolean} [options.text.justify=false] - set this to true to apply justification to the text.
* @param {number} [options.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
* @param {boolean} [options.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
* @param {number} [options.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
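A sketch of the widened `unlimited` option, which now also covers the JPEG and HEIF decoders; the input file name is an assumption:
```javascript
// Opt out of decoder safety limits for trusted, very large HEIF input only.
const data = await sharp('trusted-huge.heic', { unlimited: true, sequentialRead: true })
  .resize({ width: 1024 })
  .toBuffer();
```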
@@ -192,6 +224,7 @@ const Sharp = function (input, options) {
sharpenY3: 20,
threshold: 0,
thresholdGrayscale: true,
trimBackground: [],
trimThreshold: 0,
gamma: 0,
gammaOut: 0,
@@ -251,9 +284,12 @@ const Sharp = function (input, options) {
webpNearLossless: false,
webpSmartSubsample: false,
webpEffort: 4,
webpMinSize: false,
webpMixed: false,
gifBitdepth: 8,
gifEffort: 7,
gifDither: 1,
gifReoptimise: false,
tiffQuality: 80,
tiffCompression: 'jpeg',
tiffPredictor: 'horizontal',
@@ -282,9 +318,10 @@ const Sharp = function (input, options) {
tileBackground: [255, 255, 255, 255],
tileCentre: false,
tileId: 'https://example.com/iiif',
tileBasename: '',
timeoutSeconds: 0,
linearA: 1,
linearB: 0,
linearA: [],
linearB: [],
// Function to notify of libvips warnings
debuglog: warning => {
this.emit('warning', warning);
@@ -346,7 +383,7 @@ Object.setPrototypeOf(Sharp, stream.Duplex);
* .toFile("optimized-500.webp")
* );
*
* // https://github.com/sindresorhus/got#gotstreamurl-options
* // https://github.com/sindresorhus/got/blob/main/documentation/3-streams.md
* got.stream("https://www.example.com/some-file.jpg").pipe(sharpStream);
*
* Promise.all(promises)


@@ -4,6 +4,18 @@ const color = require('color');
const is = require('./is');
const sharp = require('./sharp');
/**
* Justification alignment
* @member
* @private
*/
const align = {
left: 'low',
center: 'centre',
centre: 'centre',
right: 'high'
};
/**
* Extract input options, if any, from an object.
* @private
@@ -245,6 +257,81 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw new Error('Expected valid width, height and channels to create a new input image');
}
}
// Create a new image with text
if (is.defined(inputOptions.text)) {
if (is.object(inputOptions.text) && is.string(inputOptions.text.text)) {
inputDescriptor.textValue = inputOptions.text.text;
if (is.defined(inputOptions.text.height) && is.defined(inputOptions.text.dpi)) {
throw new Error('Expected only one of dpi or height');
}
if (is.defined(inputOptions.text.font)) {
if (is.string(inputOptions.text.font)) {
inputDescriptor.textFont = inputOptions.text.font;
} else {
throw is.invalidParameterError('text.font', 'string', inputOptions.text.font);
}
}
if (is.defined(inputOptions.text.fontfile)) {
if (is.string(inputOptions.text.fontfile)) {
inputDescriptor.textFontfile = inputOptions.text.fontfile;
} else {
throw is.invalidParameterError('text.fontfile', 'string', inputOptions.text.fontfile);
}
}
if (is.defined(inputOptions.text.width)) {
if (is.number(inputOptions.text.width)) {
inputDescriptor.textWidth = inputOptions.text.width;
} else {
throw is.invalidParameterError('text.textWidth', 'number', inputOptions.text.width);
}
}
if (is.defined(inputOptions.text.height)) {
if (is.number(inputOptions.text.height)) {
inputDescriptor.textHeight = inputOptions.text.height;
} else {
throw is.invalidParameterError('text.height', 'number', inputOptions.text.height);
}
}
if (is.defined(inputOptions.text.align)) {
if (is.string(inputOptions.text.align) && is.string(this.constructor.align[inputOptions.text.align])) {
inputDescriptor.textAlign = this.constructor.align[inputOptions.text.align];
} else {
throw is.invalidParameterError('text.align', 'valid alignment', inputOptions.text.align);
}
}
if (is.defined(inputOptions.text.justify)) {
if (is.bool(inputOptions.text.justify)) {
inputDescriptor.textJustify = inputOptions.text.justify;
} else {
throw is.invalidParameterError('text.justify', 'boolean', inputOptions.text.justify);
}
}
if (is.defined(inputOptions.text.dpi)) {
if (is.number(inputOptions.text.dpi) && is.inRange(inputOptions.text.dpi, 1, 100000)) {
inputDescriptor.textDpi = inputOptions.text.dpi;
} else {
throw is.invalidParameterError('text.dpi', 'number between 1 and 100000', inputOptions.text.dpi);
}
}
if (is.defined(inputOptions.text.rgba)) {
if (is.bool(inputOptions.text.rgba)) {
inputDescriptor.textRgba = inputOptions.text.rgba;
} else {
throw is.invalidParameterError('text.rgba', 'bool', inputOptions.text.rgba);
}
}
if (is.defined(inputOptions.text.spacing)) {
if (is.number(inputOptions.text.spacing)) {
inputDescriptor.textSpacing = inputOptions.text.spacing;
} else {
throw is.invalidParameterError('text.spacing', 'number', inputOptions.text.spacing);
}
}
delete inputDescriptor.buffer;
} else {
throw new Error('Expected a valid string to create an image with text.');
}
}
} else if (is.defined(inputOptions)) {
throw new Error('Invalid input options ' + inputOptions);
}
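A sketch of how these checks combine: `width` and `height` auto-fit the rendered text, so supplying `dpi` as well would throw; the output file name is an assumption:
```javascript
// width + height => the text is auto-fitted; adding dpi here would throw
// "Expected only one of dpi or height".
await sharp({
  text: { text: 'fits the box', width: 400, height: 200, align: 'centre', justify: true }
}).toFile('fitted.png');
```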
@@ -303,6 +390,9 @@ function _isStreamInput () {
* This is taken from the header of the input image.
* It does not include operations, such as resize, to be applied to the output image.
*
* Dimensions in the response will respect the `page` and `pages` properties of the
* {@link /api-constructor#parameters|constructor parameters}.
*
* A `Promise` is returned when `callback` is not provided.
*
* - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
@@ -357,7 +447,7 @@ function _isStreamInput () {
* const size = getNormalSize(await sharp(input).metadata());
*
* function getNormalSize({ width, height, orientation }) {
* return orientation || 0 >= 5
* return (orientation || 0) >= 5
* ? { width: height, height: width }
* : { width, height };
* }
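A sketch of the clarified behaviour for multi-page input; the file name is an assumption, and the relationship shown is what this sketch expects when all pages are requested:
```javascript
// With pages: -1 (all pages), the reported height covers the combined "toilet roll" image.
const { width, height, pages, pageHeight } = await sharp('animated.gif', { pages: -1 }).metadata();
// height is expected to equal pageHeight * pages when every page is requested
```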
@@ -504,4 +594,6 @@ module.exports = function (Sharp) {
metadata,
stats
});
// Class attributes
Sharp.align = align;
};


@@ -18,8 +18,11 @@ const is = require('./is');
*
* The use of `rotate` implies the removal of the EXIF `Orientation` tag, if any.
*
* Method order is important when both rotating and extracting regions,
* for example `rotate(x).extract(y)` will produce a different result to `extract(y).rotate(x)`.
* Only one rotation can occur per pipeline.
* Previous calls to `rotate` in the same pipeline will be ignored.
*
* Method order is important when rotating, resizing and/or extracting regions,
* for example `.rotate(x).extract(y)` will produce a different result to `.extract(y).rotate(x)`.
*
* @example
* const pipeline = sharp()
@@ -32,6 +35,16 @@ const is = require('./is');
* });
* readableStream.pipe(pipeline);
*
* @example
* const rotateThenResize = await sharp(input)
* .rotate(90)
* .resize({ width: 16, height: 8, fit: 'fill' })
* .toBuffer();
* const resizeThenRotate = await sharp(input)
* .resize({ width: 16, height: 8, fit: 'fill' })
* .rotate(90)
* .toBuffer();
*
* @param {number} [angle=auto] angle of rotation.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {string|Object} [options.background="#000000"] parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
@@ -39,6 +52,9 @@ const is = require('./is');
* @throws {Error} Invalid parameters
*/
function rotate (angle, options) {
if (this.options.useExifOrientation || this.options.angle || this.options.rotationAngle) {
this.options.debuglog('ignoring previous rotate options');
}
if (!is.defined(angle)) {
this.options.useExifOrientation = true;
} else if (is.integer(angle) && !(angle % 90)) {
@@ -61,7 +77,7 @@ function rotate (angle, options) {
}
/**
* Flip the image about the vertical Y axis. This always occurs after rotation, if any.
* Flip the image about the vertical Y axis. This always occurs before rotation, if any.
* The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
*
* @example
@@ -76,7 +92,7 @@ function flip (flip) {
}
/**
* Flop the image about the horizontal X axis. This always occurs after rotation, if any.
* Flop the image about the horizontal X axis. This always occurs before rotation, if any.
* The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
*
* @example
@@ -205,7 +221,7 @@ function affine (matrix, options) {
* const data = await sharp(input)
* .sharpen({
* sigma: 2,
* m1: 0
* m1: 0,
* m2: 3,
* x1: 3,
* y2: 15,
@@ -628,26 +644,55 @@ function boolean (operand, operator, options) {
}
/**
* Apply the linear formula a * input + b to the image (levels adjustment)
* @param {number} [a=1.0] multiplier
* @param {number} [b=0.0] offset
* Apply the linear formula `a` * input + `b` to the image to adjust image levels.
*
* When a single number is provided, it will be used for all image channels.
* When an array of numbers is provided, the array length must match the number of channels.
*
* @example
* await sharp(input)
* .linear(0.5, 2)
* .toBuffer();
*
* @example
* await sharp(rgbInput)
* .linear(
* [0.25, 0.5, 0.75],
* [150, 100, 50]
* )
* .toBuffer();
*
* @param {(number|number[])} [a=[]] multiplier
* @param {(number|number[])} [b=[]] offset
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function linear (a, b) {
if (!is.defined(a) && is.number(b)) {
a = 1.0;
} else if (is.number(a) && !is.defined(b)) {
b = 0.0;
}
if (!is.defined(a)) {
this.options.linearA = 1.0;
this.options.linearA = [];
} else if (is.number(a)) {
this.options.linearA = [a];
} else if (Array.isArray(a) && a.length && a.every(is.number)) {
this.options.linearA = a;
} else {
throw is.invalidParameterError('a', 'numeric', a);
throw is.invalidParameterError('a', 'number or array of numbers', a);
}
if (!is.defined(b)) {
this.options.linearB = 0.0;
this.options.linearB = [];
} else if (is.number(b)) {
this.options.linearB = [b];
} else if (Array.isArray(b) && b.length && b.every(is.number)) {
this.options.linearB = b;
} else {
throw is.invalidParameterError('b', 'numeric', b);
throw is.invalidParameterError('b', 'number or array of numbers', b);
}
if (this.options.linearA.length !== this.options.linearB.length) {
throw new Error('Expected a and b to be arrays of the same length');
}
return this;
}


@@ -10,6 +10,9 @@ const formats = new Map([
['avif', 'avif'],
['jpeg', 'jpeg'],
['jpg', 'jpeg'],
['jpe', 'jpeg'],
['tile', 'tile'],
['dz', 'tile'],
['png', 'png'],
['raw', 'raw'],
['tiff', 'tiff'],
@@ -55,6 +58,7 @@ const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math
* `info` contains the output image `format`, `size` (bytes), `width`, `height`,
* `channels` and `premultiplied` (indicating if premultiplication was used).
* When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
* May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
* @returns {Promise<Object>} - when no callback is provided
* @throws {Error} Invalid parameters
*/
@@ -95,6 +99,7 @@ function toFile (fileOut, callback) {
* - `info` contains the output image `format`, `size` (bytes), `width`, `height`,
* `channels` and `premultiplied` (indicating if premultiplication was used).
* When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
* May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
*
* A `Promise` is returned when `callback` is not provided.
*
@@ -402,6 +407,14 @@ function png (options) {
if (is.defined(options.adaptiveFiltering)) {
this._setBooleanOption('pngAdaptiveFiltering', options.adaptiveFiltering);
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
this.options.pngBitdepth = bitdepthFromColourCount(colours);
} else {
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
}
}
if (is.defined(options.palette)) {
this._setBooleanOption('pngPalette', options.palette);
} else if ([options.quality, options.effort, options.colours, options.colors, options.dither].some(is.defined)) {
@@ -422,14 +435,6 @@ function png (options) {
throw is.invalidParameterError('effort', 'integer between 1 and 10', options.effort);
}
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
this.options.pngBitdepth = bitdepthFromColourCount(colours);
} else {
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
}
}
if (is.defined(options.dither)) {
if (is.number(options.dither) && is.inRange(options.dither, 0, 1)) {
this.options.pngDither = options.dither;
@@ -466,6 +471,8 @@ function png (options) {
* @param {number} [options.effort=4] - CPU effort, between 0 (fastest) and 6 (slowest)
* @param {number} [options.loop=0] - number of animation iterations, use 0 for infinite animation
* @param {number|number[]} [options.delay] - delay(s) between animation frames (in milliseconds)
* @param {boolean} [options.minSize=false] - prevent use of animation key frames to minimise file size (slow)
* @param {boolean} [options.mixed=false] - allow mixture of lossy and lossless animation frames (slow)
* @param {boolean} [options.force=true] - force WebP output, otherwise attempt to use input format
* @returns {Sharp}
* @throws {Error} Invalid options
@@ -495,14 +502,19 @@ function webp (options) {
if (is.defined(options.smartSubsample)) {
this._setBooleanOption('webpSmartSubsample', options.smartSubsample);
}
const effort = options.effort || options.reductionEffort;
if (is.defined(effort)) {
if (is.integer(effort) && is.inRange(effort, 0, 6)) {
this.options.webpEffort = effort;
if (is.defined(options.effort)) {
if (is.integer(options.effort) && is.inRange(options.effort, 0, 6)) {
this.options.webpEffort = options.effort;
} else {
throw is.invalidParameterError('effort', 'integer between 0 and 6', effort);
throw is.invalidParameterError('effort', 'integer between 0 and 6', options.effort);
}
}
if (is.defined(options.minSize)) {
this._setBooleanOption('webpMinSize', options.minSize);
}
if (is.defined(options.mixed)) {
this._setBooleanOption('webpMixed', options.mixed);
}
}
trySetAnimationOptions(options, this.options);
return this._updateFormatOut('webp', options);
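A sketch of the new animation options; both trade encoding speed for smaller output, and the file names are assumptions:
```javascript
await sharp('animated-in.webp', { animated: true })
  .webp({ effort: 6, minSize: true, mixed: true })
  .toFile('animated-out.webp');
```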
@@ -513,6 +525,8 @@ function webp (options) {
*
* The first entry in the palette is reserved for transparency.
*
* The palette of the input image will be re-used if possible.
*
* @since 0.30.0
*
* @example
@@ -534,6 +548,8 @@ function webp (options) {
* .toBuffer();
*
* @param {Object} [options] - output options
* @param {boolean} [options.reoptimise=false] - always generate new palettes (slow), re-use existing by default
* @param {boolean} [options.reoptimize=false] - alternative spelling of `options.reoptimise`
* @param {number} [options.colours=256] - maximum number of palette entries, including transparency, between 2 and 256
* @param {number} [options.colors=256] - alternative spelling of `options.colours`
* @param {number} [options.effort=7] - CPU effort, between 1 (fastest) and 10 (slowest)
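A sketch combining the new `reoptimise` option with a reduced palette; the file names are assumptions:
```javascript
// Generate a fresh 128-entry palette rather than re-using the input palette.
await sharp('in.gif', { animated: true })
  .gif({ reoptimise: true, colours: 128 })
  .toFile('out.gif');
```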
@@ -546,6 +562,11 @@ function webp (options) {
*/
function gif (options) {
if (is.object(options)) {
if (is.defined(options.reoptimise)) {
this._setBooleanOption('gifReoptimise', options.reoptimise);
} else if (is.defined(options.reoptimize)) {
this._setBooleanOption('gifReoptimise', options.reoptimize);
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
@@ -702,7 +723,7 @@ function trySetAnimationOptions (source, target) {
* @param {Object} [options] - output options
* @param {number} [options.quality=80] - quality, integer 1-100
* @param {boolean} [options.force=true] - force TIFF output, otherwise attempt to use input format
* @param {string} [options.compression='jpeg'] - compression options: lzw, deflate, jpeg, ccittfax4
* @param {string} [options.compression='jpeg'] - compression options: none, jpeg, deflate, packbits, ccittfax4, lzw, webp, zstd, jp2k
* @param {string} [options.predictor='horizontal'] - compression predictor options: none, horizontal, float
* @param {boolean} [options.pyramid=false] - write an image pyramid
* @param {boolean} [options.tile=false] - write a tiled tiff
@@ -770,10 +791,10 @@ function tiff (options) {
}
// compression
if (is.defined(options.compression)) {
if (is.string(options.compression) && is.inArray(options.compression, ['lzw', 'deflate', 'jpeg', 'ccittfax4', 'none'])) {
if (is.string(options.compression) && is.inArray(options.compression, ['none', 'jpeg', 'deflate', 'packbits', 'ccittfax4', 'lzw', 'webp', 'zstd', 'jp2k'])) {
this.options.tiffCompression = options.compression;
} else {
throw is.invalidParameterError('compression', 'one of: lzw, deflate, jpeg, ccittfax4, none', options.compression);
throw is.invalidParameterError('compression', 'one of: none, jpeg, deflate, packbits, ccittfax4, lzw, webp, zstd, jp2k', options.compression);
}
}
// predictor
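A sketch of one of the newly-exposed `compression` values; the file names are assumptions:
```javascript
await sharp('input.tiff')
  .tiff({ compression: 'webp' })
  .toFile('output-webp-compressed.tiff');
```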
@@ -804,6 +825,16 @@ function tiff (options) {
*
* AVIF image sequences are not supported.
*
* @example
* const data = await sharp(input)
* .avif({ effort: 2 })
* .toBuffer();
*
* @example
* const data = await sharp(input)
* .avif({ lossless: true })
* .toBuffer();
*
* @since 0.27.0
*
* @param {Object} [options] - output options
@@ -821,9 +852,14 @@ function avif (options) {
/**
* Use these HEIF options for output image.
*
* Support for patent-encumbered HEIC images requires the use of a
* Support for patent-encumbered HEIC images using `hevc` compression requires the use of a
* globally-installed libvips compiled with support for libheif, libde265 and x265.
*
* @example
* const data = await sharp(input)
* .heif({ compression: 'hevc' })
* .toBuffer();
*
* @since 0.23.0
*
* @param {Object} [options] - output options
@@ -864,12 +900,6 @@ function heif (options) {
} else {
throw is.invalidParameterError('effort', 'integer between 0 and 9', options.effort);
}
} else if (is.defined(options.speed)) {
if (is.integer(options.speed) && is.inRange(options.speed, 0, 9)) {
this.options.heifEffort = 9 - options.speed;
} else {
throw is.invalidParameterError('speed', 'integer between 0 and 9', options.speed);
}
}
if (is.defined(options.chromaSubsampling)) {
if (is.string(options.chromaSubsampling) && is.inArray(options.chromaSubsampling, ['4:2:0', '4:4:4'])) {
@@ -923,9 +953,12 @@ function raw (options) {
/**
* Use tile-based deep zoom (image pyramid) output.
*
* Set the format and options for tile images via the `toFormat`, `jpeg`, `png` or `webp` functions.
* Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed archive file format.
*
* The container will be set to `zip` when the output is a Buffer or Stream, otherwise it will default to `fs`.
*
* @example
* sharp('input.tiff')
* .png()
@@ -937,6 +970,17 @@ function raw (options) {
* // output_files contains 512x512 tiles grouped by zoom level
* });
*
* @example
* const zipFileWithTiles = await sharp(input)
* .tile({ basename: "tiles" })
* .toBuffer();
*
* @example
* const iiififier = sharp().tile({ layout: "iiif" });
* readableStream
* .pipe(iiififier)
* .pipe(writeableStream);
*
* @param {Object} [options]
* @param {number} [options.size=256] tile size in pixels, a value between 1 and 8192.
* @param {number} [options.overlap=0] tile overlap in pixels, a value between 0 and 8192.
@@ -949,6 +993,7 @@ function raw (options) {
* @param {boolean} [options.centre=false] centre image in tile.
* @param {boolean} [options.center=false] alternative spelling of centre.
* @param {string} [options.id='https://example.com/iiif'] when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json`
* @param {string} [options.basename] the name of the directory within the zip file when container is `zip`.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
@@ -1030,6 +1075,14 @@ function tile (options) {
throw is.invalidParameterError('id', 'string', options.id);
}
}
// Basename for zip container
if (is.defined(options.basename)) {
if (is.string(options.basename)) {
this.options.tileBasename = options.basename;
} else {
throw is.invalidParameterError('basename', 'string', options.basename);
}
}
}
// Format
if (is.inArray(this.options.formatOut, ['jpeg', 'png', 'webp'])) {
@@ -1047,6 +1100,17 @@ function tile (options) {
* The clock starts when libvips opens an input image for processing.
* Time spent waiting for a libuv thread to become available is not included.
*
* @example
* // Ensure processing takes no longer than 3 seconds
* try {
* const data = await sharp(input)
* .blur(1000)
* .timeout({ seconds: 3 })
* .toBuffer();
* } catch (err) {
* if (err.message.includes('timeout')) { ... }
* }
*
* @since 0.29.2
*
* @param {Object} options


@@ -92,6 +92,13 @@ function isRotationExpected (options) {
return (options.angle % 360) !== 0 || options.useExifOrientation === true || options.rotationAngle !== 0;
}
/**
* @private
*/
function isResizeExpected (options) {
return options.width !== -1 || options.height !== -1;
}
/**
* Resize image to `width`, `height` or `width x height`.
*
@@ -123,6 +130,9 @@ function isRotationExpected (options) {
* - `lanczos2`: Use a [Lanczos kernel](https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel) with `a=2`.
* - `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
*
* Only one resize can occur per pipeline.
* Previous calls to `resize` in the same pipeline will be ignored.
*
* @example
* sharp(input)
* .resize({ width: 100 })
@@ -211,7 +221,7 @@ function isRotationExpected (options) {
* @param {String} [options.height] - alternative means of specifying `height`. If both are present this takes priority.
* @param {String} [options.fit='cover'] - how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`.
* @param {String} [options.position='centre'] - position, gravity or strategy to use when `fit` is `cover` or `contain`.
* @param {String|Object} [options.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour when using a `fit` of `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
* @param {String|Object} [options.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour when `fit` is `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
* @param {String} [options.kernel='lanczos3'] - the kernel to use for image reduction.
* @param {Boolean} [options.withoutEnlargement=false] - do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option.
* @param {Boolean} [options.withoutReduction=false] - do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option.
@@ -220,6 +230,9 @@ function isRotationExpected (options) {
* @throws {Error} Invalid parameters
*/
function resize (width, height, options) {
if (isResizeExpected(this.options)) {
this.options.debuglog('ignoring previous resize options');
}
if (is.defined(width)) {
if (is.object(width) && !is.defined(options)) {
options = width;
@@ -300,6 +313,9 @@ function resize (width, height, options) {
this._setBooleanOption('fastShrinkOnLoad', options.fastShrinkOnLoad);
}
}
if (isRotationExpected(this.options) && isResizeExpected(this.options)) {
this.options.rotateBeforePreExtract = true;
}
return this;
}
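A sketch of the warning behaviour: only the last `resize` in a pipeline takes effect and the ignored call is reported via the `warning` event; `input` is assumed to be a valid image source:
```javascript
sharp(input)
  .on('warning', (message) => console.warn(message)) // e.g. "ignoring previous resize options"
  .resize(100)
  .resize({ width: 200, height: 200, fit: 'inside' }) // this call wins
  .toBuffer();
```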
@@ -412,7 +428,10 @@ function extend (extend) {
* @throws {Error} Invalid parameters
*/
function extract (options) {
const suffix = this.options.width === -1 && this.options.height === -1 ? 'Pre' : 'Post';
const suffix = isResizeExpected(this.options) || isRotationExpected(this.options) ? 'Post' : 'Pre';
if (this.options[`width${suffix}`] !== -1) {
this.options.debuglog('ignoring previous extract options');
}
['left', 'top', 'width', 'height'].forEach(function (name) {
const value = options[name];
if (is.integer(value) && value >= 0) {
@@ -422,32 +441,90 @@ function extract (options) {
}
}, this);
// Ensure existing rotation occurs before pre-resize extraction
if (suffix === 'Pre' && isRotationExpected(this.options)) {
this.options.rotateBeforePreExtract = true;
if (isRotationExpected(this.options) && !isResizeExpected(this.options)) {
if (this.options.widthPre === -1 || this.options.widthPost === -1) {
this.options.rotateBeforePreExtract = true;
}
}
return this;
}
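A sketch of the pre/post behaviour: an `extract` before `resize` crops the original image, while a second `extract` afterwards crops the resized result; `input` is assumed to be a valid image source:
```javascript
await sharp(input)
  .extract({ left: 100, top: 100, width: 400, height: 400 }) // applied before the resize
  .resize(200)
  .extract({ left: 10, top: 10, width: 100, height: 100 })   // applied after the resize
  .toBuffer();
```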
/**
* Trim "boring" pixels from all edges that contain values similar to the top-left pixel.
* Images consisting entirely of a single colour will calculate "boring" using the alpha channel, if any.
* Trim pixels from all edges that contain values similar to the given background colour, which defaults to that of the top-left pixel.
*
* Images with an alpha channel will use the combined bounding box of alpha and non-alpha channels.
*
* If the result of this operation would trim an image to nothing then no change is made.
*
* The `info` response Object, obtained from callback of `.toFile()` or `.toBuffer()`,
* will contain `trimOffsetLeft` and `trimOffsetTop` properties.
*
* @param {number} [threshold=10] the allowed difference from the top-left pixel, a number greater than zero.
* @example
* // Trim pixels with a colour similar to that of the top-left pixel.
* sharp(input)
* .trim()
* .toFile(output, function(err, info) {
* ...
* });
* @example
* // Trim pixels with the exact same colour as that of the top-left pixel.
* sharp(input)
* .trim(0)
* .toFile(output, function(err, info) {
* ...
* });
* @example
* // Trim only pixels with a similar colour to red.
* sharp(input)
* .trim("#FF0000")
* .toFile(output, function(err, info) {
* ...
* });
* @example
* // Trim all "yellow-ish" pixels, being more lenient with the higher threshold.
* sharp(input)
* .trim({
* background: "yellow",
* threshold: 42,
* })
* .toFile(output, function(err, info) {
* ...
* });
*
* @param {string|number|Object} trim - the specific background colour to trim, the threshold for doing so or an Object with both.
* @param {string|Object} [trim.background='top-left pixel'] - background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to that of the top-left pixel.
* @param {number} [trim.threshold=10] - the allowed difference from the above colour, a positive number.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function trim (threshold) {
if (!is.defined(threshold)) {
function trim (trim) {
if (!is.defined(trim)) {
this.options.trimThreshold = 10;
} else if (is.number(threshold) && threshold > 0) {
this.options.trimThreshold = threshold;
} else if (is.string(trim)) {
this._setBackgroundColourOption('trimBackground', trim);
this.options.trimThreshold = 10;
} else if (is.number(trim)) {
if (trim >= 0) {
this.options.trimThreshold = trim;
} else {
throw is.invalidParameterError('threshold', 'positive number', trim);
}
} else if (is.object(trim)) {
this._setBackgroundColourOption('trimBackground', trim.background);
if (!is.defined(trim.threshold)) {
this.options.trimThreshold = 10;
} else if (is.number(trim.threshold)) {
if (trim.threshold >= 0) {
this.options.trimThreshold = trim.threshold;
} else {
throw is.invalidParameterError('threshold', 'positive number', trim);
}
}
} else {
throw is.invalidParameterError('threshold', 'number greater than zero', threshold);
throw is.invalidParameterError('trim', 'string, number or object', trim);
}
if (this.options.trimThreshold && isRotationExpected(this.options)) {
if (isRotationExpected(this.options)) {
this.options.rotateBeforePreExtract = true;
}
return this;


@@ -12,6 +12,9 @@ try {
help.push('- Update Homebrew: "brew update && brew upgrade vips"');
} else {
const [platform, arch] = platformAndArch.split('-');
if (platform === 'linux' && /Module did not self-register/.test(err.message)) {
help.push('- Using worker threads? See https://sharp.pixelplumbing.com/install#worker-threads');
}
help.push(
'- Install with verbose logging and look for errors: "npm install --ignore-scripts=false --foreground-scripts --verbose sharp"',
`- Install for the current ${platformAndArch} runtime: "npm install --platform=${platform} --arch=${arch} sharp"`


@@ -17,6 +17,10 @@ const sharp = require('./sharp');
* @returns {Object}
*/
const format = sharp.format();
format.heif.output.alias = ['avif', 'heic'];
format.jpeg.output.alias = ['jpe', 'jpg'];
format.tiff.output.alias = ['tif'];
format.jp2k.output.alias = ['j2c', 'j2k', 'jp2', 'jpx'];
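The aliases are visible to callers via the `format` attribute, for example:
```javascript
const sharp = require('sharp');
console.log(sharp.format.heif.output.alias); // ['avif', 'heic']
console.log(sharp.format.jpeg.output.alias); // ['jpe', 'jpg']
```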
/**
* An Object containing the available interpolators and their proper values


@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, GIF, AVIF and TIFF images",
"version": "0.30.6",
"version": "0.31.0",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -83,7 +83,9 @@
"Chris Banks <christopher.bradley.banks@gmail.com>",
"Ompal Singh <ompal.hitm09@gmail.com>",
"Brodan <christopher.hranj@gmail.com",
"Ankur Parihar <ankur.github@gmail.com>"
"Ankur Parihar <ankur.github@gmail.com>",
"Brahim Ait elhaj <brahima@gmail.com>",
"Mart Jansink <m.jansink@gmail.com>"
],
"scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node install/can-compile && node-gyp rebuild && node install/dll-copy)",
@@ -131,55 +133,55 @@
"color": "^4.2.3",
"detect-libc": "^2.0.1",
"node-addon-api": "^5.0.0",
"prebuild-install": "^7.1.0",
"prebuild-install": "^7.1.1",
"semver": "^7.3.7",
"simple-get": "^4.0.1",
"tar-fs": "^2.1.1",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
"async": "^3.2.3",
"async": "^3.2.4",
"cc": "^3.0.1",
"decompress-zip": "^0.3.3",
"documentation": "^13.2.5",
"documentation": "^14.0.0",
"exif-reader": "^1.0.3",
"extract-zip": "^2.0.1",
"icc": "^2.0.0",
"license-checker": "^25.0.1",
"mocha": "^10.0.0",
"mock-fs": "^5.1.2",
"mock-fs": "^5.1.4",
"nyc": "^15.1.0",
"prebuild": "^11.0.3",
"prebuild": "^11.0.4",
"rimraf": "^3.0.2",
"semistandard": "^16.0.1"
},
"license": "Apache-2.0",
"config": {
"libvips": "8.12.2",
"libvips": "8.13.1",
"integrity": {
"darwin-arm64v8": "sha512-p46s/bbJAjkOXzPISZt9HUpG9GWjwQkYnLLRLKzsBJHLtB3X6C6Y/zXI5Hd0DOojcFkks9a0kTN+uDQ/XJY19g==",
"darwin-x64": "sha512-6vOHVZnvXwe6EXRsy29jdkUzBE6ElNpXUwd+m8vV7qy32AnXu7B9YemHsZ44vWviIwPZeXF6Nhd9EFLM0wWohw==",
"linux-arm64v8": "sha512-XwZdS63yhqLtbFtx/0eoLF/Agf5qtTrI11FMnMRpuBJWd4jHB60RAH+uzYUgoChCmKIS+AeXYMLm4d8Ns2QX8w==",
"linux-armv6": "sha512-Rh0Q0kqwPG2MjXWOkPCuPEyiUKFgKJYWLgS835D4MrXgdKr8Tft/eVrc2iGIxt2re30VpDiZ1h0Rby1aCZt2zw==",
"linux-armv7": "sha512-heTS/MsmRvu4JljINxP+vDiS9ZZfuGhr3IStb5F7Gc0/QLRhllYAg4rcO8L1eTK9sIIzG5ARvI19+YUZe7WbzA==",
"linux-x64": "sha512-SSWAwBFi0hx8V/h/v82tTFGKWTFv9FiCK3Timz5OExuI+sX1Ngrd0PVQaWXOThGNdel/fcD3Bz9YjSt4feBR1g==",
"linuxmusl-arm64v8": "sha512-Rhks+5C7p7aO6AucLT1uvzo8ohlqcqCUPgZmN+LZjsPWob/Iix3MfiDYtv/+gTvdeEfXxbCU6/YuPBwHQ7/crA==",
"linuxmusl-x64": "sha512-IOyjSQqpWVntqOUpCHVWuQwACwmmjdi15H8Pc+Ma1JkhPogTfVsFQWyL7DuOTD3Yr23EuYGzovUX00duOtfy/g==",
"win32-arm64v8": "sha512-A+Qe8Ipewtvw9ldvF6nWed2J8kphzrUE04nFeKCtNx6pfGQ/MAlCKMjt/U8VgUKNjB01zJDUW9XE0+FhGZ/UpQ==",
"win32-ia32": "sha512-cMrAvwFdDeAVnLJt0IPMPRKaIFhyXYGTprsM0DND9VUHE8F7dJMR44tS5YkXsGh1QNDtjKT6YuxAVUglmiXtpA==",
"win32-x64": "sha512-vLFIfw6aW2zABa8jpgzWDhljnE6glktrddErVyazAIoHl6BFFe/Da+LK1DbXvIYHz7fyOoKhSfCJHCiJG1Vg6w=="
"darwin-arm64v8": "sha512-JdpGTx67RDbvRkg3ljFvTzqoq+oBXmMdDFEp0expDYXmP5HLH+GCkikmsROlGltgfKE2KqL/qwpxTEhIwMK/3A==",
"darwin-x64": "sha512-0Oh4/hEDnzV+X8MiiyUQ4G/Zh/MHw9rKstfuX0P1czgaxS2hX8Pxdbzdk1oqwTOEYVEGO/hMm9ItCVZ3RVPPaA==",
"linux-arm64v8": "sha512-9pSlPzEojt6ue5vXfASNMhQO1YS1p4i4Wydu+bzOfMtIPSBRXbu/+y8WELbbo03Ts7pftm9KtrMHitCVdy5EXw==",
"linux-armv6": "sha512-sv2FqS/ggpQly7h5/+nh8txQDulolE5ptaE90PO7iwfTont8N42pudeqootWKsuf0fRmkW4M92184VfVVYCvGw==",
"linux-armv7": "sha512-LmQIB8FDfasK6BsFhnE7ZI3LMlxh/rF5tZRNQ/uoTbF2xrtWQqqgiZgCifJByiEM+1tR7RxwNdnjxZhWvM9WmQ==",
"linux-x64": "sha512-JBRf8WBnlVw/K1jpSvmeZpnGZGjeqhG2NDEiQV/hUze3zgDGwDza4oiworaQExQmKcDrc2LJKF14Nsz1qQSNJw==",
"linuxmusl-arm64v8": "sha512-yzUQO5isDwsRpEUxbMXBeWp0sKhWghebrSK46SUF5mvB/kq6hZ7JbRuJ2aZjE84K/HUTyuCc0kE+M3m8naOs+g==",
"linuxmusl-x64": "sha512-H3Vz1QaaZ6X5iEbfPST7TPFwDO01tI8dk1osLm6l4a17BWCaOMaBQlqxgTgYrtd09JJ9CvGoq5fo5j5TPxUc4Q==",
"win32-arm64v8": "sha512-b5Ver+uwOJhdOGqvZVM+qF2KLKcowcac/wKK5Fg0czqlSMqP/KxDF2kxw2eKXUJNgfqe4eDH1QG/yTg2pQSetQ==",
"win32-ia32": "sha512-h/SJ/Yfn0ce9H70vt1wS8rZ4PfHnguCCTsOGik7e6O/e2AlBQOM0mKsPIB9jSOquoCP8rP0qF6AOPOjXKnCk+w==",
"win32-x64": "sha512-p9qpdWdhZooPteib92Kk+qF1vvzcScxvOwdIP8muhgo/A8uDI4/mqXCpEbMBw6vjETKlS3qo2JUbVF6+0/lyWQ=="
},
"runtime": "napi",
"target": 5
"target": 7
},
"engines": {
"node": ">=12.13.0"
"node": ">=14.15.0"
},
"funding": {
"url": "https://opencollective.com/libvips"
},
"binary": {
"napi_versions": [
5
7
]
},
"semistandard": {


@@ -88,18 +88,14 @@ namespace sharp {
descriptor->buffer = buffer.Data();
descriptor->isBuffer = TRUE;
}
descriptor->failOn = static_cast<VipsFailOn>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FAIL_ON,
AttrAsStr(input, "failOn").data()));
descriptor->failOn = AttrAsEnum<VipsFailOn>(input, "failOn", VIPS_TYPE_FAIL_ON);
// Density for vector-based input
if (HasAttr(input, "density")) {
descriptor->density = AttrAsDouble(input, "density");
}
// Raw pixel input
if (HasAttr(input, "rawChannels")) {
descriptor->rawDepth = static_cast<VipsBandFormat>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BAND_FORMAT,
AttrAsStr(input, "rawDepth").data()));
descriptor->rawDepth = AttrAsEnum<VipsBandFormat>(input, "rawDepth", VIPS_TYPE_BAND_FORMAT);
descriptor->rawChannels = AttrAsUint32(input, "rawChannels");
descriptor->rawWidth = AttrAsUint32(input, "rawWidth");
descriptor->rawHeight = AttrAsUint32(input, "rawHeight");
@@ -133,11 +129,42 @@ namespace sharp {
descriptor->createBackground = AttrAsVectorOfDouble(input, "createBackground");
}
}
// Create new image with text
if (HasAttr(input, "textValue")) {
descriptor->textValue = AttrAsStr(input, "textValue");
if (HasAttr(input, "textFont")) {
descriptor->textFont = AttrAsStr(input, "textFont");
}
if (HasAttr(input, "textFontfile")) {
descriptor->textFontfile = AttrAsStr(input, "textFontfile");
}
if (HasAttr(input, "textWidth")) {
descriptor->textWidth = AttrAsUint32(input, "textWidth");
}
if (HasAttr(input, "textHeight")) {
descriptor->textHeight = AttrAsUint32(input, "textHeight");
}
if (HasAttr(input, "textAlign")) {
descriptor->textAlign = AttrAsEnum<VipsAlign>(input, "textAlign", VIPS_TYPE_ALIGN);
}
if (HasAttr(input, "textJustify")) {
descriptor->textJustify = AttrAsBool(input, "textJustify");
}
if (HasAttr(input, "textDpi")) {
descriptor->textDpi = AttrAsUint32(input, "textDpi");
}
if (HasAttr(input, "textRgba")) {
descriptor->textRgba = AttrAsBool(input, "textRgba");
}
if (HasAttr(input, "textSpacing")) {
descriptor->textSpacing = AttrAsUint32(input, "textSpacing");
}
}
// Limit input images to a given number of pixels, where pixels = width * height
descriptor->limitInputPixels = static_cast<uint64_t>(AttrAsInt64(input, "limitInputPixels"));
// Allow switch from random to sequential access
descriptor->access = AttrAsBool(input, "sequentialRead") ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
// Remove safety features and allow unlimited SVG/PNG input
// Remove safety features and allow unlimited input
descriptor->unlimited = AttrAsBool(input, "unlimited");
return descriptor;
}
@@ -250,9 +277,9 @@ namespace sharp {
{ "VipsForeignLoadMagickBuffer", ImageType::MAGICK },
{ "VipsForeignLoadMagick7File", ImageType::MAGICK },
{ "VipsForeignLoadMagick7Buffer", ImageType::MAGICK },
{ "VipsForeignLoadOpenslide", ImageType::OPENSLIDE },
{ "VipsForeignLoadOpenslideFile", ImageType::OPENSLIDE },
{ "VipsForeignLoadPpmFile", ImageType::PPM },
{ "VipsForeignLoadFits", ImageType::FITS },
{ "VipsForeignLoadFitsFile", ImageType::FITS },
{ "VipsForeignLoadOpenexr", ImageType::EXR },
{ "VipsForeignLoadVips", ImageType::VIPS },
{ "VipsForeignLoadVipsFile", ImageType::VIPS },
@@ -307,6 +334,17 @@ namespace sharp {
imageType == ImageType::PDF;
}
/*
Does this image type support removal of safety limits?
*/
bool ImageTypeSupportsUnlimited(ImageType imageType) {
return
imageType == ImageType::JPEG ||
imageType == ImageType::PNG ||
imageType == ImageType::SVG ||
imageType == ImageType::HEIF;
}
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/
@@ -335,7 +373,7 @@ namespace sharp {
vips::VOption *option = VImage::option()
->set("access", descriptor->access)
->set("fail_on", descriptor->failOn);
if (descriptor->unlimited && (imageType == ImageType::SVG || imageType == ImageType::PNG)) {
if (descriptor->unlimited && ImageTypeSupportsUnlimited(imageType)) {
option->set("unlimited", TRUE);
}
if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
@@ -366,34 +404,63 @@ namespace sharp {
}
}
} else {
if (descriptor->createChannels > 0) {
int const channels = descriptor->createChannels;
if (channels > 0) {
// Create new image
if (descriptor->createNoiseType == "gaussian") {
int const channels = descriptor->createChannels;
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight);
std::vector<VImage> bands = {};
bands.reserve(channels);
for (int _band = 0; _band < channels; _band++) {
bands.push_back(image.gaussnoise(
descriptor->createWidth,
descriptor->createHeight,
VImage::option()->set("mean", descriptor->createNoiseMean)->set("sigma", descriptor->createNoiseSigma)));
bands.push_back(VImage::gaussnoise(descriptor->createWidth, descriptor->createHeight, VImage::option()
->set("mean", descriptor->createNoiseMean)
->set("sigma", descriptor->createNoiseSigma)));
}
image = image.bandjoin(bands);
image = VImage::bandjoin(bands).copy(VImage::option()->set("interpretation",
channels < 3 ? VIPS_INTERPRETATION_B_W: VIPS_INTERPRETATION_sRGB));
} else {
std::vector<double> background = {
descriptor->createBackground[0],
descriptor->createBackground[1],
descriptor->createBackground[2]
};
if (descriptor->createChannels == 4) {
if (channels == 4) {
background.push_back(descriptor->createBackground[3]);
}
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight).new_from_image(background);
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight)
.copy(VImage::option()->set("interpretation",
channels < 3 ? VIPS_INTERPRETATION_B_W : VIPS_INTERPRETATION_sRGB))
.new_from_image(background);
}
image.get_image()->Type = image.guess_interpretation();
image = image.cast(VIPS_FORMAT_UCHAR);
imageType = ImageType::RAW;
} else if (descriptor->textValue.length() > 0) {
// Create a new image with text
vips::VOption *textOptions = VImage::option()
->set("align", descriptor->textAlign)
->set("justify", descriptor->textJustify)
->set("rgba", descriptor->textRgba)
->set("spacing", descriptor->textSpacing)
->set("autofit_dpi", &descriptor->textAutofitDpi);
if (descriptor->textWidth > 0) {
textOptions->set("width", descriptor->textWidth);
}
// Ignore dpi if height is set
if (descriptor->textWidth > 0 && descriptor->textHeight > 0) {
textOptions->set("height", descriptor->textHeight);
} else if (descriptor->textDpi > 0) {
textOptions->set("dpi", descriptor->textDpi);
}
if (descriptor->textFont.length() > 0) {
textOptions->set("font", const_cast<char*>(descriptor->textFont.data()));
}
if (descriptor->textFontfile.length() > 0) {
textOptions->set("fontfile", const_cast<char*>(descriptor->textFontfile.data()));
}
image = VImage::text(const_cast<char *>(descriptor->textValue.data()), textOptions);
if (!descriptor->textRgba) {
image = image.copy(VImage::option()->set("interpretation", VIPS_INTERPRETATION_B_W));
}
imageType = ImageType::RAW;
} else {
// From filesystem
imageType = DetermineImageType(descriptor->file.data());
@@ -409,7 +476,7 @@ namespace sharp {
vips::VOption *option = VImage::option()
->set("access", descriptor->access)
->set("fail_on", descriptor->failOn);
if (descriptor->unlimited && (imageType == ImageType::SVG || imageType == ImageType::PNG)) {
if (descriptor->unlimited && ImageTypeSupportsUnlimited(imageType)) {
option->set("unlimited", TRUE);
}
if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
@@ -637,7 +704,6 @@ namespace sharp {
Event listener for progress updates, used to detect timeout
*/
void VipsProgressCallBack(VipsImage *im, VipsProgress *progress, int *timeout) {
// printf("VipsProgressCallBack progress=%d run=%d timeout=%d\n", progress->percent, progress->run, *timeout);
if (*timeout > 0 && progress->run >= *timeout) {
vips_image_set_kill(im, TRUE);
vips_error("timeout", "%d%% complete", progress->percent);
@@ -794,22 +860,6 @@ namespace sharp {
return Is16Bit(interpretation) ? 65535.0 : 255.0;
}
/*
Get boolean operation type from string
*/
VipsOperationBoolean GetBooleanOperation(std::string const opStr) {
return static_cast<VipsOperationBoolean>(
vips_enum_from_nick(nullptr, VIPS_TYPE_OPERATION_BOOLEAN, opStr.data()));
}
/*
Get interpretation type from string
*/
VipsInterpretation GetInterpretation(std::string const typeStr) {
return static_cast<VipsInterpretation>(
vips_enum_from_nick(nullptr, VIPS_TYPE_INTERPRETATION, typeStr.data()));
}
/*
Convert RGBA value to another colourspace
*/
@@ -890,7 +940,7 @@ namespace sharp {
std::pair<double, double> ResolveShrink(int width, int height, int targetWidth, int targetHeight,
Canvas canvas, bool swap, bool withoutEnlargement, bool withoutReduction) {
if (swap) {
if (swap && canvas != Canvas::IGNORE_ASPECT) {
// Swap input width and height when requested.
std::swap(width, height);
}
@@ -921,9 +971,6 @@ namespace sharp {
}
break;
case Canvas::IGNORE_ASPECT:
if (swap) {
std::swap(hshrink, vshrink);
}
break;
}
} else if (targetWidth > 0) {

View File

@@ -25,9 +25,9 @@
// Verify platform and compiler compatibility
#if (VIPS_MAJOR_VERSION < 8) || \
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 12) || \
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 12 && VIPS_MICRO_VERSION < 2)
#error "libvips version 8.12.2+ is required - please see https://sharp.pixelplumbing.com/install"
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 13) || \
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 13 && VIPS_MICRO_VERSION < 1)
#error "libvips version 8.13.1+ is required - please see https://sharp.pixelplumbing.com/install"
#endif
#if ((!defined(__clang__)) && defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6)))
@@ -71,6 +71,17 @@ namespace sharp {
std::string createNoiseType;
double createNoiseMean;
double createNoiseSigma;
std::string textValue;
std::string textFont;
std::string textFontfile;
int textWidth;
int textHeight;
VipsAlign textAlign;
bool textJustify;
int textDpi;
bool textRgba;
int textSpacing;
int textAutofitDpi;
InputDescriptor():
buffer(nullptr),
@@ -95,7 +106,15 @@ namespace sharp {
createHeight(0),
createBackground{ 0.0, 0.0, 0.0, 255.0 },
createNoiseMean(0.0),
createNoiseSigma(0.0) {}
createNoiseSigma(0.0),
textWidth(0),
textHeight(0),
textAlign(VIPS_ALIGN_LOW),
textJustify(FALSE),
textDpi(72),
textRgba(FALSE),
textSpacing(0),
textAutofitDpi(0) {}
};
// Convenience methods to access the attributes of a Napi::Object
@@ -110,6 +129,10 @@ namespace sharp {
bool AttrAsBool(Napi::Object obj, std::string attr);
std::vector<double> AttrAsVectorOfDouble(Napi::Object obj, std::string attr);
std::vector<int32_t> AttrAsInt32Vector(Napi::Object obj, std::string attr);
template <class T> T AttrAsEnum(Napi::Object obj, std::string attr, GType type) {
return static_cast<T>(
vips_enum_from_nick(nullptr, type, AttrAsStr(obj, attr).data()));
}
// Create an InputDescriptor instance from a Napi::Object describing an input image
InputDescriptor* CreateInputDescriptor(Napi::Object input);
@@ -183,6 +206,11 @@ namespace sharp {
*/
bool ImageTypeSupportsPage(ImageType imageType);
/*
Does this image type support removal of safety limits?
*/
bool ImageTypeSupportsUnlimited(ImageType imageType);
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/
@@ -307,16 +335,6 @@ namespace sharp {
*/
double MaximumImageAlpha(VipsInterpretation const interpretation);
/*
Get boolean operation type from string
*/
VipsOperationBoolean GetBooleanOperation(std::string const opStr);
/*
Get interpretation type from string
*/
VipsInterpretation GetInterpretation(std::string const typeStr);
/*
Convert RGBA value to another colourspace
*/

View File

@@ -31,7 +31,6 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif /*HAVE_CONFIG_H*/
#include <vips/intl.h>
#include <vips/vips8>

View File

@@ -30,7 +30,6 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif /*HAVE_CONFIG_H*/
#include <vips/intl.h>
#include <vips/vips8>

View File

@@ -38,7 +38,6 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif /*HAVE_CONFIG_H*/
#include <vips/intl.h>
#include <vips/vips8>
@@ -733,7 +732,7 @@ VImage::write_to_buffer( const char *suffix, void **buf, size_t *size,
set( "in", *this )->
set( "target", target ) );
g_object_get( target.get_target(), "blob", &blob, NULL );
g_object_get( target.get_target(), "blob", &blob, (void *) NULL );
}
else if( (operation_name = vips_foreign_find_save_buffer( filename )) ) {
call_option_string( operation_name, option_string,
@@ -778,6 +777,32 @@ VImage::write_to_target( const char *suffix, VTarget target,
set( "target", target ) );
}
VRegion
VImage::region() const
{
return VRegion::new_from_image( *this );
}
VRegion
VImage::region( VipsRect *rect ) const
{
VRegion region = VRegion::new_from_image( *this );
region.prepare( rect );
return region;
}
VRegion
VImage::region( int left, int top, int width, int height ) const
{
VRegion region = VRegion::new_from_image( *this );
region.prepare( left, top, width, height );
return region;
}
#include "vips-operators.cpp"
std::vector<VImage>

View File

@@ -31,7 +31,6 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif /*HAVE_CONFIG_H*/
#include <vips/intl.h>
#include <vips/vips8>

View File

@@ -0,0 +1,27 @@
// Object part of VRegion class
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif /*HAVE_CONFIG_H*/
#include <vips/vips8>
#include <vips/debug.h>
VIPS_NAMESPACE_START
VRegion
VRegion::new_from_image( VImage image )
{
VipsRegion *region;
if( !(region = vips_region_new( image.get_image() )) ) {
throw VError();
}
VRegion out( region );
return( out );
}
VIPS_NAMESPACE_END

View File

@@ -1,5 +1,4 @@
// bodies for vips operations
// Mon Nov 1 03:31:09 PM CET 2021
// this file is generated automatically, do not edit!
VImage VImage::CMC2LCh( VOption *options ) const
@@ -943,6 +942,14 @@ VipsBlob *VImage::dzsave_buffer( VOption *options ) const
return( buffer );
}
void VImage::dzsave_target( VTarget target, VOption *options ) const
{
call( "dzsave_target",
(options ? options : VImage::option())->
set( "in", *this )->
set( "target", target ) );
}
VImage VImage::embed( int x, int y, int width, int height, VOption *options ) const
{
VImage out;
@@ -3521,6 +3528,14 @@ VipsBlob *VImage::tiffsave_buffer( VOption *options ) const
return( buffer );
}
void VImage::tiffsave_target( VTarget target, VOption *options ) const
{
call( "tiffsave_target",
(options ? options : VImage::option())->
set( "in", *this )->
set( "target", target ) );
}
VImage VImage::tilecache( VOption *options ) const
{
VImage out;

View File

@@ -68,10 +68,9 @@ namespace sharp {
// Extract luminance
VImage luminance = lab[0];
// Find luminance range
VImage stats = luminance.stats();
double min = stats(0, 0)[0];
double max = stats(1, 0)[0];
if (min != max) {
int const min = luminance.percent(1);
int const max = luminance.percent(99);
if (std::abs(max - min) > 1) {
// Extract chroma
VImage chroma = lab.extract_band(1, VImage::option()->set("n", 2));
// Calculate multiplication factor and addition
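The rewritten block above stretches luminance between the 1st and 99th percentiles rather than the absolute minimum and maximum, which makes the operation robust to a handful of outlier pixels. Roughly, from the caller's side (assuming the existing normalise API is otherwise unchanged):

const sharp = require('sharp');

// Pixels at or below the ~1st luminance percentile map towards 0,
// those at or above the ~99th percentile map towards 255.
sharp('low-contrast.jpg')
  .normalise()
  .toFile('stretched.jpg');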
@@ -112,6 +111,19 @@ namespace sharp {
}
}
/*
* Flatten image to remove alpha channel
*/
VImage Flatten(VImage image, std::vector<double> flattenBackground) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
std::vector<double> background {
flattenBackground[0] * multiplier,
flattenBackground[1] * multiplier,
flattenBackground[2] * multiplier
};
return image.flatten(VImage::option()->set("background", background));
}
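The Flatten helper factored out above scales the 8-bit background into the 16-bit range when needed before calling vips flatten. Its user-facing counterpart is the existing flatten option, for example:

const sharp = require('sharp');

sharp('with-alpha.png')
  .flatten({ background: '#ff6600' })  // composite alpha over orange
  .jpeg()
  .toFile('flattened.jpg');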
/**
* Produce the "negative" of the image.
*/
@@ -262,42 +274,74 @@ namespace sharp {
/*
Trim an image
*/
VImage Trim(VImage image, double const threshold) {
VImage Trim(VImage image, std::vector<double> background, double threshold) {
if (image.width() < 3 && image.height() < 3) {
throw VError("Image to trim must be at least 3x3 pixels");
}
// Top-left pixel provides the background colour
VImage background = image.extract_area(0, 0, 1, 1);
if (HasAlpha(background)) {
background = background.flatten();
// Scale up 8-bit values to match 16-bit input image
double multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
threshold *= multiplier;
std::vector<double> backgroundAlpha(1);
if (background.size() == 0) {
// Top-left pixel provides the default background colour if none is given
background = image.extract_area(0, 0, 1, 1)(0, 0);
multiplier = 1.0;
}
if (background.size() == 4) {
// Just discard the alpha because flattening the background colour with
// itself (effectively what find_trim() does) gives the same result
backgroundAlpha[0] = background[3] * multiplier;
}
background = {
background[0] * multiplier,
background[1] * multiplier,
background[2] * multiplier
};
int left, top, width, height;
left = image.find_trim(&top, &width, &height, VImage::option()
->set("background", background(0, 0))
->set("background", background)
->set("threshold", threshold));
if (width == 0 || height == 0) {
if (HasAlpha(image)) {
// Search alpha channel
VImage alpha = image[image.bands() - 1];
VImage backgroundAlpha = alpha.extract_area(0, 0, 1, 1);
left = alpha.find_trim(&top, &width, &height, VImage::option()
->set("background", backgroundAlpha(0, 0))
->set("threshold", threshold));
}
if (width == 0 || height == 0) {
throw VError("Unexpected error while trimming. Try to lower the tolerance");
if (HasAlpha(image)) {
// Search alpha channel (A)
int leftA, topA, widthA, heightA;
VImage alpha = image[image.bands() - 1];
leftA = alpha.find_trim(&topA, &widthA, &heightA, VImage::option()
->set("background", backgroundAlpha)
->set("threshold", threshold));
if (widthA > 0 && heightA > 0) {
if (width > 0 && height > 0) {
// Combined bounding box (B)
int const leftB = std::min(left, leftA);
int const topB = std::min(top, topA);
int const widthB = std::max(left + width, leftA + widthA) - leftB;
int const heightB = std::max(top + height, topA + heightA) - topB;
return image.extract_area(leftB, topB, widthB, heightB);
} else {
// Use alpha only
return image.extract_area(leftA, topA, widthA, heightA);
}
}
}
return image.extract_area(left, top, width, height);
if (width > 0 && height > 0) {
return image.extract_area(left, top, width, height);
}
return image;
}
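Trim now accepts an explicit background colour, falling back to the top-left pixel only when none is given, and unions in a bounding box found via the alpha channel when the colour search comes up empty. A hedged sketch of the JavaScript side; the object form of trim() with background and threshold keys is assumed from the trimBackground baton field added later in this changeset:

const sharp = require('sharp');

sharp('Flag_of_the_Netherlands.png')
  .trim({ background: '#ffffff', threshold: 10 })  // assumed option names
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.trimOffsetLeft, info.trimOffsetTop));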
/*
* Calculate (a * in + b)
*/
VImage Linear(VImage image, double const a, double const b) {
if (HasAlpha(image)) {
VImage Linear(VImage image, std::vector<double> const a, std::vector<double> const b) {
size_t const bands = static_cast<size_t>(image.bands());
if (a.size() > bands) {
throw VError("Band expansion using linear is unsupported");
}
if (HasAlpha(image) && a.size() != bands && (a.size() == 1 || a.size() == bands - 1 || bands - 1 == 1)) {
// Separate alpha channel
VImage alpha = image[image.bands() - 1];
VImage alpha = image[bands - 1];
return RemoveAlpha(image).linear(a, b).bandjoin(alpha);
} else {
return image.linear(a, b);
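Linear now accepts per-channel vectors for a and b, re-joining the alpha channel when the multipliers only cover the colour bands. The corresponding call, mirroring the test added later in this changeset:

const sharp = require('sharp');

// a * in + b, applied per channel: R*0.25+150, G*0.5+100, B*0.75+50
sharp('input.webp')
  .linear([0.25, 0.5, 0.75], [150, 100, 50])
  .toFile('adjusted.webp');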

View File

@@ -45,6 +45,11 @@ namespace sharp {
*/
VImage Gamma(VImage image, double const exponent);
/*
* Flatten image to remove alpha channel
*/
VImage Flatten(VImage image, std::vector<double> flattenBackground);
/*
* Produce the "negative" of the image.
*/
@@ -85,12 +90,12 @@ namespace sharp {
/*
Trim an image
*/
VImage Trim(VImage image, double const threshold);
VImage Trim(VImage image, std::vector<double> background, double const threshold);
/*
* Linear adjustment (a * in + b)
*/
VImage Linear(VImage image, double const a, double const b);
VImage Linear(VImage image, std::vector<double> const a, std::vector<double> const b);
/*
* Recomb with a Matrix of the given bands/channel size.

View File

@@ -118,7 +118,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Trim
if (baton->trimThreshold > 0.0) {
MultiPageUnsupported(nPages, "Trim");
image = sharp::Trim(image, baton->trimThreshold);
image = sharp::Trim(image, baton->trimBackground, baton->trimThreshold);
baton->trimOffsetLeft = image.xoffset();
baton->trimOffsetTop = image.yoffset();
}
@@ -188,8 +188,10 @@ class PipelineWorker : public Napi::AsyncWorker {
if (jpegShrinkOnLoad > 1 && static_cast<int>(shrink) == jpegShrinkOnLoad) {
jpegShrinkOnLoad /= 2;
}
} else if (inputImageType == sharp::ImageType::WEBP ||
inputImageType == sharp::ImageType::SVG ||
} else if (inputImageType == sharp::ImageType::WEBP && shrink > 1.0) {
// Avoid upscaling via webp
scale = 1.0 / shrink;
} else if (inputImageType == sharp::ImageType::SVG ||
inputImageType == sharp::ImageType::PDF) {
scale = 1.0 / shrink;
}
@@ -202,6 +204,7 @@ class PipelineWorker : public Napi::AsyncWorker {
vips::VOption *option = VImage::option()
->set("access", baton->input->access)
->set("shrink", jpegShrinkOnLoad)
->set("unlimited", baton->input->unlimited)
->set("fail_on", baton->input->failOn);
if (baton->input->buffer != nullptr) {
// Reload JPEG buffer
@@ -300,8 +303,7 @@ class PipelineWorker : public Napi::AsyncWorker {
if (
sharp::HasProfile(image) &&
image.interpretation() != VIPS_INTERPRETATION_LABS &&
image.interpretation() != VIPS_INTERPRETATION_GREY16 &&
image.interpretation() != VIPS_INTERPRETATION_B_W
image.interpretation() != VIPS_INTERPRETATION_GREY16
) {
// Convert to sRGB/P3 using embedded profile
try {
@@ -320,16 +322,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Flatten image to remove alpha channel
if (baton->flatten && sharp::HasAlpha(image)) {
// Scale up 8-bit values to match 16-bit input image
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Background colour
std::vector<double> background {
baton->flattenBackground[0] * multiplier,
baton->flattenBackground[1] * multiplier,
baton->flattenBackground[2] * multiplier
};
image = image.flatten(VImage::option()
->set("background", background));
image = sharp::Flatten(image, baton->flattenBackground);
}
// Negate the colours in the image
@@ -351,11 +344,7 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const shouldBlur = baton->blurSigma != 0.0;
bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 ||
baton->hue != 0.0 || baton->lightness != 0.0;
bool const shouldApplyClahe = baton->claheWidth != 0 && baton->claheHeight != 0;
if (shouldComposite && !sharp::HasAlpha(image)) {
image = sharp::EnsureAlpha(image, 1);
@@ -364,26 +353,27 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const shouldPremultiplyAlpha = sharp::HasAlpha(image) &&
(shouldResize || shouldBlur || shouldConv || shouldSharpen);
// Premultiply image alpha channel before all transformations to avoid
// dark fringing around bright pixels
// See: http://entropymine.com/imageworsener/resizealpha/
if (shouldPremultiplyAlpha) {
image = image.premultiply();
}
// Resize
if (shouldResize) {
VipsKernel kernel = static_cast<VipsKernel>(
vips_enum_from_nick(nullptr, VIPS_TYPE_KERNEL, baton->kernel.data()));
if (
kernel != VIPS_KERNEL_NEAREST && kernel != VIPS_KERNEL_CUBIC && kernel != VIPS_KERNEL_LANCZOS2 &&
kernel != VIPS_KERNEL_LANCZOS3 && kernel != VIPS_KERNEL_MITCHELL
) {
throw vips::VError("Unknown kernel");
}
image = image.resize(1.0 / hshrink, VImage::option()
->set("vscale", 1.0 / vshrink)
->set("kernel", kernel));
->set("kernel", baton->kernel));
}
// Flip (mirror about Y axis)
if (baton->flip || flip) {
image = image.flip(VIPS_DIRECTION_VERTICAL);
image = sharp::RemoveExifOrientation(image);
}
// Flop (mirror about X axis)
if (baton->flop || flop) {
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
image = sharp::RemoveExifOrientation(image);
}
// Rotate post-extract 90-angle
@@ -400,18 +390,6 @@ class PipelineWorker : public Napi::AsyncWorker {
image = sharp::RemoveExifOrientation(image);
}
// Flip (mirror about Y axis)
if (baton->flip || flip) {
image = image.flip(VIPS_DIRECTION_VERTICAL);
image = sharp::RemoveExifOrientation(image);
}
// Flop (mirror about X axis)
if (baton->flop || flop) {
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
image = sharp::RemoveExifOrientation(image);
}
// Join additional color channels to the image
if (baton->joinChannelIn.size() > 0) {
VImage joinImage;
@@ -443,18 +421,12 @@ class PipelineWorker : public Napi::AsyncWorker {
std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground, shouldPremultiplyAlpha);
// Embed
// Calculate where to position the embedded image if gravity specified, else center.
int left;
int top;
left = static_cast<int>(round((baton->width - inputWidth) / 2));
top = static_cast<int>(round((baton->height - inputHeight) / 2));
int width = std::max(inputWidth, baton->width);
int height = std::max(inputHeight, baton->height);
std::tie(left, top) = sharp::CalculateEmbedPosition(
inputWidth, inputHeight, baton->width, baton->height, baton->position);
int width = std::max(inputWidth, baton->width);
int height = std::max(inputHeight, baton->height);
image = nPages > 1
? sharp::EmbedMultiPage(image,
@@ -553,7 +525,7 @@ class PipelineWorker : public Napi::AsyncWorker {
VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background));
}
// Median - must happen before blurring, due to the utility of blurring after thresholding
if (shouldApplyMedian) {
if (baton->medianSize > 0) {
image = image.median(baton->medianSize);
}
// Threshold - must happen before blurring, due to the utility of blurring after thresholding
@@ -579,7 +551,8 @@ class PipelineWorker : public Napi::AsyncWorker {
image = sharp::Recomb(image, baton->recombMatrix);
}
if (shouldModulate) {
// Modulate
if (baton->brightness != 1.0 || baton->saturation != 1.0 || baton->hue != 0.0 || baton->lightness != 0.0) {
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue, baton->lightness);
}
@@ -589,6 +562,18 @@ class PipelineWorker : public Napi::AsyncWorker {
baton->sharpenX1, baton->sharpenY2, baton->sharpenY3);
}
// Reverse premultiplication after all transformations
if (shouldPremultiplyAlpha) {
image = image.unpremultiply();
// Cast pixel values to integer
if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT);
} else {
image = image.cast(VIPS_FORMAT_UCHAR);
}
}
baton->premultiplied = shouldPremultiplyAlpha;
// Composite
if (shouldComposite) {
std::vector<VImage> images = { image };
@@ -607,14 +592,14 @@ class PipelineWorker : public Napi::AsyncWorker {
int across = 0;
int down = 0;
// Use gravity in overlay
if (compositeImage.width() <= baton->width) {
if (compositeImage.width() <= image.width()) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / compositeImage.width()));
// Ensure odd number of tiles across when gravity is centre, north or south
if (composite->gravity == 0 || composite->gravity == 1 || composite->gravity == 3) {
across |= 1;
}
}
if (compositeImage.height() <= baton->height) {
if (compositeImage.height() <= image.height()) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / compositeImage.height()));
// Ensure odd number of tiles down when gravity is centre, east or west
if (composite->gravity == 0 || composite->gravity == 2 || composite->gravity == 4) {
@@ -666,28 +651,16 @@ class PipelineWorker : public Napi::AsyncWorker {
xs.push_back(left);
ys.push_back(top);
}
image = image.composite(images, modes, VImage::option()->set("x", xs)->set("y", ys));
image = VImage::composite(images, modes, VImage::option()->set("x", xs)->set("y", ys));
}
// Reverse premultiplication after all transformations:
if (shouldPremultiplyAlpha) {
image = image.unpremultiply();
// Cast pixel values to integer
if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT);
} else {
image = image.cast(VIPS_FORMAT_UCHAR);
}
}
baton->premultiplied = shouldPremultiplyAlpha;
// Gamma decoding (brighten)
if (baton->gammaOut >= 1 && baton->gammaOut <= 3) {
image = sharp::Gamma(image, baton->gammaOut);
}
// Linear adjustment (a * in + b)
if (baton->linearA != 1.0 || baton->linearB != 0.0) {
if (!baton->linearA.empty()) {
image = sharp::Linear(image, baton->linearA, baton->linearB);
}
@@ -697,7 +670,7 @@ class PipelineWorker : public Napi::AsyncWorker {
}
// Apply contrast limiting adaptive histogram equalization (CLAHE)
if (shouldApplyClahe) {
if (baton->claheWidth != 0 && baton->claheHeight != 0) {
image = sharp::Clahe(image, baton->claheWidth, baton->claheHeight, baton->claheMaxSlope);
}
@@ -871,6 +844,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("near_lossless", baton->webpNearLossless)
->set("smart_subsample", baton->webpSmartSubsample)
->set("effort", baton->webpEffort)
->set("min_size", baton->webpMinSize)
->set("mixed", baton->webpMixed)
->set("alpha_q", baton->webpAlphaQuality)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
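The webpsave call gains min_size and mixed, which mainly matter for animated WebP output (smaller frames at the cost of encoding time, and mixed lossy/lossless frames). A sketch, assuming the JavaScript options are exposed as minSize and mixed to match the webpMinSize/webpMixed baton fields added below:

const sharp = require('sharp');

sharp('animated.gif', { animated: true })
  .webp({ effort: 6, minSize: true, mixed: true })  // assumed option names
  .toFile('animated.webp');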
@@ -885,6 +860,7 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("strip", !baton->withMetadata)
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("reoptimise", baton->gifReoptimise)
->set("dither", baton->gifDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
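gifsave likewise gains a reoptimise flag, which asks libvips to regenerate the palette rather than reuse the input's where possible. Usage mirrors the test added below:

const sharp = require('sharp');

sharp('animated.gif', { animated: true })
  .gif({ reoptimise: true })  // also accepted with the reoptimize spelling
  .toFile('smaller.gif');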
@@ -937,6 +913,19 @@ class PipelineWorker : public Napi::AsyncWorker {
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "heif";
} else if (baton->formatOut == "dz") {
// Write DZ to buffer
baton->tileContainer = VIPS_FOREIGN_DZ_CONTAINER_ZIP;
if (!sharp::HasAlpha(image)) {
baton->tileBackground.pop_back();
}
vips::VOption *options = BuildOptionsDZ(baton);
VipsArea *area = reinterpret_cast<VipsArea*>(image.dzsave_buffer(options));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "dz";
} else if (baton->formatOut == "raw" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::RAW)) {
// Write raw, uncompressed image data to buffer
@@ -1038,6 +1027,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("near_lossless", baton->webpNearLossless)
->set("smart_subsample", baton->webpSmartSubsample)
->set("effort", baton->webpEffort)
->set("min_size", baton->webpMinSize)
->set("mixed", baton->webpMixed)
->set("alpha_q", baton->webpAlphaQuality));
baton->formatOut = "webp";
} else if (baton->formatOut == "gif" || (mightMatchInput && isGif) ||
@@ -1048,6 +1039,7 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("strip", !baton->withMetadata)
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("reoptimise", baton->gifReoptimise)
->set("dither", baton->gifDither));
baton->formatOut = "gif";
} else if (baton->formatOut == "tiff" || (mightMatchInput && isTiff) ||
@@ -1089,64 +1081,14 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("lossless", baton->heifLossless));
baton->formatOut = "heif";
} else if (baton->formatOut == "dz" || isDz || isDzZip) {
// Write DZ to file
if (isDzZip) {
baton->tileContainer = VIPS_FOREIGN_DZ_CONTAINER_ZIP;
}
// Forward format options through suffix
std::string suffix;
if (baton->tileFormat == "png") {
std::vector<std::pair<std::string, std::string>> options {
{"interlace", baton->pngProgressive ? "TRUE" : "FALSE"},
{"compression", std::to_string(baton->pngCompressionLevel)},
{"filter", baton->pngAdaptiveFiltering ? "all" : "none"}
};
suffix = AssembleSuffixString(".png", options);
} else if (baton->tileFormat == "webp") {
std::vector<std::pair<std::string, std::string>> options {
{"Q", std::to_string(baton->webpQuality)},
{"alpha_q", std::to_string(baton->webpAlphaQuality)},
{"lossless", baton->webpLossless ? "TRUE" : "FALSE"},
{"near_lossless", baton->webpNearLossless ? "TRUE" : "FALSE"},
{"smart_subsample", baton->webpSmartSubsample ? "TRUE" : "FALSE"},
{"effort", std::to_string(baton->webpEffort)}
};
suffix = AssembleSuffixString(".webp", options);
} else {
std::vector<std::pair<std::string, std::string>> options {
{"Q", std::to_string(baton->jpegQuality)},
{"interlace", baton->jpegProgressive ? "TRUE" : "FALSE"},
{"subsample_mode", baton->jpegChromaSubsampling == "4:4:4" ? "off" : "on"},
{"trellis_quant", baton->jpegTrellisQuantisation ? "TRUE" : "FALSE"},
{"quant_table", std::to_string(baton->jpegQuantisationTable)},
{"overshoot_deringing", baton->jpegOvershootDeringing ? "TRUE": "FALSE"},
{"optimize_scans", baton->jpegOptimiseScans ? "TRUE": "FALSE"},
{"optimize_coding", baton->jpegOptimiseCoding ? "TRUE": "FALSE"}
};
std::string extname = baton->tileLayout == VIPS_FOREIGN_DZ_LAYOUT_DZ ? ".jpeg" : ".jpg";
suffix = AssembleSuffixString(extname, options);
}
// Remove alpha channel from tile background if image does not contain an alpha channel
if (!sharp::HasAlpha(image)) {
baton->tileBackground.pop_back();
}
// Write DZ to file
vips::VOption *options = VImage::option()
->set("strip", !baton->withMetadata)
->set("tile_size", baton->tileSize)
->set("overlap", baton->tileOverlap)
->set("container", baton->tileContainer)
->set("layout", baton->tileLayout)
->set("suffix", const_cast<char*>(suffix.data()))
->set("angle", CalculateAngleRotation(baton->tileAngle))
->set("background", baton->tileBackground)
->set("centre", baton->tileCentre)
->set("id", const_cast<char*>(baton->tileId.data()))
->set("skip_blanks", baton->tileSkipBlanks);
// libvips chooses a default depth based on layout. Instead of replicating that logic here by
// not passing anything - libvips will handle choice
if (baton->tileDepth < VIPS_FOREIGN_DZ_DEPTH_LAST) {
options->set("depth", baton->tileDepth);
}
vips::VOption *options = BuildOptionsDZ(baton);
image.dzsave(const_cast<char*>(baton->fileOut.data()), options);
baton->formatOut = "dz";
} else if (baton->formatOut == "v" || (mightMatchInput && isV) ||
@@ -1215,6 +1157,10 @@ class PipelineWorker : public Napi::AsyncWorker {
info.Set("trimOffsetTop", static_cast<int32_t>(baton->trimOffsetTop));
}
if (baton->input->textAutofitDpi) {
info.Set("textAutofitDpi", static_cast<uint32_t>(baton->input->textAutofitDpi));
}
if (baton->bufferOutLength > 0) {
// Add buffer size to info
info.Set("size", static_cast<uint32_t>(baton->bufferOutLength));
@@ -1303,7 +1249,7 @@ class PipelineWorker : public Napi::AsyncWorker {
/*
Assemble the suffix argument to dzsave, which is the format (by extname)
alongisde comma-separated arguments to the corresponding `formatsave` vips
alongside comma-separated arguments to the corresponding `formatsave` vips
action.
*/
std::string
@@ -1318,6 +1264,67 @@ class PipelineWorker : public Napi::AsyncWorker {
return extname + "[" + argument + "]";
}
/*
Build VOption for dzsave
*/
vips::VOption*
BuildOptionsDZ(PipelineBaton *baton) {
// Forward format options through suffix
std::string suffix;
if (baton->tileFormat == "png") {
std::vector<std::pair<std::string, std::string>> options {
{"interlace", baton->pngProgressive ? "TRUE" : "FALSE"},
{"compression", std::to_string(baton->pngCompressionLevel)},
{"filter", baton->pngAdaptiveFiltering ? "all" : "none"}
};
suffix = AssembleSuffixString(".png", options);
} else if (baton->tileFormat == "webp") {
std::vector<std::pair<std::string, std::string>> options {
{"Q", std::to_string(baton->webpQuality)},
{"alpha_q", std::to_string(baton->webpAlphaQuality)},
{"lossless", baton->webpLossless ? "TRUE" : "FALSE"},
{"near_lossless", baton->webpNearLossless ? "TRUE" : "FALSE"},
{"smart_subsample", baton->webpSmartSubsample ? "TRUE" : "FALSE"},
{"min_size", baton->webpMinSize ? "TRUE" : "FALSE"},
{"mixed", baton->webpMixed ? "TRUE" : "FALSE"},
{"effort", std::to_string(baton->webpEffort)}
};
suffix = AssembleSuffixString(".webp", options);
} else {
std::vector<std::pair<std::string, std::string>> options {
{"Q", std::to_string(baton->jpegQuality)},
{"interlace", baton->jpegProgressive ? "TRUE" : "FALSE"},
{"subsample_mode", baton->jpegChromaSubsampling == "4:4:4" ? "off" : "on"},
{"trellis_quant", baton->jpegTrellisQuantisation ? "TRUE" : "FALSE"},
{"quant_table", std::to_string(baton->jpegQuantisationTable)},
{"overshoot_deringing", baton->jpegOvershootDeringing ? "TRUE": "FALSE"},
{"optimize_scans", baton->jpegOptimiseScans ? "TRUE": "FALSE"},
{"optimize_coding", baton->jpegOptimiseCoding ? "TRUE": "FALSE"}
};
std::string extname = baton->tileLayout == VIPS_FOREIGN_DZ_LAYOUT_DZ ? ".jpeg" : ".jpg";
suffix = AssembleSuffixString(extname, options);
}
vips::VOption *options = VImage::option()
->set("strip", !baton->withMetadata)
->set("tile_size", baton->tileSize)
->set("overlap", baton->tileOverlap)
->set("container", baton->tileContainer)
->set("layout", baton->tileLayout)
->set("suffix", const_cast<char*>(suffix.data()))
->set("angle", CalculateAngleRotation(baton->tileAngle))
->set("background", baton->tileBackground)
->set("centre", baton->tileCentre)
->set("id", const_cast<char*>(baton->tileId.data()))
->set("skip_blanks", baton->tileSkipBlanks);
if (baton->tileDepth < VIPS_FOREIGN_DZ_DEPTH_LAST) {
options->set("depth", baton->tileDepth);
}
if (!baton->tileBasename.empty()) {
options->set("basename", const_cast<char*>(baton->tileBasename.data()));
}
return options;
}
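BuildOptionsDZ centralises the dzsave options so the same VOption can feed both the new dz-to-Buffer branch and the existing file path, and it now forwards an optional basename. A hedged sketch of tile output to a Buffer; it assumes tile() selects the dz output format when followed by toBuffer(), that the JS option is named basename, and that the container is forced to zip for Buffer output, as the code above suggests:

const sharp = require('sharp');

sharp('large-input.tiff')
  .tile({ size: 512, basename: 'pyramid' })  // assumed JS option names
  .toBuffer()
  .then((zipBuffer) => console.log('dz archive: %d bytes', zipBuffer.length));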
/*
Clear all thread-local data.
*/
@@ -1363,17 +1370,13 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
} else if (canvas == "ignore_aspect") {
baton->canvas = sharp::Canvas::IGNORE_ASPECT;
}
// Tint chroma
baton->tintA = sharp::AttrAsDouble(options, "tintA");
baton->tintB = sharp::AttrAsDouble(options, "tintB");
// Composite
Napi::Array compositeArray = options.Get("composite").As<Napi::Array>();
for (unsigned int i = 0; i < compositeArray.Length(); i++) {
Napi::Object compositeObject = compositeArray.Get(i).As<Napi::Object>();
Composite *composite = new Composite;
composite->input = sharp::CreateInputDescriptor(compositeObject.Get("input").As<Napi::Object>());
composite->mode = static_cast<VipsBlendMode>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BLEND_MODE, sharp::AttrAsStr(compositeObject, "blend").data()));
composite->mode = sharp::AttrAsEnum<VipsBlendMode>(compositeObject, "blend", VIPS_TYPE_BLEND_MODE);
composite->gravity = sharp::AttrAsUint32(compositeObject, "gravity");
composite->left = sharp::AttrAsInt32(compositeObject, "left");
composite->top = sharp::AttrAsInt32(compositeObject, "top");
@@ -1387,7 +1390,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->withoutReduction = sharp::AttrAsBool(options, "withoutReduction");
baton->position = sharp::AttrAsInt32(options, "position");
baton->resizeBackground = sharp::AttrAsVectorOfDouble(options, "resizeBackground");
baton->kernel = sharp::AttrAsStr(options, "kernel");
baton->kernel = sharp::AttrAsEnum<VipsKernel>(options, "kernel", VIPS_TYPE_KERNEL);
baton->fastShrinkOnLoad = sharp::AttrAsBool(options, "fastShrinkOnLoad");
// Join Channel Options
if (options.Has("joinChannelIn")) {
@@ -1416,13 +1419,16 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->sharpenY3 = sharp::AttrAsDouble(options, "sharpenY3");
baton->threshold = sharp::AttrAsInt32(options, "threshold");
baton->thresholdGrayscale = sharp::AttrAsBool(options, "thresholdGrayscale");
baton->trimBackground = sharp::AttrAsVectorOfDouble(options, "trimBackground");
baton->trimThreshold = sharp::AttrAsDouble(options, "trimThreshold");
baton->gamma = sharp::AttrAsDouble(options, "gamma");
baton->gammaOut = sharp::AttrAsDouble(options, "gammaOut");
baton->linearA = sharp::AttrAsDouble(options, "linearA");
baton->linearB = sharp::AttrAsDouble(options, "linearB");
baton->linearA = sharp::AttrAsVectorOfDouble(options, "linearA");
baton->linearB = sharp::AttrAsVectorOfDouble(options, "linearB");
baton->greyscale = sharp::AttrAsBool(options, "greyscale");
baton->normalise = sharp::AttrAsBool(options, "normalise");
baton->tintA = sharp::AttrAsDouble(options, "tintA");
baton->tintB = sharp::AttrAsDouble(options, "tintB");
baton->claheWidth = sharp::AttrAsUint32(options, "claheWidth");
baton->claheHeight = sharp::AttrAsUint32(options, "claheHeight");
baton->claheMaxSlope = sharp::AttrAsUint32(options, "claheMaxSlope");
@@ -1450,10 +1456,10 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->ensureAlpha = sharp::AttrAsDouble(options, "ensureAlpha");
if (options.Has("boolean")) {
baton->boolean = sharp::CreateInputDescriptor(options.Get("boolean").As<Napi::Object>());
baton->booleanOp = sharp::GetBooleanOperation(sharp::AttrAsStr(options, "booleanOp"));
baton->booleanOp = sharp::AttrAsEnum<VipsOperationBoolean>(options, "booleanOp", VIPS_TYPE_OPERATION_BOOLEAN);
}
if (options.Has("bandBoolOp")) {
baton->bandBoolOp = sharp::GetBooleanOperation(sharp::AttrAsStr(options, "bandBoolOp"));
baton->bandBoolOp = sharp::AttrAsEnum<VipsOperationBoolean>(options, "bandBoolOp", VIPS_TYPE_OPERATION_BOOLEAN);
}
if (options.Has("convKernel")) {
Napi::Object kernel = options.Get("convKernel").As<Napi::Object>();
@@ -1475,11 +1481,12 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->recombMatrix[i] = sharp::AttrAsDouble(recombMatrix, i);
}
}
baton->colourspaceInput = sharp::GetInterpretation(sharp::AttrAsStr(options, "colourspaceInput"));
baton->colourspaceInput = sharp::AttrAsEnum<VipsInterpretation>(
options, "colourspaceInput", VIPS_TYPE_INTERPRETATION);
if (baton->colourspaceInput == VIPS_INTERPRETATION_ERROR) {
baton->colourspaceInput = VIPS_INTERPRETATION_LAST;
}
baton->colourspace = sharp::GetInterpretation(sharp::AttrAsStr(options, "colourspace"));
baton->colourspace = sharp::AttrAsEnum<VipsInterpretation>(options, "colourspace", VIPS_TYPE_INTERPRETATION);
if (baton->colourspace == VIPS_INTERPRETATION_ERROR) {
baton->colourspace = VIPS_INTERPRETATION_sRGB;
}
@@ -1494,7 +1501,9 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
Napi::Array mdStrKeys = mdStrs.GetPropertyNames();
for (unsigned int i = 0; i < mdStrKeys.Length(); i++) {
std::string k = sharp::AttrAsStr(mdStrKeys, i);
baton->withMetadataStrs.insert(std::make_pair(k, sharp::AttrAsStr(mdStrs, k)));
if (mdStrs.HasOwnProperty(k)) {
baton->withMetadataStrs.insert(std::make_pair(k, sharp::AttrAsStr(mdStrs, k)));
}
}
baton->timeoutSeconds = sharp::AttrAsUint32(options, "timeoutSeconds");
// Format-specific
@@ -1525,9 +1534,12 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->webpNearLossless = sharp::AttrAsBool(options, "webpNearLossless");
baton->webpSmartSubsample = sharp::AttrAsBool(options, "webpSmartSubsample");
baton->webpEffort = sharp::AttrAsUint32(options, "webpEffort");
baton->webpMinSize = sharp::AttrAsBool(options, "webpMinSize");
baton->webpMixed = sharp::AttrAsBool(options, "webpMixed");
baton->gifBitdepth = sharp::AttrAsUint32(options, "gifBitdepth");
baton->gifEffort = sharp::AttrAsUint32(options, "gifEffort");
baton->gifDither = sharp::AttrAsDouble(options, "gifDither");
baton->gifReoptimise = sharp::AttrAsBool(options, "gifReoptimise");
baton->tiffQuality = sharp::AttrAsUint32(options, "tiffQuality");
baton->tiffPyramid = sharp::AttrAsBool(options, "tiffPyramid");
baton->tiffBitdepth = sharp::AttrAsUint32(options, "tiffBitdepth");
@@ -1539,28 +1551,19 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
if (baton->tiffXres == 1.0 && baton->tiffYres == 1.0 && baton->withMetadataDensity > 0) {
baton->tiffXres = baton->tiffYres = baton->withMetadataDensity / 25.4;
}
// tiff compression options
baton->tiffCompression = static_cast<VipsForeignTiffCompression>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_COMPRESSION,
sharp::AttrAsStr(options, "tiffCompression").data()));
baton->tiffPredictor = static_cast<VipsForeignTiffPredictor>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_PREDICTOR,
sharp::AttrAsStr(options, "tiffPredictor").data()));
baton->tiffResolutionUnit = static_cast<VipsForeignTiffResunit>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_RESUNIT,
sharp::AttrAsStr(options, "tiffResolutionUnit").data()));
baton->tiffCompression = sharp::AttrAsEnum<VipsForeignTiffCompression>(
options, "tiffCompression", VIPS_TYPE_FOREIGN_TIFF_COMPRESSION);
baton->tiffPredictor = sharp::AttrAsEnum<VipsForeignTiffPredictor>(
options, "tiffPredictor", VIPS_TYPE_FOREIGN_TIFF_PREDICTOR);
baton->tiffResolutionUnit = sharp::AttrAsEnum<VipsForeignTiffResunit>(
options, "tiffResolutionUnit", VIPS_TYPE_FOREIGN_TIFF_RESUNIT);
baton->heifQuality = sharp::AttrAsUint32(options, "heifQuality");
baton->heifLossless = sharp::AttrAsBool(options, "heifLossless");
baton->heifCompression = static_cast<VipsForeignHeifCompression>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_HEIF_COMPRESSION,
sharp::AttrAsStr(options, "heifCompression").data()));
baton->heifCompression = sharp::AttrAsEnum<VipsForeignHeifCompression>(
options, "heifCompression", VIPS_TYPE_FOREIGN_HEIF_COMPRESSION);
baton->heifEffort = sharp::AttrAsUint32(options, "heifEffort");
baton->heifChromaSubsampling = sharp::AttrAsStr(options, "heifChromaSubsampling");
// Raw output
baton->rawDepth = static_cast<VipsBandFormat>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BAND_FORMAT,
sharp::AttrAsStr(options, "rawDepth").data()));
baton->rawDepth = sharp::AttrAsEnum<VipsBandFormat>(options, "rawDepth", VIPS_TYPE_BAND_FORMAT);
// Animated output properties
if (sharp::HasAttr(options, "loop")) {
baton->loop = sharp::AttrAsUint32(options, "loop");
@@ -1568,24 +1571,19 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
if (sharp::HasAttr(options, "delay")) {
baton->delay = sharp::AttrAsInt32Vector(options, "delay");
}
// Tile output
baton->tileSize = sharp::AttrAsUint32(options, "tileSize");
baton->tileOverlap = sharp::AttrAsUint32(options, "tileOverlap");
baton->tileAngle = sharp::AttrAsInt32(options, "tileAngle");
baton->tileBackground = sharp::AttrAsVectorOfDouble(options, "tileBackground");
baton->tileSkipBlanks = sharp::AttrAsInt32(options, "tileSkipBlanks");
baton->tileContainer = static_cast<VipsForeignDzContainer>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_DZ_CONTAINER,
sharp::AttrAsStr(options, "tileContainer").data()));
baton->tileLayout = static_cast<VipsForeignDzLayout>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_DZ_LAYOUT,
sharp::AttrAsStr(options, "tileLayout").data()));
baton->tileContainer = sharp::AttrAsEnum<VipsForeignDzContainer>(
options, "tileContainer", VIPS_TYPE_FOREIGN_DZ_CONTAINER);
baton->tileLayout = sharp::AttrAsEnum<VipsForeignDzLayout>(options, "tileLayout", VIPS_TYPE_FOREIGN_DZ_LAYOUT);
baton->tileFormat = sharp::AttrAsStr(options, "tileFormat");
baton->tileDepth = static_cast<VipsForeignDzDepth>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_DZ_DEPTH,
sharp::AttrAsStr(options, "tileDepth").data()));
baton->tileDepth = sharp::AttrAsEnum<VipsForeignDzDepth>(options, "tileDepth", VIPS_TYPE_FOREIGN_DZ_DEPTH);
baton->tileCentre = sharp::AttrAsBool(options, "tileCentre");
baton->tileId = sharp::AttrAsStr(options, "tileId");
baton->tileBasename = sharp::AttrAsStr(options, "tileBasename");
// Force random access for certain operations
if (baton->input->access == VIPS_ACCESS_SEQUENTIAL) {

View File

@@ -67,6 +67,7 @@ struct PipelineBaton {
int width;
int height;
int channels;
VipsKernel kernel;
sharp::Canvas canvas;
int position;
std::vector<double> resizeBackground;
@@ -75,7 +76,6 @@ struct PipelineBaton {
int cropOffsetTop;
bool premultiplied;
bool tileCentre;
std::string kernel;
bool fastShrinkOnLoad;
double tintA;
double tintB;
@@ -97,11 +97,12 @@ struct PipelineBaton {
double sharpenY3;
int threshold;
bool thresholdGrayscale;
std::vector<double> trimBackground;
double trimThreshold;
int trimOffsetLeft;
int trimOffsetTop;
double linearA;
double linearB;
std::vector<double> linearA;
std::vector<double> linearB;
double gamma;
double gammaOut;
bool greyscale;
@@ -157,9 +158,12 @@ struct PipelineBaton {
bool webpLossless;
bool webpSmartSubsample;
int webpEffort;
bool webpMinSize;
bool webpMixed;
int gifBitdepth;
int gifEffort;
double gifDither;
bool gifReoptimise;
int tiffQuality;
VipsForeignTiffCompression tiffCompression;
VipsForeignTiffPredictor tiffPredictor;
@@ -209,6 +213,7 @@ struct PipelineBaton {
int tileSkipBlanks;
VipsForeignDzDepth tileDepth;
std::string tileId;
std::string tileBasename;
std::unique_ptr<double[]> recombMatrix;
PipelineBaton():
@@ -217,6 +222,7 @@ struct PipelineBaton {
topOffsetPre(-1),
topOffsetPost(-1),
channels(0),
kernel(VIPS_KERNEL_LANCZOS3),
canvas(sharp::Canvas::CROP),
position(0),
resizeBackground{ 0.0, 0.0, 0.0, 255.0 },
@@ -244,11 +250,12 @@ struct PipelineBaton {
sharpenY3(20.0),
threshold(0),
thresholdGrayscale(true),
trimBackground{},
trimThreshold(0.0),
trimOffsetLeft(0),
trimOffsetTop(0),
linearA(1.0),
linearB(0.0),
linearA{},
linearB{},
gamma(0.0),
greyscale(false),
normalise(false),
@@ -302,6 +309,12 @@ struct PipelineBaton {
webpLossless(false),
webpSmartSubsample(false),
webpEffort(4),
webpMinSize(false),
webpMixed(false),
gifBitdepth(8),
gifEffort(7),
gifDither(1.0),
gifReoptimise(false),
tiffQuality(80),
tiffCompression(VIPS_FOREIGN_TIFF_COMPRESSION_JPEG),
tiffPredictor(VIPS_FOREIGN_TIFF_PREDICTOR_HORIZONTAL),

View File

@@ -22,7 +22,6 @@
#include "stats.h"
static void* sharp_vips_init(void*) {
g_setenv("VIPS_MIN_STACK_SIZE", "2m", FALSE);
vips_init("sharp");
return nullptr;
}

View File

@@ -118,14 +118,25 @@ Napi::Value format(const Napi::CallbackInfo& info) {
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips", "jp2k"
}) {
// Input
Napi::Boolean hasInputFile =
Napi::Boolean::New(env, vips_type_find("VipsOperation", (f + "load").c_str()));
const VipsObjectClass *oc = vips_class_find("VipsOperation", (f + "load").c_str());
Napi::Boolean hasInputFile = Napi::Boolean::New(env, oc);
Napi::Boolean hasInputBuffer =
Napi::Boolean::New(env, vips_type_find("VipsOperation", (f + "load_buffer").c_str()));
Napi::Object input = Napi::Object::New(env);
input.Set("file", hasInputFile);
input.Set("buffer", hasInputBuffer);
input.Set("stream", hasInputBuffer);
if (hasInputFile) {
const VipsForeignClass *fc = VIPS_FOREIGN_CLASS(oc);
if (fc->suffs) {
Napi::Array fileSuffix = Napi::Array::New(env);
const char **suffix = fc->suffs;
for (int i = 0; *suffix; i++, suffix++) {
fileSuffix.Set(i, Napi::String::New(env, *suffix));
}
input.Set("fileSuffix", fileSuffix);
}
}
// Output
Napi::Boolean hasOutputFile =
Napi::Boolean::New(env, vips_type_find("VipsOperation", (f + "save").c_str()));
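The metadata hunk above populates a fileSuffix array for each loader from the libvips foreign class suffs list. From JavaScript this presumably surfaces through sharp.format, for example:

const sharp = require('sharp');

// e.g. the array for PNG input might list suffixes such as '.png'
console.log(sharp.format.png.input.fileSuffix);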

test/bench/Dockerfile Normal file (26 lines)
View File

@@ -0,0 +1,26 @@
FROM ubuntu:22.04
ARG BRANCH=main
# Install basic dependencies
RUN apt-get -y update && apt-get install -y build-essential curl git
# Install latest Node.js LTS
RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
RUN apt-get install -y nodejs
# Install benchmark dependencies
RUN apt-get install -y imagemagick libmagick++-dev graphicsmagick libmapnik-dev
# Install sharp
RUN mkdir /tmp/sharp
RUN cd /tmp && git clone --single-branch --branch $BRANCH https://github.com/lovell/sharp.git
RUN cd /tmp/sharp && npm install --build-from-source
# Install benchmark test
RUN cd /tmp/sharp/test/bench && npm install
RUN cat /etc/os-release | grep VERSION=
RUN node -v
WORKDIR /tmp/sharp/test/bench
CMD [ "node", "perf" ]

View File

@@ -10,13 +10,13 @@
"devDependencies": {
"@squoosh/cli": "0.7.2",
"@squoosh/lib": "0.4.0",
"async": "3.2.3",
"async": "3.2.4",
"benchmark": "2.1.4",
"gm": "1.23.1",
"imagemagick": "0.1.3",
"jimp": "0.16.1",
"mapnik": "4.5.9",
"semver": "7.3.5"
"semver": "7.3.7"
},
"license": "Apache-2.0",
"engines": {

test/bench/run-with-docker.sh Executable file (13 lines)
View File

@@ -0,0 +1,13 @@
#!/usr/bin/env bash
set -e
if ! type docker >/dev/null; then
echo "Please install docker"
exit 1
fi
BRANCH=$(git branch --show-current)
echo "Running sharp performance tests using $BRANCH branch"
docker build --build-arg "BRANCH=$BRANCH" -t sharp-test-bench .
docker run --rm -it sharp-test-bench

[Binary image fixtures: five added (4.7 KiB, 812 B, 794 B, 1.3 KiB, 161 KiB) and four modified (7.6 KiB → 9.2 KiB, 3.7 KiB → 6.4 KiB, 424 KiB → 424 KiB, 3.4 KiB → 3.4 KiB); previews not shown.]

View File

@@ -94,6 +94,10 @@ module.exports = {
inputPngSolidAlpha: getPath('with-alpha.png'), // https://github.com/lovell/sharp/issues/1599
inputPngP3: getPath('p3.png'), // https://github.com/lovell/sharp/issues/2862
inputPngPalette: getPath('swiss.png'), // https://github.com/randy408/libspng/issues/188
inputPngTrimIncludeAlpha: getPath('trim-mc.png'), // https://github.com/lovell/sharp/issues/2166
inputPngTrimSpecificColour: getPath('Flag_of_the_Netherlands.png'), // https://commons.wikimedia.org/wiki/File:Flag_of_the_Netherlands.svg
inputPngTrimSpecificColour16bit: getPath('Flag_of_the_Netherlands-16bit.png'), // convert Flag_of_the_Netherlands.png -depth 16 Flag_of_the_Netherlands-16bit.png
inputPngTrimSpecificColourIncludeAlpha: getPath('Flag_of_the_Netherlands-alpha.png'), // convert Flag_of_the_Netherlands.png -alpha set -background none -channel A -evaluate multiply 0.5 +channel Flag_of_the_Netherlands-alpha.png
inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
inputWebPWithTransparency: getPath('5_webp_a.webp'), // http://www.gstatic.com/webp/gallery3/5_webp_a.webp

test/fixtures/trim-mc.png vendored Normal file (new binary fixture, 10 KiB; preview not shown)

View File

@@ -232,6 +232,14 @@
fun:XML_ParseBuffer
obj:*/libfontconfig.so.*
}
{
leak_fontconfig_XML_ParseBuffer_indirect
Memcheck:Leak
match-leak-kinds: indirect
...
fun:XML_ParseBuffer
obj:*/libfontconfig.so.*
}
{
leak_fontconfig_FcInitLoadConfigAndFonts
Memcheck:Leak
@@ -242,6 +250,36 @@
...
fun:FcInitLoadConfigAndFonts
}
{
leak_fontconfig_FcDefaultSubstitute
Memcheck:Leak
match-leak-kinds: indirect
fun:calloc
...
fun:FcDefaultSubstitute
...
fun:pango_itemize_with_base_dir
...
fun:pango_layout_get_pixel_extents
fun:vips_text_get_extents
}
{
leak_fontconfig_FcLangSetCreate
Memcheck:Leak
match-leak-kinds: indirect
fun:malloc
fun:FcLangSetCreate
fun:FcLangSetCopy
fun:FcValueSave
...
fun:FcFontRenderPrepare
fun:FcFontMatch
...
fun:pango_itemize_with_base_dir
...
fun:pango_layout_get_pixel_extents
fun:vips_text_get_extents
}
# heif
{

View File

@@ -23,6 +23,7 @@ describe('Alpha transparency', function () {
.flatten({
background: { r: 255, g: 102, b: 0 }
})
.jpeg({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
@@ -35,6 +36,7 @@ describe('Alpha transparency', function () {
sharp(fixtures.inputPngWithTransparency)
.resize(400, 300)
.flatten({ background: '#ff6600' })
.jpeg({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);

View File

@@ -27,7 +27,9 @@ describe('AVIF', () => {
format: 'jpeg',
hasAlpha: false,
hasProfile: false,
height: 14,
// 32 / (2048 / 858) = 13.40625
// Math.round(13.40625) = 13
height: 13,
isProgressive: false,
space: 'srgb',
width: 32
@@ -70,7 +72,7 @@ describe('AVIF', () => {
format: 'heif',
hasAlpha: false,
hasProfile: false,
height: 14,
height: 13,
isProgressive: false,
pagePrimary: 0,
pages: 1,

View File

@@ -4,12 +4,10 @@ const detectLibc = require('detect-libc');
const sharp = require('../../');
const libcFamily = detectLibc.familySync();
const usingCache = libcFamily !== detectLibc.MUSL;
const usingSimd = !process.env.G_DEBUG;
const concurrency =
libcFamily === detectLibc.MUSL || process.arch === 'arm'
? 1
: undefined;
const usingCache = !(process.env.G_DEBUG || libcFamily === detectLibc.MUSL);
const usingSimd = !(process.env.G_DEBUG || process.env.VIPS_NOVECTOR);
const concurrency = process.env.VIPS_CONCURRENCY ||
(libcFamily === detectLibc.MUSL || process.arch === 'arm' ? 1 : undefined);
beforeEach(function () {
sharp.cache(usingCache);

View File

@@ -420,4 +420,53 @@ describe('composite', () => {
assert.deepStrictEqual(red, { r, g, b });
});
it('Ensure tiled composition works with resized fit=outside', async () => {
const { info } = await sharp({
create: {
width: 41, height: 41, channels: 3, background: 'red'
}
})
.resize({
width: 10,
height: 40,
fit: 'outside'
})
.composite([
{
input: {
create: {
width: 16, height: 16, channels: 3, background: 'green'
}
},
tile: true
}
])
.toBuffer({ resolveWithObject: true });
assert.strictEqual(info.width, 40);
assert.strictEqual(info.height, 40);
});
it('Ensure implicit unpremultiply after resize but before composite', async () => {
const [r, g, b, a] = await sharp({
create: {
width: 1, height: 1, channels: 4, background: 'saddlebrown'
}
})
.resize({ width: 8 })
.composite([{
input: Buffer.from([255, 255, 255, 128]),
raw: { width: 1, height: 1, channels: 4 },
tile: true,
blend: 'dest-in'
}])
.raw()
.toBuffer();
assert.strictEqual(r, 139);
assert.strictEqual(g, 69);
assert.strictEqual(b, 19);
assert.strictEqual(a, 128);
});
});

View File

@@ -138,7 +138,20 @@ describe('Partial image extraction', function () {
if (err) throw err;
assert.strictEqual(280, info.width);
assert.strictEqual(380, info.height);
fixtures.assertSimilar(fixtures.expected('rotate-extract.jpg'), data, { threshold: 7 }, done);
fixtures.assertSimilar(fixtures.expected('rotate-extract.jpg'), data, done);
});
});
it('Extract then rotate then extract', function (done) {
sharp(fixtures.inputPngWithGreyAlpha)
.extract({ left: 20, top: 10, width: 180, height: 280 })
.rotate(90)
.extract({ left: 20, top: 10, width: 200, height: 100 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
fixtures.assertSimilar(fixtures.expected('extract-rotate-extract.jpg'), data, done);
});
});
@@ -164,7 +177,7 @@ describe('Partial image extraction', function () {
if (err) throw err;
assert.strictEqual(380, info.width);
assert.strictEqual(280, info.height);
fixtures.assertSimilar(fixtures.expected('rotate-extract-45.jpg'), data, { threshold: 7 }, done);
fixtures.assertSimilar(fixtures.expected('rotate-extract-45.jpg'), data, done);
});
});
@@ -281,5 +294,27 @@ describe('Partial image extraction', function () {
done();
});
});
it('Multiple extract emits warning', () => {
let warningMessage = '';
const s = sharp();
s.on('warning', function (msg) { warningMessage = msg; });
const options = { top: 0, left: 0, width: 1, height: 1 };
s.extract(options);
assert.strictEqual(warningMessage, '');
s.extract(options);
assert.strictEqual(warningMessage, 'ignoring previous extract options');
});
it('Multiple rotate+extract emits warning', () => {
let warningMessage = '';
const s = sharp().rotate();
s.on('warning', function (msg) { warningMessage = msg; });
const options = { top: 0, left: 0, width: 1, height: 1 };
s.extract(options);
assert.strictEqual(warningMessage, '');
s.extract(options);
assert.strictEqual(warningMessage, 'ignoring previous extract options');
});
});
});

View File

@@ -23,7 +23,9 @@ describe('failOn', () => {
let isWarningEmitted = false;
sharp(fixtures.inputPngTruncated, { failOn: 'none' })
.on('warning', function (warning) {
assert.ok(warning.includes('not enough data') || warning.includes('end of stream'));
assert.ok(
['read gave 2 warnings', 'not enough data', 'end of stream']
.some(m => warning.includes(m)));
isWarningEmitted = true;
})
.resize(32, 24)

View File

@@ -80,6 +80,22 @@ describe('GIF input', () => {
assert.strictEqual(true, reduced.length < original.length);
});
it('valid optimise', () => {
assert.doesNotThrow(() => sharp().gif({ reoptimise: true }));
assert.doesNotThrow(() => sharp().gif({ reoptimize: true }));
});
it('invalid reoptimise throws', () => {
assert.throws(
() => sharp().gif({ reoptimise: -1 }),
/Expected boolean for gifReoptimise but received -1 of type number/
);
assert.throws(
() => sharp().gif({ reoptimize: 'fail' }),
/Expected boolean for gifReoptimise but received fail of type string/
);
});
it('invalid loop throws', () => {
assert.throws(() => {
sharp().gif({ loop: -1 });

View File

@@ -52,7 +52,7 @@ describe('HEIF', () => {
});
it('valid effort does not throw an error', () => {
assert.doesNotThrow(() => {
sharp().heif({ speed: 6 });
sharp().heif({ effort: 6 });
});
});
it('out of range effort should throw an error', () => {
@@ -65,21 +65,6 @@ describe('HEIF', () => {
sharp().heif({ effort: 'fail' });
});
});
it('valid speed does not throw an error', () => {
assert.doesNotThrow(() => {
sharp().heif({ speed: 6 });
});
});
it('out of range speed should throw an error', () => {
assert.throws(() => {
sharp().heif({ speed: 10 });
});
});
it('invalid speed should throw an error', () => {
assert.throws(() => {
sharp().heif({ speed: 'fail' });
});
});
it('invalid chromaSubsampling should throw an error', () => {
assert.throws(() => {
sharp().heif({ chromaSubsampling: 'fail' });

View File

@@ -680,7 +680,7 @@ describe('Input/output', function () {
});
});
describe('Switch off safety limits for PNG/SVG input', () => {
describe('Switch off safety limits for certain formats', () => {
it('Valid', () => {
assert.doesNotThrow(() => {
sharp({ unlimited: true });

View File

@@ -65,15 +65,34 @@ describe('Linear adjustment', function () {
});
});
it('Invalid linear arguments', function () {
assert.throws(function () {
sharp(fixtures.inputPngOverlayLayer1)
.linear('foo');
});
it('per channel level adjustment', function (done) {
sharp(fixtures.inputWebP)
.linear([0.25, 0.5, 0.75], [150, 100, 50]).toBuffer(function (err, data, info) {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('linear-per-channel.jpg'), data, done);
});
});
assert.throws(function () {
sharp(fixtures.inputPngOverlayLayer1)
.linear(undefined, { bar: 'baz' });
});
it('Invalid linear arguments', function () {
assert.throws(
() => sharp().linear('foo'),
/Expected number or array of numbers for a but received foo of type string/
);
assert.throws(
() => sharp().linear(undefined, { bar: 'baz' }),
/Expected number or array of numbers for b but received \[object Object\] of type object/
);
assert.throws(
() => sharp().linear([], [1]),
/Expected number or array of numbers for a but received {2}of type object/
);
assert.throws(
() => sharp().linear([1, 2], [1]),
/Expected a and b to be arrays of the same length/
);
assert.throws(
() => sharp().linear([1]),
/Expected a and b to be arrays of the same length/
);
});
});

View File

@@ -343,7 +343,7 @@ describe('Image metadata', function () {
assert.strictEqual(depth, 'uchar');
assert.strictEqual(isProgressive, false);
assert.strictEqual(pages, 10);
assert.strictEqual(loop, 2);
assert.strictEqual(loop, 3);
assert.deepStrictEqual(delay, [...Array(9).fill(3000), 15000]);
assert.strictEqual(hasProfile, false);
assert.strictEqual(hasAlpha, true);

View File

@@ -13,7 +13,7 @@ const assertNormalized = function (data) {
max = Math.max(max, data[i]);
}
assert.strictEqual(0, min);
assert.strictEqual(255, max);
assert.ok([254, 255].includes(max));
};
describe('Normalization', function () {
@@ -30,7 +30,6 @@ describe('Normalization', function () {
it('spreads grayscaled image values between 0 and 255', function (done) {
sharp(fixtures.inputJpgWithLowContrast)
.gamma()
.greyscale()
.normalize(true)
.raw()

View File

@@ -190,6 +190,23 @@ describe('PNG', function () {
});
});
it('Can set bitdepth of PNG without palette', async () => {
const data = await sharp({
create: {
width: 8, height: 8, channels: 3, background: 'red'
}
})
.toColourspace('b-w')
.png({ colours: 2, palette: false })
.toBuffer();
const { channels, paletteBitDepth, size, space } = await sharp(data).metadata();
assert.strictEqual(channels, 1);
assert.strictEqual(paletteBitDepth, undefined);
assert.strictEqual(size, 89);
assert.strictEqual(space, 'b-w');
});
it('Valid PNG libimagequant dither value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([

View File

@@ -121,6 +121,20 @@ describe('Resize dimensions', function () {
});
});
it('Webp resize then extract large image', function (done) {
sharp(fixtures.inputWebP)
.resize(0x4000, 0x4000)
.extract({ top: 0x2000, left: 0x2000, width: 256, height: 256 })
.webp()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('webp', info.format);
assert.strictEqual(256, info.width);
assert.strictEqual(256, info.height);
done();
});
});
it('WebP shrink-on-load rounds to zero, ensure recalculation is correct', function (done) {
sharp(fixtures.inputJpg)
.resize(1080, 607)
@@ -777,4 +791,14 @@ describe('Resize dimensions', function () {
sharp().resize(null, null, { position: 'unknown' });
});
});
it('Multiple resize emits warning', () => {
let warningMessage = '';
const s = sharp();
s.on('warning', function (msg) { warningMessage = msg; });
s.resize(1);
assert.strictEqual(warningMessage, '');
s.resize(2);
assert.strictEqual(warningMessage, 'ignoring previous resize options');
});
});

View File

@@ -25,8 +25,8 @@ describe('Rotation', function () {
it('Rotate by 30 degrees with semi-transparent background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.resize(320)
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
@@ -39,8 +39,8 @@ describe('Rotation', function () {
it('Rotate by 30 degrees with solid background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.resize(320)
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
@@ -51,25 +51,31 @@ describe('Rotation', function () {
});
it('Rotate by 90 degrees, respecting output input size', function (done) {
sharp(fixtures.inputJpg).rotate(90).resize(320, 240).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
sharp(fixtures.inputJpg)
.rotate(90)
.resize(320, 240)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Rotate by 30 degrees, respecting output input size', function (done) {
sharp(fixtures.inputJpg).rotate(30).resize(320, 240).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(397, info.width);
assert.strictEqual(368, info.height);
done();
});
it('Resize then rotate by 30 degrees, respecting output input size', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.rotate(30)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(397, info.width);
assert.strictEqual(368, info.height);
done();
});
});
[-3690, -450, -90, 90, 450, 3690].forEach(function (angle) {
@@ -141,8 +147,8 @@ describe('Rotation', function () {
it('Rotate by 270 degrees, rectangular output ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240, { fit: sharp.fit.fill })
.rotate(270)
.resize(320, 240, { fit: sharp.fit.fill })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
@@ -300,6 +306,16 @@ describe('Rotation', function () {
)
);
it('Multiple rotate emits warning', () => {
let warningMessage = '';
const s = sharp();
s.on('warning', function (msg) { warningMessage = msg; });
s.rotate();
assert.strictEqual(warningMessage, '');
s.rotate();
assert.strictEqual(warningMessage, 'ignoring previous rotate options');
});
it('Flip - vertical', function (done) {
sharp(fixtures.inputJpg)
.resize(320)

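The reordered rotation tests above reflect that operation order is now respected where possible, and that repeating an operation emits a warning instead of silently merging options. A small sketch with a hypothetical input file:

const sharp = require('sharp');

// Resize first, then rotate by 30 degrees: the output is the rotated
// bounding box of the 320x240 resize, i.e. 397x368.
sharp('input.jpg') // hypothetical input file
  .resize(320, 240)
  .rotate(30)
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.width, info.height))
  .catch(console.error);

// Calling the same operation twice emits a 'warning' event.
const pipeline = sharp('input.jpg'); // hypothetical input file
pipeline.on('warning', (msg) => console.warn(msg)); // "ignoring previous rotate options"
pipeline.rotate().rotate();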
test/unit/text.js (new file, 297 lines)
View File

@@ -0,0 +1,297 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Text to image', () => {
it('text with default values', async () => {
const output = fixtures.path('output.text-default.png');
const text = sharp({
text: {
text: 'Hello, world !'
}
});
const info = await text.png().toFile(output);
assert.strictEqual('png', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(false, info.premultiplied);
assert.ok(info.width > 10);
assert.ok(info.height > 8);
const metadata = await sharp(output).metadata();
assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(72, metadata.density);
const stats = await sharp(output).stats();
assert.strictEqual(0, stats.channels[0].min);
assert.strictEqual(255, stats.channels[0].max);
assert.strictEqual(0, stats.channels[1].min);
assert.strictEqual(255, stats.channels[1].max);
assert.strictEqual(0, stats.channels[2].min);
assert.strictEqual(255, stats.channels[2].max);
assert.ok(info.textAutofitDpi > 0);
});
it('text with width and height', function (done) {
const output = fixtures.path('output.text-width-height.png');
const maxWidth = 500;
const maxHeight = 500;
const text = sharp({
text: {
text: 'Hello, world!',
width: maxWidth,
height: maxHeight
}
});
text.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(3, info.channels);
assert.ok(info.width > 10 && info.width <= maxWidth);
assert.ok(info.height > 10 && info.height <= maxHeight);
assert.ok(Math.abs(info.width - maxWidth) < 50);
assert.ok(info.textAutofitDpi > 0);
done();
});
});
it('text with dpi', function (done) {
const output = fixtures.path('output.text-dpi.png');
const dpi = 300;
const text = sharp({
text: {
text: 'Hello, world!',
dpi: dpi
}
});
text.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
sharp(output).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(dpi, metadata.density);
done();
});
});
});
it('text with color and pango markup', function (done) {
const output = fixtures.path('output.text-color-pango.png');
const dpi = 300;
const text = sharp({
text: {
text: '<span foreground="red" font="100">red</span><span font="50" background="cyan">blue</span>',
rgba: true,
dpi: dpi
}
});
text.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
sharp(output).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(dpi, metadata.density);
assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(true, metadata.hasAlpha);
done();
});
});
});
it('text with font', function (done) {
const output = fixtures.path('output.text-with-font.png');
const text = sharp({
text: {
text: 'Hello, world!',
font: 'sans 100'
}
});
text.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(3, info.channels);
assert.ok(info.width > 30);
assert.ok(info.height > 10);
done();
});
});
it('text with justify and composite', done => {
const output = fixtures.path('output.text-composite.png');
const width = 500;
const dpi = 300;
const text = sharp(fixtures.inputJpg)
.resize(width)
.composite([{
input: {
text: {
text: '<span foreground="#ffff00">Watermark</span> <span foreground="white"><i>is cool</i></span>',
width: 300,
height: 300,
justify: true,
align: 'right',
spacing: 50,
rgba: true
}
},
gravity: 'northeast'
}, {
input: {
text: {
text: '<span background="cyan">cool</span>',
font: 'sans 30',
dpi: dpi,
rgba: true
}
},
left: 30,
top: 250
}]);
text.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(width, info.width);
assert.strictEqual(true, info.premultiplied);
sharp(output).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual('srgb', metadata.space);
assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(true, metadata.hasAlpha);
done();
});
});
});
it('bad text input', function () {
assert.throws(function () {
sharp({
text: {
}
});
});
});
it('fontfile input', function () {
// Added for code coverage
sharp({
text: {
text: 'text',
fontfile: 'UnknownFont.ttf'
}
});
});
it('bad font input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
font: 12
}
});
});
});
it('bad fontfile input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
fontfile: true
}
});
});
});
it('bad width input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
width: 'bad'
}
});
});
});
it('bad height input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
height: 'bad'
}
});
});
});
it('bad align input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
align: 'unknown'
}
});
});
});
it('bad justify input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
justify: 'unknown'
}
});
});
});
it('bad dpi input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
dpi: -10
}
});
});
});
it('bad rgba input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
rgba: -10
}
});
});
});
it('bad spacing input', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
spacing: 'number expected'
}
});
});
});
it('only height or dpi not both', function () {
assert.throws(function () {
sharp({
text: {
text: 'text',
height: 400,
dpi: 100
}
});
});
});
});
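For reference, a minimal sketch of the text input exercised above, which renders text via Pango; the output file name is hypothetical:

const sharp = require('sharp');

// Render Pango markup to an RGBA image suitable for compositing.
sharp({
  text: {
    text: '<span foreground="red">Hello</span>, world!',
    font: 'sans 30',
    rgba: true,
    dpi: 150
  }
})
  .png()
  .toFile('text.png') // hypothetical output file
  .catch(console.error);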

View File

@@ -6,7 +6,7 @@ const assert = require('assert');
const eachLimit = require('async/eachLimit');
const rimraf = require('rimraf');
const DecompressZip = require('decompress-zip');
const extractZip = require('extract-zip');
const sharp = require('../../');
const fixtures = require('../fixtures');
@@ -14,7 +14,8 @@ const fixtures = require('../fixtures');
// Verifies all tiles in a given dz output directory are <= size
const assertDeepZoomTiles = function (directory, expectedSize, expectedLevels, done) {
// Get levels
const levels = fs.readdirSync(directory);
const dirents = fs.readdirSync(directory, { withFileTypes: true });
const levels = dirents.filter(dirent => dirent.isDirectory()).map(dirent => dirent.name);
assert.strictEqual(expectedLevels, levels.length);
// Get tiles
const tiles = [];
@@ -67,8 +68,10 @@ const assertZoomifyTiles = function (directory, expectedTileSize, expectedLevels
};
const assertGoogleTiles = function (directory, expectedTileSize, expectedLevels, done) {
const levels = fs.readdirSync(directory);
assert.strictEqual(expectedLevels, levels.length - 1); // subtract one to account for default blank tile
// Get levels
const dirents = fs.readdirSync(directory, { withFileTypes: true });
const levels = dirents.filter(dirent => dirent.isDirectory()).map(dirent => dirent.name);
assert.strictEqual(expectedLevels, levels.length);
fs.stat(path.join(directory, 'blank.png'), function (err, stat) {
if (err) throw err;
@@ -94,7 +97,8 @@ const assertGoogleTiles = function (directory, expectedTileSize, expectedLevels,
// Verifies tiles at specified level in a given output directory are > size+overlap
const assertTileOverlap = function (directory, tileSize, done) {
// Get sorted levels
const levels = fs.readdirSync(directory).sort((a, b) => a - b);
const dirents = fs.readdirSync(directory, { withFileTypes: true });
const levels = dirents.filter(dirent => dirent.isDirectory()).map(dirent => dirent.name).sort((a, b) => a - b);
// Select the highest tile level
const highestLevel = levels[levels.length - 1];
// Get sorted tiles from greatest level
@@ -313,6 +317,14 @@ describe('Tile', function () {
});
});
it('Invalid basename parameter value fails', function () {
assert.throws(function () {
sharp().tile({
basename: true
});
});
});
it('Deep Zoom layout', function (done) {
const directory = fixtures.path('output.dzi_files');
rimraf(directory, function () {
@@ -908,14 +920,11 @@ describe('Tile', function () {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
new DecompressZip(container)
.on('extract', function () {
extractZip(container, { dir: path.dirname(extractTo) })
.then(() => {
assertDeepZoomTiles(directory, 256, 13, done);
})
.on('error', function (err) {
throw err;
})
.extract({ path: path.dirname(extractTo) });
.catch(done);
});
});
});
@@ -942,14 +951,40 @@ describe('Tile', function () {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
new DecompressZip(container)
.on('extract', function () {
extractZip(container, { dir: path.dirname(extractTo) })
.then(() => {
assertDeepZoomTiles(directory, 256, 13, done);
})
.on('error', function (err) {
throw err;
.catch(done);
});
});
});
});
it('Write ZIP container to Buffer', function (done) {
const container = fixtures.path('output.dz.tiles.zip');
const extractTo = fixtures.path('output.dz.tiles');
const directory = path.join(extractTo, 'output.dz.tiles_files');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({ basename: 'output.dz.tiles' })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
fs.writeFileSync(container, data);
fs.stat(container, function (err, stat) {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
extractZip(container, { dir: path.dirname(extractTo) })
.then(() => {
assertDeepZoomTiles(directory, 256, 13, done);
})
.extract({ path: path.dirname(extractTo) });
.catch(done);
});
});
});
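The new test above pairs with the added Buffer/Stream support for tile output: with Buffer output, tile() produces a ZIP container whose internal names come from basename. A minimal sketch with hypothetical file names:

const fs = require('fs');
const sharp = require('sharp');

// Write Deep Zoom tiles as a ZIP-compressed Buffer, then persist it.
sharp('input.jpg') // hypothetical input file
  .tile({ basename: 'pyramid' })
  .toBuffer()
  .then((zip) => fs.writeFileSync('pyramid.zip', zip)) // hypothetical output file
  .catch(console.error);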

View File

@@ -7,22 +7,6 @@ const inRange = require('../../lib/is').inRange;
const fixtures = require('../fixtures');
describe('Trim borders', function () {
it('Threshold default', function (done) {
const expected = fixtures.expected('alpha-layer-1-fill-trim-resize.png');
sharp(fixtures.inputPngOverlayLayer1)
.resize(450, 322)
.trim()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(450, info.width);
assert.strictEqual(322, info.height);
assert.strictEqual(-204, info.trimOffsetLeft);
assert.strictEqual(0, info.trimOffsetTop);
fixtures.assertSimilar(expected, data, done);
});
});
it('Skip shrink-on-load', function (done) {
const expected = fixtures.expected('alpha-layer-2-trim-resize.jpg');
sharp(fixtures.inputJpgOverlayLayer2)
@@ -41,7 +25,7 @@ describe('Trim borders', function () {
});
});
it('single colour PNG where alpha channel provides the image', () =>
it('Single colour PNG where alpha channel provides the image', () =>
sharp(fixtures.inputPngImageInAlpha)
.trim()
.toBuffer({ resolveWithObject: true })
@@ -94,7 +78,7 @@ describe('Trim borders', function () {
.catch(done);
});
it('should rotate before trim', () =>
it('Should rotate before trim', () =>
sharp({
create: {
width: 20,
@@ -128,13 +112,136 @@ describe('Trim borders', function () {
)
);
describe('Invalid thresholds', function () {
[-1, 'fail', {}].forEach(function (threshold) {
it(JSON.stringify(threshold), function () {
it('Ensure trim uses bounding box of alpha and non-alpha channels', async () => {
const { info } = await sharp(fixtures.inputPngTrimIncludeAlpha)
.trim()
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 179);
assert.strictEqual(height, 123);
assert.strictEqual(trimOffsetTop, -44);
assert.strictEqual(trimOffsetLeft, -13);
});
it('Ensure trim of image with all pixels same is no-op', async () => {
const { info } = await sharp({
create: {
width: 5,
height: 5,
channels: 3,
background: 'red'
}
})
.trim()
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 5);
assert.strictEqual(height, 5);
assert.strictEqual(trimOffsetTop, 0);
assert.strictEqual(trimOffsetLeft, 0);
});
describe('Valid parameters', function () {
const expected = fixtures.expected('alpha-layer-1-fill-trim-resize.png');
Object.entries({
'Background and threshold default': undefined,
'Background string': '#00000000',
'Background option': {
background: '#00000000'
},
'Threshold number': 10,
'Threshold option': {
threshold: 10
}
}).forEach(function ([description, parameter]) {
it(description, function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.resize(450, 322)
.trim(parameter)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(450, info.width);
assert.strictEqual(322, info.height);
assert.strictEqual(-204, info.trimOffsetLeft);
assert.strictEqual(0, info.trimOffsetTop);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
describe('Invalid parameters', function () {
Object.entries({
'Invalid background string': 'fail',
'Invalid background option': {
background: 'fail'
},
'Negative threshold number': -1,
'Negative threshold option': {
threshold: -1
},
Boolean: false
}).forEach(function ([description, parameter]) {
it(description, function () {
assert.throws(function () {
sharp().trim(threshold);
sharp().trim(parameter);
});
});
});
});
describe('Specific background colour', function () {
it('Doesn\'t trim at all', async () => {
const { info } = await sharp(fixtures.inputPngTrimSpecificColour)
.trim('yellow')
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 900);
assert.strictEqual(height, 600);
assert.strictEqual(trimOffsetTop, 0);
assert.strictEqual(trimOffsetLeft, 0);
});
it('Only trims the bottom', async () => {
const { info } = await sharp(fixtures.inputPngTrimSpecificColour)
.trim('#21468B')
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 900);
assert.strictEqual(height, 401);
assert.strictEqual(trimOffsetTop, 0);
assert.strictEqual(trimOffsetLeft, 0);
});
it('Only trims the bottom, in 16-bit', async () => {
const { info } = await sharp(fixtures.inputPngTrimSpecificColour16bit)
.trim('#21468B')
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 900);
assert.strictEqual(height, 401);
assert.strictEqual(trimOffsetTop, 0);
assert.strictEqual(trimOffsetLeft, 0);
});
it('Only trims the bottom, including alpha', async () => {
const { info } = await sharp(fixtures.inputPngTrimSpecificColourIncludeAlpha)
.trim('#21468B80')
.toBuffer({ resolveWithObject: true });
const { width, height, trimOffsetTop, trimOffsetLeft } = info;
assert.strictEqual(width, 900);
assert.strictEqual(height, 401);
assert.strictEqual(trimOffsetTop, 0);
assert.strictEqual(trimOffsetLeft, 0);
});
});
});
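The trim tests above exercise the extended API, which accepts a threshold number, a background colour string, or an options object. A minimal sketch with a hypothetical input file:

const sharp = require('sharp');

// Trim only edges matching a specific background colour; an 8-digit hex
// value (e.g. '#21468B80') also takes the alpha channel into account.
sharp('input.png') // hypothetical input file
  .trim('#21468B') // or .trim({ background: '#21468B' }) / .trim(10) / .trim({ threshold: 10 })
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.trimOffsetTop, info.trimOffsetLeft))
  .catch(console.error);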

View File

@@ -102,7 +102,7 @@ describe('Utilities', function () {
['input', 'output'].forEach(function (direction) {
assert.strictEqual(true, direction in sharp.format[format]);
assert.strictEqual('object', typeof sharp.format[format][direction]);
assert.strictEqual(3, Object.keys(sharp.format[format][direction]).length);
assert.strictEqual(true, [3, 4].includes(Object.keys(sharp.format[format][direction]).length));
assert.strictEqual(true, 'file' in sharp.format[format][direction]);
assert.strictEqual(true, 'buffer' in sharp.format[format][direction]);
assert.strictEqual(true, 'stream' in sharp.format[format][direction]);
@@ -126,6 +126,12 @@ describe('Utilities', function () {
assert.strictEqual(false, sharp.format.vips[direction].stream);
});
});
it('input fileSuffix', function () {
assert.deepStrictEqual(['.jpg', '.jpeg', '.jpe'], sharp.format.jpeg.input.fileSuffix);
});
it('output alias', function () {
assert.deepStrictEqual(['jpe', 'jpg'], sharp.format.jpeg.output.alias);
});
});
describe('Versions', function () {
@@ -140,7 +146,6 @@ describe('Utilities', function () {
assert.strictEqual('object', typeof sharp.vendor);
assert.strictEqual('string', typeof sharp.vendor.current);
assert.strictEqual(true, Array.isArray(sharp.vendor.installed));
assert.strictEqual(true, sharp.vendor.installed.length > 0);
});
});
});

View File

@@ -121,18 +121,40 @@ describe('WebP', function () {
});
});
it('invalid reductionEffort (deprecated) throws', () => {
assert.throws(() => {
sharp().webp({ reductionEffort: true });
});
});
it('out of range effort throws', () => {
assert.throws(() => {
sharp().webp({ effort: -1 });
});
});
it('should set effort to 0', () => {
const effort = sharp().webp({ effort: 0 }).options.webpEffort;
assert.strictEqual(effort, 0);
});
it('valid minSize', () => {
assert.doesNotThrow(() => sharp().webp({ minSize: true }));
});
it('invalid minSize throws', () => {
assert.throws(
() => sharp().webp({ minSize: 1 }),
/Expected boolean for webpMinSize but received 1 of type number/
);
});
it('valid mixed', () => {
assert.doesNotThrow(() => sharp().webp({ mixed: true }));
});
it('invalid mixed throws', () => {
assert.throws(
() => sharp().webp({ mixed: 'fail' }),
/Expected boolean for webpMixed but received fail of type string/
);
});
it('invalid loop throws', () => {
assert.throws(() => {
sharp().webp({ loop: -1 });