Compare commits
87 Commits
| SHA1 |
|---|
| 83bb6a4554 |
| c72d42816d |
| 35a81a7af2 |
| 9dc8db4370 |
| 47ae1f52db |
| ec17d7f580 |
| da5453a7c0 |
| f7bed69ffb |
| 7aa340232e |
| 68823a5edb |
| 1c3ba303ea |
| 76f8112952 |
| 24150eac89 |
| 51121a1440 |
| 3b370b6c01 |
| 884947a069 |
| f8340e1a82 |
| 5101f4e79c |
| 161d127bf3 |
| 638d540371 |
| d8f1298511 |
| d67e09ba7c |
| 4c3a8a7007 |
| 92399ee5e2 |
| 96992845ed |
| 2b8e4d20de |
| dd6583044b |
| 446e4e3c3a |
| 3b492ea423 |
| 3da258f6fb |
| 9755629cfd |
| 5bb6702717 |
| 8c0660d71e |
| 513fb40f40 |
| 659cdabd8e |
| add4c7928f |
| 336856dfc2 |
| e1ba2a7fd8 |
| 2b1f5cbe07 |
| 72025051c5 |
| 140eeebb3d |
| 407bfcb42a |
| 549219f32a |
| afab0d34dd |
| 2deced0fb9 |
| 2a0077c481 |
| f7f3e43490 |
| b876abaf88 |
| dcd9a3c558 |
| a06b8c296a |
| 602f988aba |
| 079bd7b9f5 |
| 1ff84b20b7 |
| 97655d2dfd |
| d10d7b02d4 |
| 2ffdae2914 |
| 342de36973 |
| b33231d4bd |
| 319db21f29 |
| d359331426 |
| 7ae151362b |
| 648a1e05da |
| b9f211fe34 |
| e475d9e47f |
| f37ca8249a |
| 1dd4be670d |
| 197d4cf835 |
| 83eed86b53 |
| bbf612cb9e |
| 2679bb567b |
| 481e350f39 |
| 50c7a08754 |
| 9a0bb60737 |
| deb5d81221 |
| 916b04dbac |
| 52307fad5d |
| afb21135c2 |
| b7fbffb3f7 |
| 5d98bcd8d8 |
| e044788f63 |
| 4a9267ce12 |
| 104464c2e0 |
| 60adc110f5 |
| 2031d7d112 |
| 3402656ec5 |
| 4e84f743e4 |
| 17e50de5f0 |
@@ -43,13 +43,14 @@ jobs:
steps:
- checkout
- run: |
sudo chown 0.0 ${PWD}
sudo docker run -dit --name sharp --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp arm64v8/debian:bullseye
sudo docker run -dit --name sharp --workdir /mnt/sharp arm64v8/debian:bullseye
sudo docker exec sharp sh -c "apt-get update && apt-get install -y build-essential git python3 curl"
sudo docker exec sharp sh -c "curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add -"
sudo docker exec sharp sh -c "echo 'deb https://deb.nodesource.com/node_16.x sid main' >/etc/apt/sources.list.d/nodesource.list"
sudo docker exec sharp sh -c "apt-get update && apt-get install -y nodejs"
- run: sudo docker exec sharp sh -c "npm install --build-from-source --unsafe-perm"
sudo docker exec sharp sh -c "mkdir -p /mnt/sharp"
sudo docker cp . sharp:/mnt/sharp/.
- run: sudo docker exec sharp sh -c "npm install --build-from-source"
- run: sudo docker exec sharp sh -c "npm test"
linux-arm64-musl-node-12:
resource_class: arm.medium

@@ -70,8 +71,9 @@ jobs:
steps:
- checkout
- run: |
sudo chown 0.0 ${PWD}
sudo docker run -dit --name sharp --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:16-alpine3.11
sudo docker run -dit --name sharp --workdir /mnt/sharp node:16-alpine3.11
sudo docker exec sharp sh -c "apk add build-base git python3 --update-cache"
- run: sudo docker exec sharp sh -c "npm install --build-from-source --unsafe-perm"
sudo docker exec sharp sh -c "mkdir -p /mnt/sharp"
sudo docker cp . sharp:/mnt/sharp/.
- run: sudo docker exec sharp sh -c "npm install --build-from-source"
- run: sudo docker exec sharp sh -c "npm test"
.github/workflows/ci-darwin-arm64v8.yml

@@ -5,6 +5,15 @@ on:
jobs:
CI:
runs-on: macos-m1
strategy:
fail-fast: false
matrix:
include:
- nodejs_version: 12
nodejs_architecture: x64
- nodejs_version: 16
nodejs_architecture: arm64
prebuild: true
defaults:
run:
shell: /usr/bin/arch -arch arm64e /bin/bash -l {0}

@@ -12,8 +21,8 @@ jobs:
- name: Dependencies
uses: actions/setup-node@v2
with:
node-version: 16
architecture: arm64
node-version: ${{ matrix.nodejs_version }}
architecture: ${{ matrix.nodejs_architecture }}
- name: Checkout
uses: actions/checkout@v2
- name: Install

@@ -21,7 +30,7 @@ jobs:
- name: Test
run: npm test
- name: Prebuild
if: startsWith(github.ref, 'refs/tags/')
if: matrix.prebuild && startsWith(github.ref, 'refs/tags/')
env:
prebuild_upload: ${{ secrets.GITHUB_TOKEN }}
run: npx prebuild --runtime napi --target 5
@@ -4,7 +4,7 @@

The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG, WebP and AVIF images of varying dimensions.
smaller, web-friendly JPEG, PNG, WebP, GIF and AVIF images of varying dimensions.

Resizing an image is typically 4x-5x faster than using the
quickest ImageMagick and GraphicsMagick settings

@@ -103,7 +103,7 @@ covers reporting bugs, requesting features and submitting code changes.

## Licensing

Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Lovell Fuller and contributors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -39,6 +39,7 @@
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'Optimization': 1,
'RuntimeLibrary': '2', # /MD
'WholeProgramOptimization': 'true'
},
'VCLibrarianTool': {

@@ -205,6 +206,7 @@
'VCCLCompilerTool': {
'ExceptionHandling': 1,
'Optimization': 1,
'RuntimeLibrary': '2', # /MD
'WholeProgramOptimization': 'true'
},
'VCLibrarianTool': {
@@ -4,7 +4,7 @@

The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG, AVIF and WebP images of varying dimensions.
smaller, web-friendly JPEG, PNG, WebP, GIF and AVIF images of varying dimensions.

Resizing an image is typically 4x-5x faster than using the
quickest ImageMagick and GraphicsMagick settings

@@ -21,9 +21,9 @@ do not require any additional install or runtime dependencies.

### Formats

This module supports reading JPEG, PNG, WebP, AVIF, TIFF, GIF and SVG images.
This module supports reading JPEG, PNG, WebP, GIF, AVIF, TIFF and SVG images.

Output images can be in JPEG, PNG, WebP, AVIF and TIFF formats as well as uncompressed raw pixel data.
Output images can be in JPEG, PNG, WebP, GIF, AVIF and TIFF formats as well as uncompressed raw pixel data.

Streams, Buffer objects and the filesystem can be used for input and output.

@@ -70,7 +70,7 @@ covers reporting bugs, requesting features and submitting code changes.

### Licensing

Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Lovell Fuller and contributors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -4,7 +4,7 @@

Constructor factory to create an instance of `sharp`, to which further methods are chained.

JPEG, PNG, WebP, AVIF or TIFF format image data can be streamed out from this object.
JPEG, PNG, WebP, GIF, AVIF or TIFF format image data can be streamed out from this object.
When using Stream based output, derived attributes are available from the `info` event.

Non-critical problems encountered during processing are emitted as `warning` events.

@@ -25,6 +25,7 @@ Implements the [stream.Duplex][1] class.

* `options.limitInputPixels` **([number][15] | [boolean][14])** Do not process input images where the number of pixels
(width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). (optional, default `268402689`)
* `options.unlimited` **[boolean][14]** Set this to `true` to remove safety features that help prevent memory exhaustion (SVG, PNG). (optional, default `false`)
* `options.sequentialRead` **[boolean][14]** Set this to `true` to use sequential rather than random access where possible.
This can reduce memory usage and might improve performance on some systems. (optional, default `false`)
* `options.density` **[number][15]** number representing the DPI for vector images in the range 1 to 100000. (optional, default `72`)
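An illustrative sketch (not part of this diff) of the constructor options listed above, including the new `unlimited` flag; `largeInput.svg` is a placeholder path:

```javascript
const sharp = require('sharp');

// For a trusted, very large vector input: remove the pixel-count limit and the
// SVG/PNG memory safeguards, and prefer sequential access to reduce memory use.
const image = sharp('largeInput.svg', {
  limitInputPixels: false, // disable the default 0x3FFF x 0x3FFF pixel limit
  unlimited: true,         // new in this release: switch off SVG/PNG safety features
  sequentialRead: true,
  density: 300             // DPI used when rasterising the vector input
});
```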
@@ -2,7 +2,11 @@

## metadata

Fast access to (uncached) image metadata without decoding any compressed image data.
Fast access to (uncached) image metadata without decoding any compressed pixel data.

This is taken from the header of the input image.
It does not include operations, such as resize, to be applied to the output image.

A `Promise` is returned when `callback` is not provided.

* `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`

@@ -24,6 +28,7 @@ A `Promise` is returned when `callback` is not provided.

* `subifds`: Number of Sub Image File Directories in an OME-TIFF image
* `background`: Default background colour, if present, for PNG (bKGD) and GIF images, either an RGB Object or a single greyscale value
* `compression`: The encoder used to compress an HEIF file, `av1` (AVIF) or `hevc` (HEIC)
* `resolutionUnit`: The unit of resolution (density), either `inch` or `cm`, if present
* `hasProfile`: Boolean indicating the presence of an embedded ICC profile
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* `orientation`: Number value of the EXIF Orientation header, if present

@@ -39,6 +44,10 @@ A `Promise` is returned when `callback` is not provided.

### Examples

```javascript
const metadata = await sharp(input).metadata();
```

```javascript
const image = sharp(inputJpg);
image
@@ -159,14 +159,28 @@ Returns **Sharp**

## blur

Blur the image.
When used without parameters, performs a fast, mild blur of the output image.

When used without parameters, performs a fast 3x3 box blur (equivalent to a box linear filter).

When a `sigma` is provided, performs a slower, more accurate Gaussian blur.

### Parameters

* `sigma` **[number][1]?** a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.

<!---->

### Examples

```javascript
const boxBlurred = await sharp(input)
.blur()
.toBuffer();
```

```javascript
const gaussianBlurred = await sharp(input)
.blur(5)
.toBuffer();
```

* Throws **[Error][5]** Invalid parameters

@@ -400,7 +414,9 @@ Returns **Sharp**

## modulate

Transforms the image using brightness, saturation and hue rotation.
Transforms the image using brightness, saturation, hue rotation, and lightness.
Brightness and lightness both operate on luminance, with the difference being that
brightness is multiplicative whereas lightness is additive.

### Parameters

@@ -409,13 +425,14 @@ Transforms the image using brightness, saturation and hue rotation.

* `options.brightness` **[number][1]?** Brightness multiplier
* `options.saturation` **[number][1]?** Saturation multiplier
* `options.hue` **[number][1]?** Degrees for hue rotation
* `options.lightness` **[number][1]?** Lightness addend

### Examples

```javascript
sharp(input)
.modulate({
brightness: 2 // increase lightness by a factor of 2
brightness: 2 // increase brightness by a factor of 2
});

sharp(input)

@@ -423,6 +440,11 @@ sharp(input)

hue: 180 // hue-rotate by 180 degrees
});

sharp(input)
.modulate({
lightness: 50 // increase lightness by +50
});

// decrease brightness and saturation while also hue-rotating by 90 degrees
sharp(input)
.modulate({
@@ -5,12 +5,14 @@

Write output image data to a file.

If an explicit output format is not selected, it will be inferred from the extension,
with JPEG, PNG, WebP, AVIF, TIFF, DZI, and libvips' V format supported.
with JPEG, PNG, WebP, AVIF, TIFF, GIF, DZI, and libvips' V format supported.
Note that raw pixel data is only supported for buffer output.

By default all metadata will be removed, which includes EXIF-based orientation.
See [withMetadata][1] for control over this.

The caller is responsible for ensuring directory structures and permissions exist.

A `Promise` is returned when `callback` is not provided.

### Parameters

@@ -42,9 +44,9 @@ Returns **[Promise][5]<[Object][6]>** when no callback is provided

## toBuffer

Write output to a Buffer.
JPEG, PNG, WebP, AVIF, TIFF and raw pixel data output are supported.
JPEG, PNG, WebP, AVIF, TIFF, GIF and raw pixel data output are supported.

If no explicit format is set, the output format will match the input image, except GIF and SVG input which become PNG output.
If no explicit format is set, the output format will match the input image, except SVG input which becomes PNG output.

By default all metadata will be removed, which includes EXIF-based orientation.
See [withMetadata][1] for control over this.

@@ -145,8 +147,9 @@ const data = await sharp(input)

}
})
.toBuffer();
```

* @example
```javascript
// Set output metadata to 96 DPI
const data = await sharp(input)
.withMetadata({ density: 96 })

@@ -241,6 +244,7 @@ Set `palette` to `true` for slower, indexed PNG output.

* `options.adaptiveFiltering` **[boolean][7]** use adaptive row filtering (optional, default `false`)
* `options.palette` **[boolean][7]** quantise to a palette-based image with alpha transparency support (optional, default `false`)
* `options.quality` **[number][9]** use the lowest number of colours needed to achieve given quality, sets `palette` to `true` (optional, default `100`)
* `options.effort` **[number][9]** CPU effort, between 1 (fastest) and 10 (slowest), sets `palette` to `true` (optional, default `7`)
* `options.colours` **[number][9]** maximum number of palette entries, sets `palette` to `true` (optional, default `256`)
* `options.colors` **[number][9]** alternative spelling of `options.colours`, sets `palette` to `true` (optional, default `256`)
* `options.dither` **[number][9]** level of Floyd-Steinberg error diffusion, sets `palette` to `true` (optional, default `1.0`)
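An illustrative sketch (not taken from the diff) of the palette-based PNG options listed above, using the `effort` setting introduced in this release; `input` is a placeholder:

```javascript
const data = await sharp(input)
  .png({
    palette: true, // quantise to a palette-based PNG
    colours: 64,   // limit the palette size
    dither: 0.5,   // reduce Floyd-Steinberg error diffusion
    effort: 10     // spend maximum CPU effort on compression
  })
  .toBuffer();
```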
@@ -279,10 +283,9 @@ Use these WebP options for output image.

* `options.lossless` **[boolean][7]** use lossless compression mode (optional, default `false`)
* `options.nearLossless` **[boolean][7]** use near_lossless compression mode (optional, default `false`)
* `options.smartSubsample` **[boolean][7]** use high quality chroma subsampling (optional, default `false`)
* `options.reductionEffort` **[number][9]** level of CPU effort to reduce file size, integer 0-6 (optional, default `4`)
* `options.pageHeight` **[number][9]?** page height for animated output
* `options.effort` **[number][9]** CPU effort, between 0 (fastest) and 6 (slowest) (optional, default `4`)
* `options.loop` **[number][9]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
* `options.delay` **[Array][10]<[number][9]>?** list of delays between animation frames (in milliseconds)
* `options.delay` **([number][9] | [Array][10]<[number][9]>)?** delay(s) between animation frames (in milliseconds)
* `options.force` **[boolean][7]** force WebP output, otherwise attempt to use input format (optional, default `true`)

### Examples

@@ -297,7 +300,7 @@ const data = await sharp(input)

```javascript
// Optimise the file size of an animated WebP
const outputWebp = await sharp(inputWebp, { animated: true })
.webp({ reductionEffort: 6 })
.webp({ effort: 6 })
.toBuffer();
```

@@ -307,9 +310,58 @@ Returns **Sharp**

## gif

Use these GIF options for output image.
Use these GIF options for the output image.

Requires libvips compiled with support for ImageMagick or GraphicsMagick.
The first entry in the palette is reserved for transparency.

### Parameters

* `options` **[Object][6]?** output options

* `options.colours` **[number][9]** maximum number of palette entries, including transparency, between 2 and 256 (optional, default `256`)
* `options.colors` **[number][9]** alternative spelling of `options.colours` (optional, default `256`)
* `options.effort` **[number][9]** CPU effort, between 1 (fastest) and 10 (slowest) (optional, default `7`)
* `options.dither` **[number][9]** level of Floyd-Steinberg error diffusion, between 0 (least) and 1 (most) (optional, default `1.0`)
* `options.loop` **[number][9]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
* `options.delay` **([number][9] | [Array][10]<[number][9]>)?** delay(s) between animation frames (in milliseconds)
* `options.force` **[boolean][7]** force GIF output, otherwise attempt to use input format (optional, default `true`)

### Examples

```javascript
// Convert PNG to GIF
await sharp(pngBuffer)
.gif()
.toBuffer();
```

```javascript
// Convert animated WebP to animated GIF
await sharp('animated.webp', { animated: true })
.toFile('animated.gif');
```

```javascript
// Create a 128x128, cropped, non-dithered, animated thumbnail of an animated GIF
const out = await sharp('in.gif', { animated: true })
.resize({ width: 128, height: 128 })
.gif({ dither: 0 })
.toBuffer();
```

* Throws **[Error][4]** Invalid options

Returns **Sharp**

**Meta**

* **since**: 0.30.0

## jp2

Use these JP2 options for output image.

Requires libvips compiled with support for OpenJPEG.
The prebuilt binaries do not include this - see
[installing a custom libvips][11].

@@ -317,21 +369,45 @@ The prebuilt binaries do not include this - see

* `options` **[Object][6]?** output options

* `options.pageHeight` **[number][9]?** page height for animated output
* `options.loop` **[number][9]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
* `options.delay` **[Array][10]<[number][9]>?** list of delays between animation frames (in milliseconds)
* `options.force` **[boolean][7]** force GIF output, otherwise attempt to use input format (optional, default `true`)
* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `80`)
* `options.lossless` **[boolean][7]** use lossless compression mode (optional, default `false`)
* `options.tileWidth` **[number][9]** horizontal tile size (optional, default `512`)
* `options.tileHeight` **[number][9]** vertical tile size (optional, default `512`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)

<!---->

### Examples

```javascript
// Convert any input to lossless JP2 output
const data = await sharp(input)
.jp2({ lossless: true })
.toBuffer();
```

```javascript
// Convert any input to very high quality JP2 output
const data = await sharp(input)
.jp2({
quality: 100,
chromaSubsampling: '4:4:4'
})
.toBuffer();
```

* Throws **[Error][4]** Invalid options

Returns **Sharp**

**Meta**

* **since**: 0.29.1

## tiff

Use these TIFF options for output image.

The `density` can be set in pixels/inch via [withMetadata][1] instead of providing `xres` and `yres` in pixels/mm.

### Parameters

* `options` **[Object][6]?** output options

@@ -346,6 +422,7 @@ Use these TIFF options for output image.

* `options.tileHeight` **[number][9]** vertical tile size (optional, default `256`)
* `options.xres` **[number][9]** horizontal resolution in pixels/mm (optional, default `1.0`)
* `options.yres` **[number][9]** vertical resolution in pixels/mm (optional, default `1.0`)
* `options.resolutionUnit` **[string][2]** resolution unit options: inch, cm (optional, default `'inch'`)
* `options.bitdepth` **[number][9]** reduce bitdepth to 1, 2 or 4 bit (optional, default `8`)

### Examples
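A minimal sketch (added for illustration, not part of the diff) combining existing TIFF options with the `resolutionUnit` option added in this release; `input` and `output.tiff` are placeholders:

```javascript
await sharp(input)
  .tiff({
    compression: 'lzw',
    xres: 10,            // horizontal resolution in pixels/mm
    yres: 10,            // vertical resolution in pixels/mm
    resolutionUnit: 'cm' // new option: record the density per centimetre
  })
  .toFile('output.tiff');
```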
@@ -372,13 +449,15 @@ Use these AVIF options for output image.

Whilst it is possible to create AVIF images smaller than 16x16 pixels,
most web browsers do not display these properly.

AVIF image sequences are not supported.

### Parameters

* `options` **[Object][6]?** output options

* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
* `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
* `options.effort` **[number][9]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)

<!---->
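An illustrative sketch (not from the diff) of AVIF output using the new `effort` option that replaces the deprecated `speed`; `input` is a placeholder:

```javascript
const avif = await sharp(input)
  .avif({
    quality: 50,
    effort: 4,                 // replaces the deprecated `speed` option
    chromaSubsampling: '4:2:0' // smaller files at the cost of some colour detail
  })
  .toBuffer();
```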
@@ -405,7 +484,7 @@ globally-installed libvips compiled with support for libheif, libde265 and x265.

* `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
* `options.compression` **[string][2]** compression format: av1, hevc (optional, default `'av1'`)
* `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
* `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
* `options.effort` **[number][9]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)

<!---->
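Another illustrative sketch (not from the diff) of HEIF output with the renamed `effort` option; as noted above, HEIC (`hevc`) output assumes a globally-installed libvips built with libheif, libde265 and x265:

```javascript
const heic = await sharp(input)
  .heif({
    compression: 'hevc', // produce HEIC rather than the default AVIF (av1)
    quality: 60,
    effort: 4            // replaces the deprecated `speed` option
  })
  .toBuffer();
```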
@@ -457,8 +536,6 @@ Use tile-based deep zoom (image pyramid) output.

Set the format and options for tile images via the `toFormat`, `jpeg`, `png` or `webp` functions.
Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed archive file format.

Warning: multiple sharp instances concurrently producing tile output can expose a possible race condition in some versions of libgsf.

### Parameters

* `options` **[Object][6]?**

@@ -470,10 +547,10 @@ Warning: multiple sharp instances concurrently producing tile output can expose

* `options.depth` **[string][2]?** how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout.
* `options.skipBlanks` **[number][9]** threshold to skip tile generation, a value 0 - 255 for 8-bit images or 0 - 65535 for 16-bit images (optional, default `-1`)
* `options.container` **[string][2]** tile container, with value `fs` (filesystem) or `zip` (compressed file). (optional, default `'fs'`)
* `options.layout` **[string][2]** filesystem layout, possible values are `dz`, `iiif`, `zoomify` or `google`. (optional, default `'dz'`)
* `options.layout` **[string][2]** filesystem layout, possible values are `dz`, `iiif`, `iiif3`, `zoomify` or `google`. (optional, default `'dz'`)
* `options.centre` **[boolean][7]** centre image in tile. (optional, default `false`)
* `options.center` **[boolean][7]** alternative spelling of centre. (optional, default `false`)
* `options.id` **[string][2]** when `layout` is `iiif`, sets the `@id` attribute of `info.json` (optional, default `'https://example.com/iiif'`)
* `options.id` **[string][2]** when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json` (optional, default `'https://example.com/iiif'`)

### Examples

@@ -493,6 +570,26 @@ sharp('input.tiff')

Returns **Sharp**

## timeout

Set a timeout for processing, in seconds.
Use a value of zero to continue processing indefinitely, the default behaviour.

The clock starts when libvips opens an input image for processing.
Time spent waiting for a libuv thread to become available is not included.

### Parameters

* `options` **[Object][6]**

* `options.seconds` **[number][9]** Number of seconds after which processing will be stopped

Returns **Sharp**

**Meta**

* **since**: 0.29.2
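A brief usage sketch (added for illustration, not part of the diff) of the `timeout` function documented above; `input` is a placeholder:

```javascript
// Stop processing and reject the promise if the operation takes longer than 5 seconds
const data = await sharp(input)
  .resize(4000)
  .timeout({ seconds: 5 })
  .toBuffer();
```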
[1]: #withmetadata

[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
@@ -49,6 +49,7 @@ Possible interpolation kernels are:

* `options.background` **([String][10] | [Object][9])** background colour when using a `fit` of `contain`, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
* `options.kernel` **[String][10]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
* `options.withoutEnlargement` **[Boolean][12]** do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
* `options.withoutReduction` **[Boolean][12]** do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option. (optional, default `false`)
* `options.fastShrinkOnLoad` **[Boolean][12]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)

### Examples

@@ -117,6 +118,21 @@ sharp(input)

});
```

```javascript
sharp(input)
.resize(200, 200, {
fit: sharp.fit.outside,
withoutReduction: true
})
.toFormat('jpeg')
.toBuffer()
.then(function(outputBuffer) {
// outputBuffer contains JPEG image data
// of at least 200 pixels wide and 200 pixels high while maintaining aspect ratio
// and no smaller than the input image
});
```

```javascript
const scaleByHalf = await sharp(input)
.metadata()
@@ -52,6 +52,17 @@ An Object containing the version numbers of libvips and its dependencies.

console.log(sharp.versions);
```

## vendor

An Object containing the platform and architecture
of the current and installed vendored binaries.

### Examples

```javascript
console.log(sharp.vendor);
```

## cache

Gets or, when options are provided, sets the limits of *libvips'* operation cache.
@@ -1,9 +1,116 @@

# Changelog

## v0.30 - *dresser*

Requires libvips v8.12.2

### v0.30.0 - 1st February 2022

* Add support for GIF output to prebuilt binaries.

* Reduce minimum Linux ARM64v8 glibc requirement to 2.17.

* Verify prebuilt binaries with a Subresource Integrity check.

* Standardise WebP `effort` option name, deprecate `reductionEffort`.

* Standardise HEIF `effort` option name, deprecate `speed`.

* Add support for IIIF v3 tile-based output.

* Expose control over CPU effort for palette-based PNG output.
  [#2541](https://github.com/lovell/sharp/issues/2541)

* Improve animated (multi-page) image resize and extract.
  [#2789](https://github.com/lovell/sharp/pull/2789)
  [@kleisauke](https://github.com/kleisauke)

* Expose platform and architecture of vendored binaries as `sharp.vendor`.
  [#2928](https://github.com/lovell/sharp/issues/2928)

* Ensure 16-bit PNG output uses correct bitdepth.
  [#2958](https://github.com/lovell/sharp/pull/2958)
  [@gforge](https://github.com/gforge)

* Properly emit close events for duplex streams.
  [#2976](https://github.com/lovell/sharp/pull/2976)
  [@driannaude](https://github.com/driannaude)

* Expose `unlimited` option for SVG and PNG input, switches off safety features.
  [#2984](https://github.com/lovell/sharp/issues/2984)

* Add `withoutReduction` option to resize operation.
  [#3006](https://github.com/lovell/sharp/pull/3006)
  [@christopherbradleybanks](https://github.com/christopherbradleybanks)

* Add `resolutionUnit` as `tiff` option and expose in metadata.
  [#3023](https://github.com/lovell/sharp/pull/3023)
  [@ompal-sisodiya](https://github.com/ompal-sisodiya)

* Ensure rotate-then-extract works with EXIF mirroring.
  [#3024](https://github.com/lovell/sharp/issues/3024)

## v0.29 - *circle*

Requires libvips v8.11.3

### v0.29.3 - 14th November 2021

* Ensure correct dimensions when containing image resized to 1px.
  [#2951](https://github.com/lovell/sharp/issues/2951)

* Impute TIFF `xres`/`yres` from `density` provided to `withMetadata`.
  [#2952](https://github.com/lovell/sharp/pull/2952)
  [@mbklein](https://github.com/mbklein)

### v0.29.2 - 21st October 2021

* Add `timeout` function to limit processing time.

* Ensure `sharp.versions` is populated from vendored libvips.

* Remove animation properties from single page images.
  [#2890](https://github.com/lovell/sharp/issues/2890)

* Allow use of 'tif' to select TIFF output.
  [#2893](https://github.com/lovell/sharp/pull/2893)
  [@erf](https://github.com/erf)

* Improve error message on Windows for version conflict.
  [#2918](https://github.com/lovell/sharp/pull/2918)
  [@dkrnl](https://github.com/dkrnl)

* Throw error rather than exit when invalid binaries detected.
  [#2931](https://github.com/lovell/sharp/issues/2931)

### v0.29.1 - 7th September 2021

* Add `lightness` option to `modulate` operation.
  [#2846](https://github.com/lovell/sharp/pull/2846)

* Ensure correct PNG bitdepth is set based on number of colours.
  [#2855](https://github.com/lovell/sharp/issues/2855)

* Ensure background is always premultiplied when compositing.
  [#2858](https://github.com/lovell/sharp/issues/2858)

* Ensure images with P3 profiles retain full gamut.
  [#2862](https://github.com/lovell/sharp/issues/2862)

* Add support for libvips compiled with OpenJPEG.
  [#2868](https://github.com/lovell/sharp/pull/2868)

* Remove unsupported animation properties from AVIF output.
  [#2870](https://github.com/lovell/sharp/issues/2870)

* Resolve paths before comparing input/output filenames.
  [#2878](https://github.com/lovell/sharp/pull/2878)
  [@rexxars](https://github.com/rexxars)

* Allow use of speed 9 (fastest) for HEIF encoding.
  [#2879](https://github.com/lovell/sharp/pull/2879)
  [@rexxars](https://github.com/rexxars)

### v0.29.0 - 17th August 2021

* Drop support for Node.js 10, now requires Node.js >= 12.13.0.
docs/docute.min.js

@@ -218,3 +218,21 @@ GitHub: https://github.com/Daiz

Name: Mart Jansink
GitHub: https://github.com/mart-jansink

Name: Tenpi
GitHub: https://github.com/Tenpi

Name: Zaruike
GitHub: https://github.com/Zaruike

Name: Erlend F
GitHub: https://github.com/erf

Name: Drian Naude
GitHub: https://github.com/driannaude

Name: Max Gordon
GitHub: https://github.com/gforge

Name: Chris Banks
GitHub: https://github.com/christopherbradleybanks
@@ -18,28 +18,29 @@ Ready-compiled sharp and libvips binaries are provided for use on the most commo

* macOS x64 (>= 10.13)
* macOS ARM64
* Linux x64 (glibc >= 2.17, musl >= 1.1.24)
* Linux ARM64 (glibc >= 2.29, musl >= 1.1.24)
* Linux x64 (glibc >= 2.17, musl >= 1.1.24, CPU with SSE4.2)
* Linux ARM64 (glibc >= 2.17, musl >= 1.1.24)
* Windows x64
* Windows x86

An ~7MB tarball containing libvips and its most commonly used dependencies
is downloaded via HTTPS and stored within `node_modules/sharp/vendor` during `npm install`.
A ~7MB tarball containing libvips and its most commonly used dependencies
is downloaded via HTTPS, verified via Subresource Integrity
and decompressed into `node_modules/sharp/vendor` during `npm install`.

This provides support for the
JPEG, PNG, WebP, AVIF, TIFF, GIF (input) and SVG (input) image formats.
JPEG, PNG, WebP, AVIF, TIFF, GIF and SVG (input) image formats.

The following platforms have prebuilt libvips but not sharp:

* Linux ARMv6
* Linux ARMv7 (glibc >= 2.28)
* Linux ARMv6 (glibc >= 2.28)
* Windows ARM64

The following platforms require compilation of both libvips and sharp from source:

* Linux x86
* Linux x64 (glibc <= 2.16, includes RHEL/CentOS 6)
* Linux ARM64 (glibc <= 2.28)
* Linux ARMv7 (glibc <= 2.27, musl)
* Linux ARMv6 (glibc <= 2.27, musl)
* Linux PowerPC
* FreeBSD
* OpenBSD

@@ -52,7 +53,7 @@ See the [cross-platform](#cross-platform) section if this is not the case.

When using npm v6 or earlier, the `npm install --unsafe-perm` flag must be used when installing as `root` or a `sudo` user.

When using npm v7, the user running `npm install` must own the directory it is run in.
When using npm v7 or later, the user running `npm install` must own the directory it is run in.

The `npm install --ignore-scripts=false` flag must be used when `npm` has been configured to ignore installation scripts.

@@ -60,9 +61,7 @@ Check the output of running `npm install --verbose sharp` for useful error messa

## Apple M1

Prebuilt sharp and libvips binaries are provided for macOS on ARM64 from sharp v0.29.0.

Prebuilt libvips binaries were provided for macOS on ARM64 from sharp v0.28.0.
Prebuilt sharp and libvips binaries have been provided for macOS on ARM64 since sharp v0.29.0.

## Cross-platform

@@ -78,7 +77,7 @@ npm install --platform=... --arch=... --arm-version=... sharp

* `--platform`: one of `linux`, `linuxmusl`, `darwin` or `win32`.
* `--arch`: one of `x64`, `ia32`, `arm` or `arm64`.
* `--arm-version`: one of `6`, `7` or `8` (`arm` defaults to `6`, `arm64` defaults to `8`).
* `--sharp-install-force`: skip version compatibility checks.
* `--sharp-install-force`: skip version compatibility and Subresource Integrity checks.

These values can also be set via environment variables,
`npm_config_platform`, `npm_config_arch`, `npm_config_arm_version`

@@ -145,16 +144,16 @@ A mirror site based in China, provided by Alibaba, contains binaries for both sh

To use this either set the following configuration:

```sh
npm config set sharp_binary_host "https://npm.taobao.org/mirrors/sharp"
npm config set sharp_libvips_binary_host "https://npm.taobao.org/mirrors/sharp-libvips"
npm config set sharp_binary_host "https://npmmirror.com/mirrors/sharp"
npm config set sharp_libvips_binary_host "https://npmmirror.com/mirrors/sharp-libvips"
npm install sharp
```

or set the following environment variables:

```sh
npm_config_sharp_binary_host="https://npm.taobao.org/mirrors/sharp" \
npm_config_sharp_libvips_binary_host="https://npm.taobao.org/mirrors/sharp-libvips" \
npm_config_sharp_binary_host="https://npmmirror.com/mirrors/sharp" \
npm_config_sharp_libvips_binary_host="https://npmmirror.com/mirrors/sharp-libvips" \
npm install sharp
```
@@ -5,12 +5,12 @@ A test to benchmark the performance of this module relative to alternatives.

## The contenders

* [jimp](https://www.npmjs.com/package/jimp) v0.16.1 - Image processing in pure JavaScript. Provides bicubic interpolation.
* [mapnik](https://www.npmjs.org/package/mapnik) v4.5.8 - Whilst primarily a map renderer, Mapnik contains bitmap image utilities.
* [mapnik](https://www.npmjs.org/package/mapnik) v4.5.9 - Whilst primarily a map renderer, Mapnik contains bitmap image utilities.
* [imagemagick](https://www.npmjs.com/package/imagemagick) v0.1.3 - Supports filesystem only and "*has been unmaintained for a long time*".
* [gm](https://www.npmjs.com/package/gm) v1.23.1 - Fully featured wrapper around GraphicsMagick's `gm` command line utility.
* [@squoosh/lib](https://www.npmjs.com/package/@squoosh/lib) v0.4.0 - Image libraries transpiled to WebAssembly, includes GPLv3 code.
* [@squoosh/cli](https://www.npmjs.com/package/@squoosh/cli) v0.7.2 - Command line wrapper around `@squoosh/lib`, avoids GPLv3 by spawning process.
* sharp v0.28.0 / libvips v8.10.6 - Caching within libvips disabled to ensure a fair comparison.
* sharp v0.30.0 / libvips v8.12.2 - Caching within libvips disabled to ensure a fair comparison.

## The task

@@ -21,23 +21,23 @@ then compress to JPEG at a "quality" setting of 80.

## Test environment

* AWS EC2 eu-west-1 [c5ad.xlarge](https://aws.amazon.com/ec2/instance-types/c5/) (4x AMD EPYC 7R32)
* Ubuntu 21.04 (ami-0d7626a9c2ceab1ac)
* Node.js 16.6.2
* Ubuntu 21.10 (ami-0258eeb71ddf238b3)
* Node.js 16.13.2

## Results

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| jimp | buffer | buffer | 0.83 | 1.0 |
| squoosh-cli | file | file | 1.09 | 1.3 |
| squoosh-lib | buffer | buffer | 1.83 | 2.2 |
| mapnik | buffer | buffer | 3.41 | 4.1 |
| gm | buffer | buffer | 8.34 | 10.0 |
| imagemagick | file | file | 8.67 | 10.4 |
| gm | file | file | 8.82 | 10.6 |
| sharp | stream | stream | 29.44 | 35.5 |
| sharp | file | file | 29.64 | 35.7 |
| sharp | buffer | buffer | 31.09 | 37.5 |
| jimp | buffer | buffer | 0.84 | 1.0 |
| squoosh-cli | file | file | 1.08 | 1.3 |
| squoosh-lib | buffer | buffer | 1.85 | 2.2 |
| mapnik | buffer | buffer | 3.45 | 4.1 |
| gm | buffer | buffer | 8.60 | 10.2 |
| gm | file | file | 8.66 | 10.3 |
| imagemagick | file | file | 8.79 | 10.5 |
| sharp | stream | stream | 28.90 | 34.4 |
| sharp | file | file | 30.08 | 35.8 |
| sharp | buffer | buffer | 30.42 | 36.2 |

Greater libvips performance can be expected with caching enabled (default)
and using 8+ core machines, especially those with larger L1/L2 CPU caches.
@@ -20,14 +20,19 @@ module.exports = [
'callback',
'can',
'containing',
'contains',
'current',
'date',
'default',
'does',
'each',
'either',
'ensure',
'entirely',
'etc',
'every',
'except',
'following',
'for',
'from',
'get',

@@ -37,10 +42,13 @@ module.exports = [
'have',
'how',
'image',
'implies',
'involve',
'its',
'last',
'least',
'lots',
'make',
'may',
'more',
'most',

@@ -56,6 +64,7 @@ module.exports = [
'over',
'perform',
'performs',
'produce',
'provide',
'provided',
'ready',

@@ -67,12 +76,15 @@ module.exports = [
'sets',
'should',
'since',
'site',
'specified',
'spelling',
'such',
'support',
'supported',
'sure',
'take',
'task',
'than',
'that',
'the',

@@ -84,12 +96,15 @@ module.exports = [
'this',
'under',
'unless',
'unmaintained',
'unsuitable',
'until',
'use',
'used',
'using',
'value',
'values',
'were',
'when',
'which',
'while',
@@ -5,6 +5,7 @@ const os = require('os');
const path = require('path');
const stream = require('stream');
const zlib = require('zlib');
const { createHash } = require('crypto');

const detectLibc = require('detect-libc');
const semverLessThan = require('semver/functions/lt');

@@ -18,7 +19,7 @@ const platform = require('../lib/platform');

const minimumGlibcVersionByArch = {
arm: '2.28',
arm64: '2.29',
arm64: '2.17',
x64: '2.17'
};

@@ -55,6 +56,33 @@ const handleError = function (err) {
}
};

const verifyIntegrity = function (platformAndArch) {
const expected = libvips.integrity(platformAndArch);
if (installationForced || !expected) {
libvips.log(`Integrity check skipped for ${platformAndArch}`);
return new stream.PassThrough();
}
const hash = createHash('sha512');
return new stream.Transform({
transform: function (chunk, _encoding, done) {
hash.update(chunk);
done(null, chunk);
},
flush: function (done) {
const digest = `sha512-${hash.digest('base64')}`;
if (expected !== digest) {
libvips.removeVendoredLibvips();
libvips.log(`Integrity expected: ${expected}`);
libvips.log(`Integrity received: ${digest}`);
done(new Error(`Integrity check failed for ${platformAndArch}`));
} else {
libvips.log(`Integrity check passed for ${platformAndArch}`);
done();
}
}
});
};

const extractTarball = function (tarPath, platformAndArch) {
const versionedVendorPath = path.join(__dirname, '..', 'vendor', minimumLibvipsVersion, platformAndArch);
libvips.mkdirSync(versionedVendorPath);

@@ -66,6 +94,7 @@ const extractTarball = function (tarPath, platformAndArch) {

stream.pipeline(
fs.createReadStream(tarPath),
verifyIntegrity(platformAndArch),
new zlib.BrotliDecompress(),
tarFs.extract(versionedVendorPath, { ignore }),
function (err) {

@@ -103,14 +132,16 @@ try {
throw new Error(`BSD/SunOS systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
// Linux libc version check
if (detectLibc.family === detectLibc.GLIBC && detectLibc.version && minimumGlibcVersionByArch[arch]) {
if (semverLessThan(`${detectLibc.version}.0`, `${minimumGlibcVersionByArch[arch]}.0`)) {
handleError(new Error(`Use with glibc ${detectLibc.version} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
const libcFamily = detectLibc.familySync();
const libcVersion = detectLibc.versionSync();
if (libcFamily === detectLibc.GLIBC && libcVersion && minimumGlibcVersionByArch[arch]) {
if (semverLessThan(`${libcVersion}.0`, `${minimumGlibcVersionByArch[arch]}.0`)) {
handleError(new Error(`Use with glibc ${libcVersion} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
}
}
if (detectLibc.family === detectLibc.MUSL && detectLibc.version) {
if (semverLessThan(detectLibc.version, '1.1.24')) {
handleError(new Error(`Use with musl ${detectLibc.version} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
if (libcFamily === detectLibc.MUSL && libcVersion) {
if (semverLessThan(libcVersion, '1.1.24')) {
handleError(new Error(`Use with musl ${libcVersion} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
}
}
// Node.js minimum version check

@@ -120,7 +151,7 @@ try {
}

// Download to per-process temporary file
const tarFilename = ['libvips', minimumLibvipsVersion, platformAndArch].join('-') + '.tar.br';
const tarFilename = ['libvips', minimumLibvipsVersionLabelled, platformAndArch].join('-') + '.tar.br';
const tarPathCache = path.join(libvips.cachePath(), tarFilename);
if (fs.existsSync(tarPathCache)) {
libvips.log(`Using cached ${tarPathCache}`);
@@ -13,7 +13,7 @@ const debuglog = util.debuglog('sharp');
/**
 * Constructor factory to create an instance of `sharp`, to which further methods are chained.
 *
 * JPEG, PNG, WebP, AVIF or TIFF format image data can be streamed out from this object.
 * JPEG, PNG, WebP, GIF, AVIF or TIFF format image data can be streamed out from this object.
 * When using Stream based output, derived attributes are available from the `info` event.
 *
 * Non-critical problems encountered during processing are emitted as `warning` events.

@@ -103,6 +103,7 @@ const debuglog = util.debuglog('sharp');
 * @param {number|boolean} [options.limitInputPixels=268402689] - Do not process input images where the number of pixels
 * (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
 * An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF).
 * @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (SVG, PNG).
 * @param {boolean} [options.sequentialRead=false] - Set this to `true` to use sequential rather than random access where possible.
 * This can reduce memory usage and might improve performance on some systems.
 * @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.

@@ -165,6 +166,7 @@ const Sharp = function (input, options) {
extendRight: 0,
extendBackground: [0, 0, 0, 255],
withoutEnlargement: false,
withoutReduction: false,
affineMatrix: [],
affineBackground: [0, 0, 0, 255],
affineIdx: 0,

@@ -199,6 +201,7 @@ const Sharp = function (input, options) {
brightness: 1,
saturation: 1,
hue: 0,
lightness: 0,
booleanBufferIn: null,
booleanFileIn: '',
joinChannelIn: [],

@@ -232,14 +235,23 @@ const Sharp = function (input, options) {
pngAdaptiveFiltering: false,
pngPalette: false,
pngQuality: 100,
pngColours: 256,
pngEffort: 7,
pngBitdepth: 8,
pngDither: 1,
jp2Quality: 80,
jp2TileHeight: 512,
jp2TileWidth: 512,
jp2Lossless: false,
jp2ChromaSubsampling: '4:4:4',
webpQuality: 80,
webpAlphaQuality: 100,
webpLossless: false,
webpNearLossless: false,
webpSmartSubsample: false,
webpReductionEffort: 4,
webpEffort: 4,
gifBitdepth: 8,
gifEffort: 7,
gifDither: 1,
tiffQuality: 80,
tiffCompression: 'jpeg',
tiffPredictor: 'horizontal',

@@ -250,10 +262,11 @@ const Sharp = function (input, options) {
tiffTileWidth: 256,
tiffXres: 1.0,
tiffYres: 1.0,
tiffResolutionUnit: 'inch',
heifQuality: 50,
heifLossless: false,
heifCompression: 'av1',
heifSpeed: 5,
heifEffort: 4,
heifChromaSubsampling: '4:4:4',
rawDepth: 'uchar',
tileSize: 256,

@@ -267,6 +280,7 @@ const Sharp = function (input, options) {
tileBackground: [255, 255, 255, 255],
tileCentre: false,
tileId: 'https://example.com/iiif',
timeoutSeconds: 0,
linearA: 1,
linearB: 0,
// Function to notify of libvips warnings

@@ -282,7 +296,8 @@ const Sharp = function (input, options) {
this.options.input = this._createInputDescriptor(input, options, { allowStream: true });
return this;
};
util.inherits(Sharp, stream.Duplex);
Object.setPrototypeOf(Sharp.prototype, stream.Duplex.prototype);
Object.setPrototypeOf(Sharp, stream.Duplex);

/**
 * Take a "snapshot" of the Sharp instance, returning a new instance.
lib/input.js

@@ -9,9 +9,9 @@ const sharp = require('./sharp');
 * @private
 */
function _inputOptionsFromObject (obj) {
const { raw, density, limitInputPixels, sequentialRead, failOnError, animated, page, pages, subifd } = obj;
return [raw, density, limitInputPixels, sequentialRead, failOnError, animated, page, pages, subifd].some(is.defined)
? { raw, density, limitInputPixels, sequentialRead, failOnError, animated, page, pages, subifd }
const { raw, density, limitInputPixels, unlimited, sequentialRead, failOnError, animated, page, pages, subifd } = obj;
return [raw, density, limitInputPixels, unlimited, sequentialRead, failOnError, animated, page, pages, subifd].some(is.defined)
? { raw, density, limitInputPixels, unlimited, sequentialRead, failOnError, animated, page, pages, subifd }
: undefined;
}

@@ -23,6 +23,7 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
const inputDescriptor = {
failOnError: true,
limitInputPixels: Math.pow(0x3FFF, 2),
unlimited: false,
sequentialRead: false
};
if (is.string(input)) {

@@ -83,6 +84,14 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw is.invalidParameterError('limitInputPixels', 'integer >= 0', inputOptions.limitInputPixels);
}
}
// unlimited
if (is.defined(inputOptions.unlimited)) {
if (is.bool(inputOptions.unlimited)) {
inputDescriptor.unlimited = inputOptions.unlimited;
} else {
throw is.invalidParameterError('unlimited', 'boolean', inputOptions.unlimited);
}
}
// sequentialRead
if (is.defined(inputOptions.sequentialRead)) {
if (is.bool(inputOptions.sequentialRead)) {

@@ -281,7 +290,11 @@ function _isStreamInput () {
}

/**
 * Fast access to (uncached) image metadata without decoding any compressed image data.
 * Fast access to (uncached) image metadata without decoding any compressed pixel data.
 *
 * This is taken from the header of the input image.
 * It does not include operations, such as resize, to be applied to the output image.
 *
 * A `Promise` is returned when `callback` is not provided.
 *
 * - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`

@@ -303,6 +316,7 @@ function _isStreamInput () {
 * - `subifds`: Number of Sub Image File Directories in an OME-TIFF image
 * - `background`: Default background colour, if present, for PNG (bKGD) and GIF images, either an RGB Object or a single greyscale value
 * - `compression`: The encoder used to compress an HEIF file, `av1` (AVIF) or `hevc` (HEIC)
 * - `resolutionUnit`: The unit of resolution (density), either `inch` or `cm`, if present
 * - `hasProfile`: Boolean indicating the presence of an embedded ICC profile
 * - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
 * - `orientation`: Number value of the EXIF Orientation header, if present

@@ -313,6 +327,9 @@ function _isStreamInput () {
 * - `tifftagPhotoshop`: Buffer containing raw TIFFTAG_PHOTOSHOP data, if present
 *
 * @example
 * const metadata = await sharp(input).metadata();
 *
 * @example
 * const image = sharp(inputJpg);
 * image
 * .metadata()
@@ -8,10 +8,11 @@ const semverCoerce = require('semver/functions/coerce');
const semverGreaterThanOrEqualTo = require('semver/functions/gte');

const platform = require('./platform');
const { config } = require('../package.json');

const env = process.env;
const minimumLibvipsVersionLabelled = env.npm_package_config_libvips || /* istanbul ignore next */
require('../package.json').config.libvips;
config.libvips;
const minimumLibvipsVersion = semverCoerce(minimumLibvipsVersionLabelled).version;

const spawnSyncOptions = {

@@ -19,6 +20,8 @@ const spawnSyncOptions = {
shell: true
};

const vendorPath = path.join(__dirname, '..', 'vendor', minimumLibvipsVersion, platform());

const mkdirSync = function (dirPath) {
try {
fs.mkdirSync(dirPath, { recursive: true });

@@ -39,6 +42,10 @@ const cachePath = function () {
return libvipsCachePath;
};

const integrity = function (platformAndArch) {
return env[`npm_package_config_integrity_${platformAndArch.replace('-', '_')}`] || config.integrity[platformAndArch];
};

const log = function (item) {
if (item instanceof Error) {
console.error(`sharp: Installation error: ${item.message}`);

@@ -67,10 +74,15 @@ const globalLibvipsVersion = function () {
};

const hasVendoredLibvips = function () {
const vendorPath = path.join(__dirname, '..', 'vendor', minimumLibvipsVersion, platform());
return fs.existsSync(vendorPath);
};

/* istanbul ignore next */
const removeVendoredLibvips = function () {
const rm = fs.rmSync ? fs.rmSync : fs.rmdirSync;
rm(vendorPath, { recursive: true, maxRetries: 3, force: true });
};

const pkgConfigPath = function () {
if (process.platform !== 'win32') {
const brewPkgConfigPath = spawnSync('which brew >/dev/null 2>&1 && eval $(brew --env) && echo $PKG_CONFIG_LIBDIR', spawnSyncOptions).stdout || '';

@@ -99,9 +111,11 @@ module.exports = {
minimumLibvipsVersion,
minimumLibvipsVersionLabelled,
cachePath,
integrity,
log,
globalLibvipsVersion,
hasVendoredLibvips,
removeVendoredLibvips,
pkgConfigPath,
useGlobalLibvips,
mkdirSync
@@ -245,8 +245,21 @@ function median (size) {

/**
 * Blur the image.
 * When used without parameters, performs a fast, mild blur of the output image.
 *
 * When used without parameters, performs a fast 3x3 box blur (equivalent to a box linear filter).
 *
 * When a `sigma` is provided, performs a slower, more accurate Gaussian blur.
 *
 * @example
 * const boxBlurred = await sharp(input)
 *   .blur()
 *   .toBuffer();
 *
 * @example
 * const gaussianBlurred = await sharp(input)
 *   .blur(5)
 *   .toBuffer();
 *
 * @param {number} [sigma] a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.
 * @returns {Sharp}
 * @throws {Error} Invalid parameters
@@ -570,14 +583,16 @@ function recomb (inputMatrix) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms the image using brightness, saturation and hue rotation.
|
||||
* Transforms the image using brightness, saturation, hue rotation, and lightness.
|
||||
* Brightness and lightness both operate on luminance, with the difference being that
|
||||
* brightness is multiplicative whereas lightness is additive.
|
||||
*
|
||||
* @since 0.22.1
|
||||
*
|
||||
* @example
|
||||
* sharp(input)
|
||||
* .modulate({
|
||||
* brightness: 2 // increase lightness by a factor of 2
|
||||
* brightness: 2 // increase brightness by a factor of 2
|
||||
* });
|
||||
*
|
||||
* sharp(input)
|
||||
@@ -585,6 +600,11 @@ function recomb (inputMatrix) {
|
||||
* hue: 180 // hue-rotate by 180 degrees
|
||||
* });
|
||||
 *
 * sharp(input)
 *   .modulate({
 *     lightness: 50 // increase lightness by +50
 *   });
 *
 * // decrease brightness and saturation while also hue-rotating by 90 degrees
 * sharp(input)
 *   .modulate({

@@ -597,6 +617,7 @@ function recomb (inputMatrix) {
* @param {number} [options.brightness] Brightness multiplier
|
||||
* @param {number} [options.saturation] Saturation multiplier
|
||||
* @param {number} [options.hue] Degrees for hue rotation
|
||||
* @param {number} [options.lightness] Lightness addend
|
||||
* @returns {Sharp}
|
||||
*/
|
||||
function modulate (options) {
|
||||
@@ -624,6 +645,13 @@ function modulate (options) {
|
||||
throw is.invalidParameterError('hue', 'number', options.hue);
|
||||
}
|
||||
}
|
||||
if ('lightness' in options) {
|
||||
if (is.number(options.lightness)) {
|
||||
this.options.lightness = options.lightness;
|
||||
} else {
|
||||
throw is.invalidParameterError('lightness', 'number', options.lightness);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
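For clarity, the new `lightness` option is additive while `brightness` remains multiplicative, so the two can be combined in a single call. A minimal sketch, assuming an `input.jpg` in the working directory (the file name is illustrative):

```js
const sharp = require('sharp');

// Double the luminance multiplicatively, then lift it by a further +10 additively.
const adjusted = await sharp('input.jpg')
  .modulate({ brightness: 2, lightness: 10 })
  .toBuffer();
```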
lib/output.js (327 changed lines)
@@ -1,5 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const is = require('./is');
|
||||
const sharp = require('./sharp');
|
||||
|
||||
@@ -12,22 +13,31 @@ const formats = new Map([
|
||||
['png', 'png'],
|
||||
['raw', 'raw'],
|
||||
['tiff', 'tiff'],
|
||||
['tif', 'tiff'],
|
||||
['webp', 'webp'],
|
||||
['gif', 'gif']
|
||||
['gif', 'gif'],
|
||||
['jp2', 'jp2'],
|
||||
['jpx', 'jp2'],
|
||||
['j2k', 'jp2'],
|
||||
['j2c', 'jp2']
|
||||
]);
|
||||
|
||||
const errMagickSave = new Error('GIF output requires libvips with support for ImageMagick');
const errJp2Save = new Error('JP2 output requires libvips with support for OpenJPEG');

const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math.log2(colours)));
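The new `bitdepthFromColourCount` helper maps a requested palette size to a power-of-two bit depth and is shared by the PNG and GIF paths below. A standalone sketch of the same expression, evaluated for a few sizes:

```js
const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math.log2(colours)));

console.log(bitdepthFromColourCount(2));   // 1
console.log(bitdepthFromColourCount(4));   // 2
console.log(bitdepthFromColourCount(16));  // 4
console.log(bitdepthFromColourCount(256)); // 8
```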
/**
|
||||
* Write output image data to a file.
|
||||
*
|
||||
* If an explicit output format is not selected, it will be inferred from the extension,
|
||||
* with JPEG, PNG, WebP, AVIF, TIFF, DZI, and libvips' V format supported.
|
||||
* with JPEG, PNG, WebP, AVIF, TIFF, GIF, DZI, and libvips' V format supported.
|
||||
* Note that raw pixel data is only supported for buffer output.
|
||||
*
|
||||
* By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
* See {@link withMetadata} for control over this.
|
||||
*
|
||||
* The caller is responsible for ensuring directory structures and permissions exist.
|
||||
*
|
||||
* A `Promise` is returned when `callback` is not provided.
|
||||
*
|
||||
* @example
|
||||
@@ -52,10 +62,8 @@ function toFile (fileOut, callback) {
|
||||
let err;
|
||||
if (!is.string(fileOut)) {
|
||||
err = new Error('Missing output file path');
|
||||
} else if (this.options.input.file === fileOut) {
|
||||
} else if (is.string(this.options.input.file) && path.resolve(this.options.input.file) === path.resolve(fileOut)) {
|
||||
err = new Error('Cannot use same file for input and output');
|
||||
} else if (this.options.formatOut === 'input' && fileOut.toLowerCase().endsWith('.gif') && !this.constructor.format.magick.output.file) {
|
||||
err = errMagickSave;
|
||||
}
|
||||
if (err) {
|
||||
if (is.fn(callback)) {
|
||||
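The stricter same-file guard now resolves both paths before comparing them, so relative and absolute spellings of the same file are rejected, not just exact string matches. A hedged illustration (the path is hypothetical):

```js
try {
  await sharp('./images/photo.jpg').toFile('images/photo.jpg');
} catch (err) {
  console.log(err.message); // Cannot use same file for input and output
}
```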
@@ -72,9 +80,9 @@ function toFile (fileOut, callback) {
|
||||
|
||||
/**
|
||||
* Write output to a Buffer.
|
||||
* JPEG, PNG, WebP, AVIF, TIFF and raw pixel data output are supported.
|
||||
* JPEG, PNG, WebP, AVIF, TIFF, GIF and raw pixel data output are supported.
|
||||
*
|
||||
* If no explicit format is set, the output format will match the input image, except GIF and SVG input which become PNG output.
|
||||
* If no explicit format is set, the output format will match the input image, except SVG input which becomes PNG output.
|
||||
*
|
||||
* By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
* See {@link withMetadata} for control over this.
|
||||
@@ -160,7 +168,7 @@ function toBuffer (options, callback) {
|
||||
* })
|
||||
* .toBuffer();
|
||||
*
|
||||
* * @example
|
||||
* @example
|
||||
* // Set output metadata to 96 DPI
|
||||
* const data = await sharp(input)
|
||||
* .withMetadata({ density: 96 })
|
||||
@@ -365,6 +373,7 @@ function jpeg (options) {
|
||||
* @param {boolean} [options.adaptiveFiltering=false] - use adaptive row filtering
|
||||
* @param {boolean} [options.palette=false] - quantise to a palette-based image with alpha transparency support
|
||||
* @param {number} [options.quality=100] - use the lowest number of colours needed to achieve given quality, sets `palette` to `true`
|
||||
* @param {number} [options.effort=7] - CPU effort, between 1 (fastest) and 10 (slowest), sets `palette` to `true`
|
||||
* @param {number} [options.colours=256] - maximum number of palette entries, sets `palette` to `true`
|
||||
* @param {number} [options.colors=256] - alternative spelling of `options.colours`, sets `palette` to `true`
|
||||
* @param {number} [options.dither=1.0] - level of Floyd-Steinberg error diffusion, sets `palette` to `true`
|
||||
@@ -389,7 +398,7 @@ function png (options) {
|
||||
}
|
||||
if (is.defined(options.palette)) {
|
||||
this._setBooleanOption('pngPalette', options.palette);
|
||||
} else if (is.defined(options.quality) || is.defined(options.colours || options.colors) || is.defined(options.dither)) {
|
||||
} else if ([options.quality, options.effort, options.colours, options.colors, options.dither].some(is.defined)) {
|
||||
this._setBooleanOption('pngPalette', true);
|
||||
}
|
||||
if (this.options.pngPalette) {
|
||||
@@ -400,10 +409,17 @@ function png (options) {
|
||||
throw is.invalidParameterError('quality', 'integer between 0 and 100', options.quality);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.effort)) {
|
||||
if (is.integer(options.effort) && is.inRange(options.effort, 1, 10)) {
|
||||
this.options.pngEffort = options.effort;
|
||||
} else {
|
||||
throw is.invalidParameterError('effort', 'integer between 1 and 10', options.effort);
|
||||
}
|
||||
}
|
||||
const colours = options.colours || options.colors;
|
||||
if (is.defined(colours)) {
|
||||
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
|
||||
this.options.pngColours = colours;
|
||||
this.options.pngBitdepth = bitdepthFromColourCount(colours);
|
||||
} else {
|
||||
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
|
||||
}
|
||||
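With the `.some(is.defined)` form, supplying `effort` on its own is now enough to switch PNG output into palette mode, matching the existing behaviour of `quality`, `colours`/`colors` and `dither`. A brief sketch (the `input` variable follows the convention of the surrounding examples):

```js
// pngPalette is switched on implicitly by any of quality, effort, colours/colors or dither.
const data = await sharp(input)
  .png({ effort: 4, colours: 64 })
  .toBuffer();
```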
@@ -432,7 +448,7 @@ function png (options) {
|
||||
* @example
|
||||
* // Optimise the file size of an animated WebP
|
||||
* const outputWebp = await sharp(inputWebp, { animated: true })
|
||||
* .webp({ reductionEffort: 6 })
|
||||
* .webp({ effort: 6 })
|
||||
* .toBuffer();
|
||||
*
|
||||
* @param {Object} [options] - output options
|
||||
@@ -441,93 +457,205 @@ function png (options) {
|
||||
* @param {boolean} [options.lossless=false] - use lossless compression mode
|
||||
* @param {boolean} [options.nearLossless=false] - use near_lossless compression mode
|
||||
* @param {boolean} [options.smartSubsample=false] - use high quality chroma subsampling
|
||||
* @param {number} [options.reductionEffort=4] - level of CPU effort to reduce file size, integer 0-6
|
||||
* @param {number} [options.pageHeight] - page height for animated output
|
||||
* @param {number} [options.effort=4] - CPU effort, between 0 (fastest) and 6 (slowest)
|
||||
* @param {number} [options.loop=0] - number of animation iterations, use 0 for infinite animation
|
||||
* @param {number[]} [options.delay] - list of delays between animation frames (in milliseconds)
|
||||
* @param {number|number[]} [options.delay] - delay(s) between animation frames (in milliseconds)
|
||||
* @param {boolean} [options.force=true] - force WebP output, otherwise attempt to use input format
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
*/
|
||||
function webp (options) {
|
||||
if (is.object(options) && is.defined(options.quality)) {
|
||||
if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
|
||||
this.options.webpQuality = options.quality;
|
||||
} else {
|
||||
throw is.invalidParameterError('quality', 'integer between 1 and 100', options.quality);
|
||||
if (is.object(options)) {
|
||||
if (is.defined(options.quality)) {
|
||||
if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
|
||||
this.options.webpQuality = options.quality;
|
||||
} else {
|
||||
throw is.invalidParameterError('quality', 'integer between 1 and 100', options.quality);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.alphaQuality)) {
|
||||
if (is.integer(options.alphaQuality) && is.inRange(options.alphaQuality, 0, 100)) {
|
||||
this.options.webpAlphaQuality = options.alphaQuality;
|
||||
} else {
|
||||
throw is.invalidParameterError('alphaQuality', 'integer between 0 and 100', options.alphaQuality);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.lossless)) {
|
||||
this._setBooleanOption('webpLossless', options.lossless);
|
||||
}
|
||||
if (is.defined(options.nearLossless)) {
|
||||
this._setBooleanOption('webpNearLossless', options.nearLossless);
|
||||
}
|
||||
if (is.defined(options.smartSubsample)) {
|
||||
this._setBooleanOption('webpSmartSubsample', options.smartSubsample);
|
||||
}
|
||||
const effort = options.effort || options.reductionEffort;
|
||||
if (is.defined(effort)) {
|
||||
if (is.integer(effort) && is.inRange(effort, 0, 6)) {
|
||||
this.options.webpEffort = effort;
|
||||
} else {
|
||||
throw is.invalidParameterError('effort', 'integer between 0 and 6', effort);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (is.object(options) && is.defined(options.alphaQuality)) {
|
||||
if (is.integer(options.alphaQuality) && is.inRange(options.alphaQuality, 0, 100)) {
|
||||
this.options.webpAlphaQuality = options.alphaQuality;
|
||||
} else {
|
||||
throw is.invalidParameterError('alphaQuality', 'integer between 0 and 100', options.alphaQuality);
|
||||
}
|
||||
}
|
||||
if (is.object(options) && is.defined(options.lossless)) {
|
||||
this._setBooleanOption('webpLossless', options.lossless);
|
||||
}
|
||||
if (is.object(options) && is.defined(options.nearLossless)) {
|
||||
this._setBooleanOption('webpNearLossless', options.nearLossless);
|
||||
}
|
||||
if (is.object(options) && is.defined(options.smartSubsample)) {
|
||||
this._setBooleanOption('webpSmartSubsample', options.smartSubsample);
|
||||
}
|
||||
if (is.object(options) && is.defined(options.reductionEffort)) {
|
||||
if (is.integer(options.reductionEffort) && is.inRange(options.reductionEffort, 0, 6)) {
|
||||
this.options.webpReductionEffort = options.reductionEffort;
|
||||
} else {
|
||||
throw is.invalidParameterError('reductionEffort', 'integer between 0 and 6', options.reductionEffort);
|
||||
}
|
||||
}
|
||||
|
||||
trySetAnimationOptions(options, this.options);
|
||||
return this._updateFormatOut('webp', options);
|
||||
}
|
||||
|
||||
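`effort` replaces `reductionEffort` for WebP output, with the old name still accepted through the `options.effort || options.reductionEffort` fallback above. A minimal sketch (output paths are illustrative):

```js
// Preferred spelling from 0.30.0
await sharp(input).webp({ effort: 6 }).toFile('optimised.webp');

// Previous spelling, still honoured by the fallback
await sharp(input).webp({ reductionEffort: 6 }).toFile('optimised-legacy.webp');
```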
/**
|
||||
* Use these GIF options for output image.
|
||||
* Use these GIF options for the output image.
|
||||
*
|
||||
* Requires libvips compiled with support for ImageMagick or GraphicsMagick.
|
||||
* The prebuilt binaries do not include this - see
|
||||
* {@link https://sharp.pixelplumbing.com/install#custom-libvips installing a custom libvips}.
|
||||
* The first entry in the palette is reserved for transparency.
|
||||
*
|
||||
* @since 0.30.0
|
||||
*
|
||||
 * @example
 * // Convert PNG to GIF
 * await sharp(pngBuffer)
 *   .gif()
 *   .toBuffer();
*
|
||||
* @example
|
||||
* // Convert animated WebP to animated GIF
|
||||
* await sharp('animated.webp', { animated: true })
|
||||
* .toFile('animated.gif');
|
||||
*
|
||||
* @example
|
||||
* // Create a 128x128, cropped, non-dithered, animated thumbnail of an animated GIF
|
||||
* const out = await sharp('in.gif', { animated: true })
|
||||
* .resize({ width: 128, height: 128 })
|
||||
* .gif({ dither: 0 })
|
||||
* .toBuffer();
|
||||
*
|
||||
* @param {Object} [options] - output options
|
||||
* @param {number} [options.pageHeight] - page height for animated output
|
||||
* @param {number} [options.colours=256] - maximum number of palette entries, including transparency, between 2 and 256
|
||||
* @param {number} [options.colors=256] - alternative spelling of `options.colours`
|
||||
* @param {number} [options.effort=7] - CPU effort, between 1 (fastest) and 10 (slowest)
|
||||
* @param {number} [options.dither=1.0] - level of Floyd-Steinberg error diffusion, between 0 (least) and 1 (most)
|
||||
* @param {number} [options.loop=0] - number of animation iterations, use 0 for infinite animation
|
||||
* @param {number[]} [options.delay] - list of delays between animation frames (in milliseconds)
|
||||
* @param {number|number[]} [options.delay] - delay(s) between animation frames (in milliseconds)
|
||||
* @param {boolean} [options.force=true] - force GIF output, otherwise attempt to use input format
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
*/
|
||||
/* istanbul ignore next */
|
||||
function gif (options) {
|
||||
if (!this.constructor.format.magick.output.buffer) {
|
||||
throw errMagickSave;
|
||||
if (is.object(options)) {
|
||||
const colours = options.colours || options.colors;
|
||||
if (is.defined(colours)) {
|
||||
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
|
||||
this.options.gifBitdepth = bitdepthFromColourCount(colours);
|
||||
} else {
|
||||
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.effort)) {
|
||||
if (is.number(options.effort) && is.inRange(options.effort, 1, 10)) {
|
||||
this.options.gifEffort = options.effort;
|
||||
} else {
|
||||
throw is.invalidParameterError('effort', 'integer between 1 and 10', options.effort);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.dither)) {
|
||||
if (is.number(options.dither) && is.inRange(options.dither, 0, 1)) {
|
||||
this.options.gifDither = options.dither;
|
||||
} else {
|
||||
throw is.invalidParameterError('dither', 'number between 0.0 and 1.0', options.dither);
|
||||
}
|
||||
}
|
||||
}
|
||||
trySetAnimationOptions(options, this.options);
|
||||
return this._updateFormatOut('gif', options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use these JP2 options for output image.
|
||||
*
|
||||
* Requires libvips compiled with support for OpenJPEG.
|
||||
* The prebuilt binaries do not include this - see
|
||||
* {@link https://sharp.pixelplumbing.com/install#custom-libvips installing a custom libvips}.
|
||||
*
|
||||
* @example
|
||||
* // Convert any input to lossless JP2 output
|
||||
* const data = await sharp(input)
|
||||
* .jp2({ lossless: true })
|
||||
* .toBuffer();
|
||||
*
|
||||
* @example
|
||||
* // Convert any input to very high quality JP2 output
|
||||
* const data = await sharp(input)
|
||||
* .jp2({
|
||||
* quality: 100,
|
||||
* chromaSubsampling: '4:4:4'
|
||||
* })
|
||||
* .toBuffer();
|
||||
*
|
||||
* @since 0.29.1
|
||||
*
|
||||
* @param {Object} [options] - output options
|
||||
* @param {number} [options.quality=80] - quality, integer 1-100
|
||||
* @param {boolean} [options.lossless=false] - use lossless compression mode
|
||||
* @param {number} [options.tileWidth=512] - horizontal tile size
|
||||
* @param {number} [options.tileHeight=512] - vertical tile size
|
||||
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
*/
|
||||
/* istanbul ignore next */
|
||||
function jp2 (options) {
|
||||
if (!this.constructor.format.jp2k.output.buffer) {
|
||||
throw errJp2Save;
|
||||
}
|
||||
if (is.object(options)) {
|
||||
if (is.defined(options.quality)) {
|
||||
if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
|
||||
this.options.jp2Quality = options.quality;
|
||||
} else {
|
||||
throw is.invalidParameterError('quality', 'integer between 1 and 100', options.quality);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.lossless)) {
|
||||
if (is.bool(options.lossless)) {
|
||||
this.options.jp2Lossless = options.lossless;
|
||||
} else {
|
||||
throw is.invalidParameterError('lossless', 'boolean', options.lossless);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.tileWidth)) {
|
||||
if (is.integer(options.tileWidth) && is.inRange(options.tileWidth, 1, 32768)) {
|
||||
this.options.jp2TileWidth = options.tileWidth;
|
||||
} else {
|
||||
throw is.invalidParameterError('tileWidth', 'integer between 1 and 32768', options.tileWidth);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.tileHeight)) {
|
||||
if (is.integer(options.tileHeight) && is.inRange(options.tileHeight, 1, 32768)) {
|
||||
this.options.jp2TileHeight = options.tileHeight;
|
||||
} else {
|
||||
throw is.invalidParameterError('tileHeight', 'integer between 1 and 32768', options.tileHeight);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.chromaSubsampling)) {
|
||||
if (is.string(options.chromaSubsampling) && is.inArray(options.chromaSubsampling, ['4:2:0', '4:4:4'])) {
|
||||
this.options.heifChromaSubsampling = options.chromaSubsampling;
|
||||
} else {
|
||||
throw is.invalidParameterError('chromaSubsampling', 'one of: 4:2:0, 4:4:4', options.chromaSubsampling);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this._updateFormatOut('jp2', options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set animation options if available.
|
||||
* @private
|
||||
*
|
||||
* @param {Object} [source] - output options
|
||||
* @param {number} [source.pageHeight] - page height for animated output
|
||||
* @param {number} [source.loop=0] - number of animation iterations, use 0 for infinite animation
|
||||
* @param {number[]} [source.delay] - list of delays between animation frames (in milliseconds)
|
||||
* @param {Object} [target] - target object for valid options
|
||||
* @throws {Error} Invalid options
|
||||
*/
|
||||
function trySetAnimationOptions (source, target) {
|
||||
if (is.object(source) && is.defined(source.pageHeight)) {
|
||||
if (is.integer(source.pageHeight) && source.pageHeight > 0) {
|
||||
target.pageHeight = source.pageHeight;
|
||||
} else {
|
||||
throw is.invalidParameterError('pageHeight', 'integer larger than 0', source.pageHeight);
|
||||
}
|
||||
}
|
||||
if (is.object(source) && is.defined(source.loop)) {
|
||||
if (is.integer(source.loop) && is.inRange(source.loop, 0, 65535)) {
|
||||
target.loop = source.loop;
|
||||
@@ -536,13 +664,16 @@ function trySetAnimationOptions (source, target) {
|
||||
}
|
||||
}
|
||||
if (is.object(source) && is.defined(source.delay)) {
|
||||
if (
|
||||
// We allow singular values as well
|
||||
if (is.integer(source.delay) && is.inRange(source.delay, 0, 65535)) {
|
||||
target.delay = [source.delay];
|
||||
} else if (
|
||||
Array.isArray(source.delay) &&
|
||||
source.delay.every(is.integer) &&
|
||||
source.delay.every(v => is.inRange(v, 0, 65535))) {
|
||||
target.delay = source.delay;
|
||||
} else {
|
||||
throw is.invalidParameterError('delay', 'array of integers between 0 and 65535', source.delay);
|
||||
throw is.invalidParameterError('delay', 'integer or an array of integers between 0 and 65535', source.delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
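`delay` may now be a single integer as well as an array; the singular form is wrapped into a one-element array here and repeated for every frame by the native layer. An illustrative sketch (file names are placeholders):

```js
// Apply the same 100ms delay to every frame
await sharp('animated.gif', { animated: true })
  .gif({ delay: 100, loop: 3 })
  .toFile('slowed.gif');

// Or give each frame its own delay
await sharp('animated.gif', { animated: true })
  .gif({ delay: [50, 100, 150] })
  .toFile('varied.gif');
```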
@@ -550,6 +681,8 @@ function trySetAnimationOptions (source, target) {
|
||||
/**
|
||||
* Use these TIFF options for output image.
|
||||
*
|
||||
* The `density` can be set in pixels/inch via {@link withMetadata} instead of providing `xres` and `yres` in pixels/mm.
|
||||
*
|
||||
* @example
|
||||
* // Convert SVG input to LZW-compressed, 1 bit per pixel TIFF output
|
||||
* sharp('input.svg')
|
||||
@@ -571,6 +704,7 @@ function trySetAnimationOptions (source, target) {
|
||||
* @param {number} [options.tileHeight=256] - vertical tile size
|
||||
* @param {number} [options.xres=1.0] - horizontal resolution in pixels/mm
|
||||
* @param {number} [options.yres=1.0] - vertical resolution in pixels/mm
|
||||
* @param {string} [options.resolutionUnit='inch'] - resolution unit options: inch, cm
|
||||
* @param {number} [options.bitdepth=8] - reduce bitdepth to 1, 2 or 4 bit
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
@@ -644,6 +778,14 @@ function tiff (options) {
|
||||
throw is.invalidParameterError('predictor', 'one of: none, horizontal, float', options.predictor);
|
||||
}
|
||||
}
|
||||
// resolutionUnit
|
||||
if (is.defined(options.resolutionUnit)) {
|
||||
if (is.string(options.resolutionUnit) && is.inArray(options.resolutionUnit, ['inch', 'cm'])) {
|
||||
this.options.tiffResolutionUnit = options.resolutionUnit;
|
||||
} else {
|
||||
throw is.invalidParameterError('resolutionUnit', 'one of: inch, cm', options.resolutionUnit);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this._updateFormatOut('tiff', options);
|
||||
}
|
||||
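The new `resolutionUnit` option selects whether the density recorded in the TIFF header is expressed per inch (the default) or per centimetre. A minimal, illustrative call:

```js
await sharp(input)
  .tiff({ resolutionUnit: 'cm' })
  .toFile('output.tiff');
```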
@@ -654,12 +796,14 @@ function tiff (options) {
|
||||
* Whilst it is possible to create AVIF images smaller than 16x16 pixels,
|
||||
* most web browsers do not display these properly.
|
||||
*
|
||||
* AVIF image sequences are not supported.
|
||||
*
|
||||
* @since 0.27.0
|
||||
*
|
||||
* @param {Object} [options] - output options
|
||||
* @param {number} [options.quality=50] - quality, integer 1-100
|
||||
* @param {boolean} [options.lossless=false] - use lossless compression
|
||||
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
|
||||
* @param {number} [options.effort=4] - CPU effort, between 0 (fastest) and 9 (slowest)
|
||||
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
@@ -680,7 +824,7 @@ function avif (options) {
|
||||
* @param {number} [options.quality=50] - quality, integer 1-100
|
||||
* @param {string} [options.compression='av1'] - compression format: av1, hevc
|
||||
* @param {boolean} [options.lossless=false] - use lossless compression
|
||||
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
|
||||
* @param {number} [options.effort=4] - CPU effort, between 0 (fastest) and 9 (slowest)
|
||||
* @param {string} [options.chromaSubsampling='4:4:4'] - set to '4:2:0' to use chroma subsampling
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid options
|
||||
@@ -708,11 +852,17 @@ function heif (options) {
|
||||
throw is.invalidParameterError('compression', 'one of: av1, hevc', options.compression);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.speed)) {
|
||||
if (is.integer(options.speed) && is.inRange(options.speed, 0, 8)) {
|
||||
this.options.heifSpeed = options.speed;
|
||||
if (is.defined(options.effort)) {
|
||||
if (is.integer(options.effort) && is.inRange(options.effort, 0, 9)) {
|
||||
this.options.heifEffort = options.effort;
|
||||
} else {
|
||||
throw is.invalidParameterError('speed', 'integer between 0 and 8', options.speed);
|
||||
throw is.invalidParameterError('effort', 'integer between 0 and 9', options.effort);
|
||||
}
|
||||
} else if (is.defined(options.speed)) {
|
||||
if (is.integer(options.speed) && is.inRange(options.speed, 0, 9)) {
|
||||
this.options.heifEffort = 9 - options.speed;
|
||||
} else {
|
||||
throw is.invalidParameterError('speed', 'integer between 0 and 9', options.speed);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.chromaSubsampling)) {
|
||||
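For HEIF/AVIF, `speed` is kept for backwards compatibility and translated onto the new scale as `effort = 9 - speed`, so existing callers keep working. A short sketch (AVIF shown, since it routes through the same HEIF options):

```js
// Preferred from 0.30.0: explicit CPU effort, 0 (fastest) to 9 (slowest)
await sharp(input).avif({ effort: 2 }).toBuffer();

// Legacy option, converted internally to effort = 9 - speed (here: effort 2)
await sharp(input).avif({ speed: 7 }).toBuffer();
```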
@@ -770,8 +920,6 @@ function raw (options) {
|
||||
* Set the format and options for tile images via the `toFormat`, `jpeg`, `png` or `webp` functions.
|
||||
* Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed archive file format.
|
||||
*
|
||||
* Warning: multiple sharp instances concurrently producing tile output can expose a possible race condition in some versions of libgsf.
|
||||
*
|
||||
* @example
|
||||
* sharp('input.tiff')
|
||||
* .png()
|
||||
@@ -791,10 +939,10 @@ function raw (options) {
|
||||
* @param {string} [options.depth] how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout.
|
||||
* @param {number} [options.skipBlanks=-1] threshold to skip tile generation, a value 0 - 255 for 8-bit images or 0 - 65535 for 16-bit images
|
||||
* @param {string} [options.container='fs'] tile container, with value `fs` (filesystem) or `zip` (compressed file).
|
||||
* @param {string} [options.layout='dz'] filesystem layout, possible values are `dz`, `iiif`, `zoomify` or `google`.
|
||||
* @param {string} [options.layout='dz'] filesystem layout, possible values are `dz`, `iiif`, `iiif3`, `zoomify` or `google`.
|
||||
* @param {boolean} [options.centre=false] centre image in tile.
|
||||
* @param {boolean} [options.center=false] alternative spelling of centre.
|
||||
* @param {string} [options.id='https://example.com/iiif'] when `layout` is `iiif`, sets the `@id` attribute of `info.json`
|
||||
* @param {string} [options.id='https://example.com/iiif'] when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json`
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid parameters
|
||||
*/
|
||||
@@ -829,10 +977,10 @@ function tile (options) {
|
||||
}
|
||||
// Layout
|
||||
if (is.defined(options.layout)) {
|
||||
if (is.string(options.layout) && is.inArray(options.layout, ['dz', 'google', 'iiif', 'zoomify'])) {
|
||||
if (is.string(options.layout) && is.inArray(options.layout, ['dz', 'google', 'iiif', 'iiif3', 'zoomify'])) {
|
||||
this.options.tileLayout = options.layout;
|
||||
} else {
|
||||
throw is.invalidParameterError('layout', 'one of: dz, google, iiif, zoomify', options.layout);
|
||||
throw is.invalidParameterError('layout', 'one of: dz, google, iiif, iiif3, zoomify', options.layout);
|
||||
}
|
||||
}
|
||||
// Angle of rotation,
|
||||
@@ -886,6 +1034,31 @@ function tile (options) {
|
||||
return this._updateFormatOut('dz');
|
||||
}
|
||||
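Tile output can now target IIIF Image API v3 via the new `iiif3` layout, with `id` filling the `id` attribute of the generated `info.json`. An illustrative sketch (URL and file names are placeholders):

```js
await sharp('input.tiff')
  .tile({ layout: 'iiif3', id: 'https://example.com/iiif' })
  .toFile('output.dz');
```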
|
||||
/**
 * Set a timeout for processing, in seconds.
 * Use a value of zero to continue processing indefinitely, the default behaviour.
 *
 * The clock starts when libvips opens an input image for processing.
 * Time spent waiting for a libuv thread to become available is not included.
 *
 * @since 0.29.2
 *
 * @param {Object} options
 * @param {number} options.seconds - Number of seconds after which processing will be stopped
 * @returns {Sharp}
 */
function timeout (options) {
  if (!is.plainObject(options)) {
    throw is.invalidParameterError('options', 'object', options);
  }
  if (is.integer(options.seconds) && is.inRange(options.seconds, 0, 3600)) {
    this.options.timeoutSeconds = options.seconds;
  } else {
    throw is.invalidParameterError('seconds', 'integer between 0 and 3600', options.seconds);
  }
  return this;
}
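A short, illustrative use of the new `timeout()` (file names are placeholders; the exact error text comes from libvips):

```js
try {
  await sharp('very-large.tiff')
    .resize(1000)
    .timeout({ seconds: 5 })
    .toFile('resized.tiff');
} catch (err) {
  console.error(err.message); // e.g. "timeout: 53% complete"
}
```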
|
||||
/**
|
||||
* Update the output format unless options.force is false,
|
||||
* in which case revert to input format.
|
||||
@@ -962,6 +1135,7 @@ function _pipeline (callback) {
|
||||
this.push(data);
|
||||
}
|
||||
this.push(null);
|
||||
this.emit('close');
|
||||
});
|
||||
});
|
||||
if (this.streamInFinished) {
|
||||
@@ -977,6 +1151,7 @@ function _pipeline (callback) {
|
||||
this.push(data);
|
||||
}
|
||||
this.push(null);
|
||||
this.emit('close');
|
||||
});
|
||||
}
|
||||
return this;
|
||||
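Stream-based pipelines now emit `close` after the final chunk has been pushed, which downstream code can use to know the output has fully drained. A hedged sketch (file names are placeholders):

```js
const fs = require('fs');
const sharp = require('sharp');

const resizer = sharp().resize(320, 240);
resizer.on('close', () => console.log('sharp output stream closed'));

fs.createReadStream('in.jpg').pipe(resizer).pipe(fs.createWriteStream('out.jpg'));
```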
@@ -1031,6 +1206,7 @@ module.exports = function (Sharp) {
|
||||
withMetadata,
|
||||
toFormat,
|
||||
jpeg,
|
||||
jp2,
|
||||
png,
|
||||
webp,
|
||||
tiff,
|
||||
@@ -1039,6 +1215,7 @@ module.exports = function (Sharp) {
|
||||
gif,
|
||||
raw,
|
||||
tile,
|
||||
timeout,
|
||||
// Private
|
||||
_updateFormatOut,
|
||||
_setBooleanOption,
|
||||
|
||||
@@ -8,7 +8,7 @@ module.exports = function () {
  const arch = env.npm_config_arch || process.arch;
  const platform = env.npm_config_platform || process.platform;
  /* istanbul ignore next */
  const libc = (platform === 'linux' && detectLibc.isNonGlibcLinux) ? detectLibc.family : '';
  const libc = (platform === 'linux' && detectLibc.isNonGlibcLinuxSync()) ? detectLibc.familySync() : '';

  const platformId = [`${platform}${libc}`];

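detect-libc v2 turns the synchronous properties used previously into function calls, which is the only change needed here. A small sketch of the renamed API as used above:

```js
const detectLibc = require('detect-libc');

// v1 exposed detectLibc.isNonGlibcLinux and detectLibc.family as plain properties;
// v2 makes the synchronous checks explicit function calls.
const libc = detectLibc.isNonGlibcLinuxSync() ? detectLibc.familySync() : '';
console.log(libc); // '' on glibc systems, 'musl' on e.g. Alpine
```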
@@ -183,6 +183,20 @@ function isRotationExpected (options) {
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* sharp(input)
|
||||
* .resize(200, 200, {
|
||||
* fit: sharp.fit.outside,
|
||||
* withoutReduction: true
|
||||
* })
|
||||
* .toFormat('jpeg')
|
||||
* .toBuffer()
|
||||
* .then(function(outputBuffer) {
|
||||
* // outputBuffer contains JPEG image data
|
||||
* // of at least 200 pixels wide and 200 pixels high while maintaining aspect ratio
|
||||
* // and no smaller than the input image
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* const scaleByHalf = await sharp(input)
|
||||
* .metadata()
|
||||
* .then(({ width }) => sharp(input)
|
||||
@@ -200,6 +214,7 @@ function isRotationExpected (options) {
|
||||
* @param {String|Object} [options.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour when using a `fit` of `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
|
||||
* @param {String} [options.kernel='lanczos3'] - the kernel to use for image reduction.
|
||||
* @param {Boolean} [options.withoutEnlargement=false] - do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option.
|
||||
* @param {Boolean} [options.withoutReduction=false] - do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option.
|
||||
* @param {Boolean} [options.fastShrinkOnLoad=true] - take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images.
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid parameters
|
||||
@@ -276,6 +291,10 @@ function resize (width, height, options) {
|
||||
if (is.defined(options.withoutEnlargement)) {
|
||||
this._setBooleanOption('withoutEnlargement', options.withoutEnlargement);
|
||||
}
|
||||
// Without reduction
|
||||
if (is.defined(options.withoutReduction)) {
|
||||
this._setBooleanOption('withoutReduction', options.withoutReduction);
|
||||
}
|
||||
// Shrink on load
|
||||
if (is.defined(options.fastShrinkOnLoad)) {
|
||||
this._setBooleanOption('fastShrinkOnLoad', options.fastShrinkOnLoad);
|
||||
|
||||
lib/sharp.js (14 changed lines)
@@ -11,14 +11,22 @@ try {
|
||||
if (/dylib/.test(err.message) && /Incompatible library version/.test(err.message)) {
|
||||
help.push('- Update Homebrew: "brew update && brew upgrade vips"');
|
||||
} else {
|
||||
const [platform, arch] = platformAndArch.split('-');
|
||||
help.push(
|
||||
'- Install with the --verbose flag and look for errors: "npm install --ignore-scripts=false --verbose sharp"',
|
||||
`- Install for the current runtime: "npm install --platform=${process.platform} --arch=${process.arch} sharp"`
|
||||
`- Install for the current ${platformAndArch} runtime: "npm install --platform=${platform} --arch=${arch} sharp"`
|
||||
);
|
||||
}
|
||||
help.push(
|
||||
'- Consult the installation documentation: https://sharp.pixelplumbing.com/install'
|
||||
);
|
||||
console.error(help.join('\n'));
|
||||
process.exit(1);
|
||||
// Check loaded
|
||||
if (process.platform === 'win32') {
|
||||
const loadedModule = Object.keys(require.cache).find((i) => /[\\/]build[\\/]Release[\\/]sharp(.*)\.node$/.test(i));
|
||||
if (loadedModule) {
|
||||
const [, loadedPackage] = loadedModule.match(/node_modules[\\/]([^\\/]+)[\\/]/);
|
||||
help.push(`- Ensure the version of sharp aligns with the ${loadedPackage} package: "npm ls sharp"`);
|
||||
}
|
||||
}
|
||||
throw new Error(help.join('\n'));
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const events = require('events');
|
||||
const detectLibc = require('detect-libc');
|
||||
|
||||
const is = require('./is');
|
||||
const platformAndArch = require('./platform')();
|
||||
const sharp = require('./sharp');
|
||||
|
||||
/**
|
||||
@@ -45,8 +48,23 @@ let versions = {
|
||||
vips: sharp.libvipsVersion()
|
||||
};
|
||||
try {
|
||||
versions = require(`../vendor/${versions.vips}/versions.json`);
|
||||
} catch (err) {}
|
||||
versions = require(`../vendor/${versions.vips}/${platformAndArch}/versions.json`);
|
||||
} catch (_err) { /* ignore */ }
|
||||
|
||||
/**
|
||||
* An Object containing the platform and architecture
|
||||
* of the current and installed vendored binaries.
|
||||
* @member
|
||||
* @example
|
||||
* console.log(sharp.vendor);
|
||||
*/
|
||||
const vendor = {
|
||||
current: platformAndArch,
|
||||
installed: []
|
||||
};
|
||||
try {
|
||||
vendor.installed = fs.readdirSync(path.join(__dirname, `../vendor/${versions.vips}`));
|
||||
} catch (_err) { /* ignore */ }
|
||||
|
||||
/**
|
||||
* Gets or, when options are provided, sets the limits of _libvips'_ operation cache.
|
||||
@@ -110,7 +128,7 @@ function concurrency (concurrency) {
|
||||
return sharp.concurrency(is.integer(concurrency) ? concurrency : null);
|
||||
}
|
||||
/* istanbul ignore next */
|
||||
if (detectLibc.family === detectLibc.GLIBC && !sharp._isUsingJemalloc()) {
|
||||
if (detectLibc.familySync() === detectLibc.GLIBC && !sharp._isUsingJemalloc()) {
|
||||
// Reduce default concurrency to 1 when using glibc memory allocator
|
||||
sharp.concurrency(1);
|
||||
}
|
||||
@@ -175,5 +193,6 @@ module.exports = function (Sharp) {
|
||||
Sharp.format = format;
|
||||
Sharp.interpolators = interpolators;
|
||||
Sharp.versions = versions;
|
||||
Sharp.vendor = vendor;
|
||||
Sharp.queue = queue;
|
||||
};
|
||||
|
||||
package.json (43 changed lines)
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "sharp",
|
||||
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, AVIF and TIFF images",
|
||||
"version": "0.29.0",
|
||||
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, GIF, AVIF and TIFF images",
|
||||
"version": "0.30.0",
|
||||
"author": "Lovell Fuller <npm@lovell.info>",
|
||||
"homepage": "https://github.com/lovell/sharp",
|
||||
"contributors": [
|
||||
@@ -78,7 +78,10 @@
|
||||
"Jacob Smith <jacob@frende.me>",
|
||||
"Michael Nutt <michael@nutt.im>",
|
||||
"Brad Parham <baparham@gmail.com>",
|
||||
"Taneli Vatanen <taneli.vatanen@gmail.com>"
|
||||
"Taneli Vatanen <taneli.vatanen@gmail.com>",
|
||||
"Joris Dugué <zaruike10@gmail.com>",
|
||||
"Chris Banks <christopher.bradley.banks@gmail.com>",
|
||||
"Ompal Singh <ompal.hitm09@gmail.com>"
|
||||
],
|
||||
"scripts": {
|
||||
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node install/can-compile && node-gyp rebuild && node install/dll-copy)",
|
||||
@@ -112,6 +115,7 @@
|
||||
"tiff",
|
||||
"gif",
|
||||
"svg",
|
||||
"jp2",
|
||||
"dzi",
|
||||
"image",
|
||||
"resize",
|
||||
@@ -122,33 +126,46 @@
|
||||
"vips"
|
||||
],
|
||||
"dependencies": {
|
||||
"color": "^4.0.1",
|
||||
"detect-libc": "^1.0.3",
|
||||
"node-addon-api": "^4.0.0",
|
||||
"prebuild-install": "^6.1.4",
|
||||
"color": "^4.2.0",
|
||||
"detect-libc": "^2.0.0",
|
||||
"node-addon-api": "^4.3.0",
|
||||
"prebuild-install": "^7.0.1",
|
||||
"semver": "^7.3.5",
|
||||
"simple-get": "^3.1.0",
|
||||
"simple-get": "^4.0.1",
|
||||
"tar-fs": "^2.1.1",
|
||||
"tunnel-agent": "^0.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"async": "^3.2.1",
|
||||
"async": "^3.2.3",
|
||||
"cc": "^3.0.1",
|
||||
"decompress-zip": "^0.3.3",
|
||||
"documentation": "^13.2.5",
|
||||
"exif-reader": "^1.0.3",
|
||||
"icc": "^2.0.0",
|
||||
"license-checker": "^25.0.1",
|
||||
"mocha": "^9.0.3",
|
||||
"mock-fs": "^5.0.0",
|
||||
"mocha": "^9.2.0",
|
||||
"mock-fs": "^5.1.2",
|
||||
"nyc": "^15.1.0",
|
||||
"prebuild": "^10.0.1",
|
||||
"prebuild": "^11.0.2",
|
||||
"rimraf": "^3.0.2",
|
||||
"semistandard": "^16.0.1"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"config": {
|
||||
"libvips": "8.11.3",
|
||||
"libvips": "8.12.2",
|
||||
"integrity": {
|
||||
"darwin-arm64v8": "sha512-p46s/bbJAjkOXzPISZt9HUpG9GWjwQkYnLLRLKzsBJHLtB3X6C6Y/zXI5Hd0DOojcFkks9a0kTN+uDQ/XJY19g==",
|
||||
"darwin-x64": "sha512-6vOHVZnvXwe6EXRsy29jdkUzBE6ElNpXUwd+m8vV7qy32AnXu7B9YemHsZ44vWviIwPZeXF6Nhd9EFLM0wWohw==",
|
||||
"linux-arm64v8": "sha512-XwZdS63yhqLtbFtx/0eoLF/Agf5qtTrI11FMnMRpuBJWd4jHB60RAH+uzYUgoChCmKIS+AeXYMLm4d8Ns2QX8w==",
|
||||
"linux-armv6": "sha512-Rh0Q0kqwPG2MjXWOkPCuPEyiUKFgKJYWLgS835D4MrXgdKr8Tft/eVrc2iGIxt2re30VpDiZ1h0Rby1aCZt2zw==",
|
||||
"linux-armv7": "sha512-heTS/MsmRvu4JljINxP+vDiS9ZZfuGhr3IStb5F7Gc0/QLRhllYAg4rcO8L1eTK9sIIzG5ARvI19+YUZe7WbzA==",
|
||||
"linux-x64": "sha512-SSWAwBFi0hx8V/h/v82tTFGKWTFv9FiCK3Timz5OExuI+sX1Ngrd0PVQaWXOThGNdel/fcD3Bz9YjSt4feBR1g==",
|
||||
"linuxmusl-arm64v8": "sha512-Rhks+5C7p7aO6AucLT1uvzo8ohlqcqCUPgZmN+LZjsPWob/Iix3MfiDYtv/+gTvdeEfXxbCU6/YuPBwHQ7/crA==",
|
||||
"linuxmusl-x64": "sha512-IOyjSQqpWVntqOUpCHVWuQwACwmmjdi15H8Pc+Ma1JkhPogTfVsFQWyL7DuOTD3Yr23EuYGzovUX00duOtfy/g==",
|
||||
"win32-arm64v8": "sha512-A+Qe8Ipewtvw9ldvF6nWed2J8kphzrUE04nFeKCtNx6pfGQ/MAlCKMjt/U8VgUKNjB01zJDUW9XE0+FhGZ/UpQ==",
|
||||
"win32-ia32": "sha512-cMrAvwFdDeAVnLJt0IPMPRKaIFhyXYGTprsM0DND9VUHE8F7dJMR44tS5YkXsGh1QNDtjKT6YuxAVUglmiXtpA==",
|
||||
"win32-x64": "sha512-vLFIfw6aW2zABa8jpgzWDhljnE6glktrddErVyazAIoHl6BFFe/Da+LK1DbXvIYHz7fyOoKhSfCJHCiJG1Vg6w=="
|
||||
},
|
||||
"runtime": "napi",
|
||||
"target": 5
|
||||
},
|
||||
|
||||
src/common.cc (187 changed lines)
@@ -132,6 +132,8 @@ namespace sharp {
|
||||
descriptor->limitInputPixels = AttrAsUint32(input, "limitInputPixels");
|
||||
// Allow switch from random to sequential access
|
||||
descriptor->access = AttrAsBool(input, "sequentialRead") ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
|
||||
// Remove safety features and allow unlimited SVG/PNG input
|
||||
descriptor->unlimited = AttrAsBool(input, "unlimited");
|
||||
return descriptor;
|
||||
}
|
||||
|
||||
@@ -157,6 +159,10 @@ namespace sharp {
|
||||
bool IsGif(std::string const &str) {
|
||||
return EndsWith(str, ".gif") || EndsWith(str, ".GIF");
|
||||
}
|
||||
bool IsJp2(std::string const &str) {
|
||||
return EndsWith(str, ".jp2") || EndsWith(str, ".jpx") || EndsWith(str, ".j2k") || EndsWith(str, ".j2c")
|
||||
|| EndsWith(str, ".JP2") || EndsWith(str, ".JPX") || EndsWith(str, ".J2K") || EndsWith(str, ".J2C");
|
||||
}
|
||||
bool IsTiff(std::string const &str) {
|
||||
return EndsWith(str, ".tif") || EndsWith(str, ".tiff") || EndsWith(str, ".TIF") || EndsWith(str, ".TIFF");
|
||||
}
|
||||
@@ -190,6 +196,7 @@ namespace sharp {
|
||||
case ImageType::WEBP: id = "webp"; break;
|
||||
case ImageType::TIFF: id = "tiff"; break;
|
||||
case ImageType::GIF: id = "gif"; break;
|
||||
case ImageType::JP2: id = "jp2"; break;
|
||||
case ImageType::SVG: id = "svg"; break;
|
||||
case ImageType::HEIF: id = "heif"; break;
|
||||
case ImageType::PDF: id = "pdf"; break;
|
||||
@@ -226,6 +233,8 @@ namespace sharp {
|
||||
{ "VipsForeignLoadGifBuffer", ImageType::GIF },
|
||||
{ "VipsForeignLoadNsgifFile", ImageType::GIF },
|
||||
{ "VipsForeignLoadNsgifBuffer", ImageType::GIF },
|
||||
{ "VipsForeignLoadJp2kBuffer", ImageType::JP2 },
|
||||
{ "VipsForeignLoadJp2kFile", ImageType::JP2 },
|
||||
{ "VipsForeignLoadSvgFile", ImageType::SVG },
|
||||
{ "VipsForeignLoadSvgBuffer", ImageType::SVG },
|
||||
{ "VipsForeignLoadHeifFile", ImageType::HEIF },
|
||||
@@ -234,6 +243,8 @@ namespace sharp {
|
||||
{ "VipsForeignLoadPdfBuffer", ImageType::PDF },
|
||||
{ "VipsForeignLoadMagickFile", ImageType::MAGICK },
|
||||
{ "VipsForeignLoadMagickBuffer", ImageType::MAGICK },
|
||||
{ "VipsForeignLoadMagick7File", ImageType::MAGICK },
|
||||
{ "VipsForeignLoadMagick7Buffer", ImageType::MAGICK },
|
||||
{ "VipsForeignLoadOpenslide", ImageType::OPENSLIDE },
|
||||
{ "VipsForeignLoadPpmFile", ImageType::PPM },
|
||||
{ "VipsForeignLoadFits", ImageType::FITS },
|
||||
@@ -285,6 +296,7 @@ namespace sharp {
|
||||
imageType == ImageType::WEBP ||
|
||||
imageType == ImageType::MAGICK ||
|
||||
imageType == ImageType::GIF ||
|
||||
imageType == ImageType::JP2 ||
|
||||
imageType == ImageType::TIFF ||
|
||||
imageType == ImageType::HEIF ||
|
||||
imageType == ImageType::PDF;
|
||||
@@ -318,7 +330,7 @@ namespace sharp {
|
||||
vips::VOption *option = VImage::option()
|
||||
->set("access", descriptor->access)
|
||||
->set("fail", descriptor->failOnError);
|
||||
if (imageType == ImageType::SVG) {
|
||||
if (descriptor->unlimited && (imageType == ImageType::SVG || imageType == ImageType::PNG)) {
|
||||
option->set("unlimited", TRUE);
|
||||
}
|
||||
if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
|
||||
@@ -393,7 +405,7 @@ namespace sharp {
|
||||
vips::VOption *option = VImage::option()
|
||||
->set("access", descriptor->access)
|
||||
->set("fail", descriptor->failOnError);
|
||||
if (imageType == ImageType::SVG) {
|
||||
if (descriptor->unlimited && (imageType == ImageType::SVG || imageType == ImageType::PNG)) {
|
||||
option->set("unlimited", TRUE);
|
||||
}
|
||||
if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
|
||||
@@ -478,36 +490,41 @@ namespace sharp {
|
||||
|
||||
/*
|
||||
Set animation properties if necessary.
|
||||
Non-provided properties will be loaded from image.
|
||||
*/
|
||||
VImage SetAnimationProperties(VImage image, int pageHeight, std::vector<int> delay, int loop) {
|
||||
bool hasDelay = delay.size() != 1 || delay.front() != -1;
|
||||
VImage SetAnimationProperties(VImage image, int nPages, int pageHeight, std::vector<int> delay, int loop) {
|
||||
bool hasDelay = !delay.empty();
|
||||
|
||||
if (pageHeight == 0 && image.get_typeof(VIPS_META_PAGE_HEIGHT) == G_TYPE_INT) {
|
||||
pageHeight = image.get_int(VIPS_META_PAGE_HEIGHT);
|
||||
// Avoid a copy if none of the animation properties are needed.
|
||||
if (nPages == 1 && !hasDelay && loop == -1) return image;
|
||||
|
||||
if (delay.size() == 1) {
|
||||
// We have just one delay, repeat that value for all frames.
|
||||
delay.insert(delay.end(), nPages - 1, delay[0]);
|
||||
}
|
||||
|
||||
if (!hasDelay && image.get_typeof("delay") == VIPS_TYPE_ARRAY_INT) {
|
||||
delay = image.get_array_int("delay");
|
||||
hasDelay = true;
|
||||
}
|
||||
|
||||
if (loop == -1 && image.get_typeof("loop") == G_TYPE_INT) {
|
||||
loop = image.get_int("loop");
|
||||
}
|
||||
|
||||
if (pageHeight == 0) return image;
|
||||
|
||||
// It is necessary to create the copy as otherwise, pageHeight will be ignored!
|
||||
// Attaching metadata, need to copy the image.
|
||||
VImage copy = image.copy();
|
||||
|
||||
copy.set(VIPS_META_PAGE_HEIGHT, pageHeight);
|
||||
// Only set page-height if we have more than one page, or this could
|
||||
// accidentally turn into an animated image later.
|
||||
if (nPages > 1) copy.set(VIPS_META_PAGE_HEIGHT, pageHeight);
|
||||
if (hasDelay) copy.set("delay", delay);
|
||||
if (loop != -1) copy.set("loop", loop);
|
||||
|
||||
return copy;
|
||||
}
|
||||
|
||||
/*
|
||||
Remove animation properties from image.
|
||||
*/
|
||||
VImage RemoveAnimationProperties(VImage image) {
|
||||
VImage copy = image.copy();
|
||||
copy.remove(VIPS_META_PAGE_HEIGHT);
|
||||
copy.remove("delay");
|
||||
copy.remove("loop");
|
||||
return copy;
|
||||
}
|
||||
|
||||
/*
|
||||
Does this image have a non-default density?
|
||||
*/
|
||||
@@ -533,6 +550,14 @@ namespace sharp {
|
||||
return copy;
|
||||
}
|
||||
|
||||
/*
|
||||
Multi-page images can have a page height. Fetch it, and sanity check it.
|
||||
If page-height is not set, it defaults to the image height
|
||||
*/
|
||||
int GetPageHeight(VImage image) {
|
||||
return vips_image_get_page_height(image.get_image());
|
||||
}
|
||||
|
||||
/*
|
||||
Check the proposed format supports the current dimensions.
|
||||
*/
|
||||
@@ -589,6 +614,33 @@ namespace sharp {
|
||||
return warning;
|
||||
}
|
||||
|
||||
/*
|
||||
Attach an event listener for progress updates, used to detect timeout
|
||||
*/
|
||||
void SetTimeout(VImage image, int const seconds) {
|
||||
if (seconds > 0) {
|
||||
VipsImage *im = image.get_image();
|
||||
if (im->progress_signal == NULL) {
|
||||
int *timeout = VIPS_NEW(im, int);
|
||||
*timeout = seconds;
|
||||
g_signal_connect(im, "eval", G_CALLBACK(VipsProgressCallBack), timeout);
|
||||
vips_image_set_progress(im, TRUE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Event listener for progress updates, used to detect timeout
|
||||
*/
|
||||
void VipsProgressCallBack(VipsImage *im, VipsProgress *progress, int *timeout) {
|
||||
// printf("VipsProgressCallBack progress=%d run=%d timeout=%d\n", progress->percent, progress->run, *timeout);
|
||||
if (*timeout > 0 && progress->run >= *timeout) {
|
||||
vips_image_set_kill(im, TRUE);
|
||||
vips_error("timeout", "%d%% complete", progress->percent);
|
||||
*timeout = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Calculate the (left, top) coordinates of the output image
|
||||
within the input image, applying the given gravity during an embed.
|
||||
@@ -757,23 +809,27 @@ namespace sharp {
|
||||
/*
|
||||
Convert RGBA value to another colourspace
|
||||
*/
|
||||
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation) {
|
||||
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba,
|
||||
VipsInterpretation const interpretation, bool premultiply) {
|
||||
int const bands = static_cast<int>(rgba.size());
|
||||
if (bands < 3 || interpretation == VIPS_INTERPRETATION_sRGB || interpretation == VIPS_INTERPRETATION_RGB) {
|
||||
if (bands < 3) {
|
||||
return rgba;
|
||||
} else {
|
||||
VImage pixel = VImage::new_matrix(1, 1);
|
||||
pixel.set("bands", bands);
|
||||
pixel = pixel.new_from_image(rgba);
|
||||
pixel = pixel.colourspace(interpretation, VImage::option()->set("source_space", VIPS_INTERPRETATION_sRGB));
|
||||
return pixel(0, 0);
|
||||
}
|
||||
VImage pixel = VImage::new_matrix(1, 1);
|
||||
pixel.set("bands", bands);
|
||||
pixel = pixel
|
||||
.new_from_image(rgba)
|
||||
.colourspace(interpretation, VImage::option()->set("source_space", VIPS_INTERPRETATION_sRGB));
|
||||
if (premultiply) {
|
||||
pixel = pixel.premultiply();
|
||||
}
|
||||
return pixel(0, 0);
|
||||
}
|
||||
|
||||
/*
|
||||
Apply the alpha channel to a given colour
|
||||
*/
|
||||
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour) {
|
||||
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour, bool premultiply) {
|
||||
// Scale up 8-bit values to match 16-bit input image
|
||||
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
// Create alphaColour colour
|
||||
@@ -797,7 +853,7 @@ namespace sharp {
|
||||
alphaColour.push_back(colour[3] * multiplier);
|
||||
}
|
||||
// Ensure alphaColour colour uses correct colourspace
|
||||
alphaColour = sharp::GetRgbaAsColourspace(alphaColour, image.interpretation());
|
||||
alphaColour = sharp::GetRgbaAsColourspace(alphaColour, image.interpretation(), premultiply);
|
||||
// Add non-transparent alpha channel, if required
|
||||
if (colour[3] < 255.0 && !HasAlpha(image)) {
|
||||
image = image.bandjoin(
|
||||
@@ -827,4 +883,75 @@ namespace sharp {
|
||||
}
|
||||
return image;
|
||||
}
|
||||
|
||||
std::pair<double, double> ResolveShrink(int width, int height, int targetWidth, int targetHeight,
|
||||
Canvas canvas, bool swap, bool withoutEnlargement) {
|
||||
if (swap) {
|
||||
// Swap input width and height when requested.
|
||||
std::swap(width, height);
|
||||
}
|
||||
|
||||
double hshrink = 1.0;
|
||||
double vshrink = 1.0;
|
||||
|
||||
if (targetWidth > 0 && targetHeight > 0) {
|
||||
// Fixed width and height
|
||||
hshrink = static_cast<double>(width) / targetWidth;
|
||||
vshrink = static_cast<double>(height) / targetHeight;
|
||||
|
||||
switch (canvas) {
|
||||
case Canvas::CROP:
|
||||
case Canvas::MIN:
|
||||
if (hshrink < vshrink) {
|
||||
vshrink = hshrink;
|
||||
} else {
|
||||
hshrink = vshrink;
|
||||
}
|
||||
break;
|
||||
case Canvas::EMBED:
|
||||
case Canvas::MAX:
|
||||
if (hshrink > vshrink) {
|
||||
vshrink = hshrink;
|
||||
} else {
|
||||
hshrink = vshrink;
|
||||
}
|
||||
break;
|
||||
case Canvas::IGNORE_ASPECT:
|
||||
if (swap) {
|
||||
std::swap(hshrink, vshrink);
|
||||
}
|
||||
break;
|
||||
}
|
||||
} else if (targetWidth > 0) {
|
||||
// Fixed width
|
||||
hshrink = static_cast<double>(width) / targetWidth;
|
||||
|
||||
if (canvas != Canvas::IGNORE_ASPECT) {
|
||||
// Auto height
|
||||
vshrink = hshrink;
|
||||
}
|
||||
} else if (targetHeight > 0) {
|
||||
// Fixed height
|
||||
vshrink = static_cast<double>(height) / targetHeight;
|
||||
|
||||
if (canvas != Canvas::IGNORE_ASPECT) {
|
||||
// Auto width
|
||||
hshrink = vshrink;
|
||||
}
|
||||
}
|
||||
|
||||
// We should not enlarge (oversample) the output image,
|
||||
// if withoutEnlargement is specified.
|
||||
if (withoutEnlargement) {
|
||||
hshrink = std::max(1.0, hshrink);
|
||||
vshrink = std::max(1.0, vshrink);
|
||||
}
|
||||
|
||||
// We don't want to shrink so much that we send an axis to 0
|
||||
hshrink = std::min(hshrink, static_cast<double>(width));
|
||||
vshrink = std::min(vshrink, static_cast<double>(height));
|
||||
|
||||
return std::make_pair(hshrink, vshrink);
|
||||
}
|
||||
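The shrink factors are simply `width / targetWidth` and `height / targetHeight`, with the canvas mode deciding which axis wins when both dimensions are fixed. A simplified JavaScript model of that selection (it ignores `swap`, `IGNORE_ASPECT` and the clamping above):

```js
// Simplified model of ResolveShrink for fixed width and height targets.
function resolveShrink (width, height, targetWidth, targetHeight, canvas) {
  let hshrink = width / targetWidth;
  let vshrink = height / targetHeight;
  if (canvas === 'crop' || canvas === 'min') {
    // Cover: use the smaller factor so both axes reach at least the target size.
    hshrink = vshrink = Math.min(hshrink, vshrink);
  } else {
    // Embed/max: use the larger factor so the result fits within the target.
    hshrink = vshrink = Math.max(hshrink, vshrink);
  }
  return [hshrink, vshrink];
}

console.log(resolveShrink(1000, 800, 200, 200, 'crop'));  // [ 4, 4 ]
console.log(resolveShrink(1000, 800, 200, 200, 'embed')); // [ 5, 5 ]
```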
|
||||
} // namespace sharp
|
||||
|
||||
src/common.h (56 changed lines)
@@ -25,9 +25,9 @@
|
||||
// Verify platform and compiler compatibility
|
||||
|
||||
#if (VIPS_MAJOR_VERSION < 8) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 11) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 11 && VIPS_MICRO_VERSION < 3)
|
||||
#error "libvips version 8.11.3+ is required - please see https://sharp.pixelplumbing.com/install"
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 12) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 12 && VIPS_MICRO_VERSION < 2)
|
||||
#error "libvips version 8.12.2+ is required - please see https://sharp.pixelplumbing.com/install"
|
||||
#endif
|
||||
|
||||
#if ((!defined(__clang__)) && defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6)))
|
||||
@@ -50,6 +50,7 @@ namespace sharp {
|
||||
char *buffer;
|
||||
bool failOnError;
|
||||
int limitInputPixels;
|
||||
bool unlimited;
|
||||
VipsAccess access;
|
||||
size_t bufferLength;
|
||||
bool isBuffer;
|
||||
@@ -75,6 +76,7 @@ namespace sharp {
|
||||
buffer(nullptr),
|
||||
failOnError(TRUE),
|
||||
limitInputPixels(0x3FFF * 0x3FFF),
|
||||
unlimited(FALSE),
|
||||
access(VIPS_ACCESS_RANDOM),
|
||||
bufferLength(0),
|
||||
isBuffer(FALSE),
|
||||
@@ -116,6 +118,7 @@ namespace sharp {
|
||||
JPEG,
|
||||
PNG,
|
||||
WEBP,
|
||||
JP2,
|
||||
TIFF,
|
||||
GIF,
|
||||
SVG,
|
||||
@@ -132,6 +135,14 @@ namespace sharp {
|
||||
MISSING
|
||||
};
|
||||
|
||||
enum class Canvas {
|
||||
CROP,
|
||||
EMBED,
|
||||
MAX,
|
||||
MIN,
|
||||
IGNORE_ASPECT
|
||||
};
|
||||
|
||||
// How many tasks are in the queue?
|
||||
extern volatile int counterQueue;
|
||||
|
||||
@@ -142,6 +153,7 @@ namespace sharp {
|
||||
bool IsJpeg(std::string const &str);
|
||||
bool IsPng(std::string const &str);
|
||||
bool IsWebp(std::string const &str);
|
||||
bool IsJp2(std::string const &str);
|
||||
bool IsGif(std::string const &str);
|
||||
bool IsTiff(std::string const &str);
|
||||
bool IsHeic(std::string const &str);
|
||||
@@ -204,9 +216,13 @@ namespace sharp {
|
||||
|
||||
/*
|
||||
Set animation properties if necessary.
|
||||
Non-provided properties will be loaded from image.
|
||||
*/
|
||||
VImage SetAnimationProperties(VImage image, int pageHeight, std::vector<int> delay, int loop);
|
||||
VImage SetAnimationProperties(VImage image, int nPages, int pageHeight, std::vector<int> delay, int loop);
|
||||
|
||||
/*
|
||||
Remove animation properties from image.
|
||||
*/
|
||||
VImage RemoveAnimationProperties(VImage image);
|
||||
|
||||
/*
|
||||
Does this image have a non-default density?
|
||||
@@ -223,6 +239,12 @@ namespace sharp {
|
||||
*/
|
||||
VImage SetDensity(VImage image, const double density);
|
||||
|
||||
/*
|
||||
Multi-page images can have a page height. Fetch it, and sanity check it.
|
||||
If page-height is not set, it defaults to the image height
|
||||
*/
|
||||
int GetPageHeight(VImage image);
|
||||
|
||||
/*
|
||||
Check the proposed format supports the current dimensions.
|
||||
*/
|
||||
@@ -243,6 +265,16 @@ namespace sharp {
|
||||
*/
|
||||
std::string VipsWarningPop();
|
||||
|
||||
/*
|
||||
Attach an event listener for progress updates, used to detect timeout
|
||||
*/
|
||||
void SetTimeout(VImage image, int const timeoutSeconds);
|
||||
|
||||
/*
|
||||
Event listener for progress updates, used to detect timeout
|
||||
*/
|
||||
void VipsProgressCallBack(VipsImage *image, VipsProgress *progress, int *timeoutSeconds);
|
||||
|
||||
/*
|
||||
Calculate the (left, top) coordinates of the output image
|
||||
within the input image, applying the given gravity during an embed.
|
||||
@@ -288,12 +320,13 @@ namespace sharp {
|
||||
/*
|
||||
Convert RGBA value to another colourspace
|
||||
*/
|
||||
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation);
|
||||
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba,
|
||||
VipsInterpretation const interpretation, bool premultiply);
|
||||
|
||||
/*
|
||||
Apply the alpha channel to a given colour
|
||||
*/
|
||||
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour);
|
||||
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour, bool premultiply);
|
||||
|
||||
/*
|
||||
Removes alpha channel, if any.
|
||||
@@ -305,6 +338,15 @@ namespace sharp {
|
||||
*/
|
||||
VImage EnsureAlpha(VImage image, double const value);
|
||||
|
||||
/*
|
||||
Calculate the shrink factor, taking into account auto-rotate, the canvas
|
||||
mode, and so on. The hshrink/vshrink are the amount to shrink the input
|
||||
image axes by in order for the output axes (ie. after rotation) to match
|
||||
the required thumbnail width/height and canvas mode.
|
||||
*/
|
||||
std::pair<double, double> ResolveShrink(int width, int height, int targetWidth, int targetHeight,
|
||||
Canvas canvas, bool swap, bool withoutEnlargement);
|
||||
|
||||
} // namespace sharp
|
||||
|
||||
#endif // SRC_COMMON_H_
|
||||
|
||||
@@ -93,7 +93,7 @@ negate( std::vector<double> vector )
{
std::vector<double> new_vector( vector.size() );

for( unsigned int i = 0; i < vector.size(); i++ )
for( std::vector<double>::size_type i = 0; i < vector.size(); i++ )
new_vector[i] = vector[i] * -1;

return( new_vector );
@@ -104,7 +104,7 @@ invert( std::vector<double> vector )
{
std::vector<double> new_vector( vector.size() );

for( unsigned int i = 0; i < vector.size(); i++ )
for( std::vector<double>::size_type i = 0; i < vector.size(); i++ )
new_vector[i] = 1.0 / vector[i];

return( new_vector );
@@ -210,7 +210,6 @@ VOption::set( const char *name, std::vector<int> value )
Pair *pair = new Pair( name );

int *array;
unsigned int i;

pair->input = true;

@@ -219,7 +218,7 @@ VOption::set( const char *name, std::vector<int> value )
static_cast< int >( value.size() ) );
array = vips_value_get_array_int( &pair->value, NULL );

for( i = 0; i < value.size(); i++ )
for( std::vector<double>::size_type i = 0; i < value.size(); i++ )
array[i] = value[i];

options.push_back( pair );
@@ -234,7 +233,6 @@ VOption::set( const char *name, std::vector<double> value )
Pair *pair = new Pair( name );

double *array;
unsigned int i;

pair->input = true;

@@ -243,7 +241,7 @@ VOption::set( const char *name, std::vector<double> value )
static_cast< int >( value.size() ) );
array = vips_value_get_array_double( &pair->value, NULL );

for( i = 0; i < value.size(); i++ )
for( std::vector<double>::size_type i = 0; i < value.size(); i++ )
array[i] = value[i];

options.push_back( pair );
@@ -258,7 +256,6 @@ VOption::set( const char *name, std::vector<VImage> value )
Pair *pair = new Pair( name );

VipsImage **array;
unsigned int i;

pair->input = true;

@@ -267,7 +264,7 @@ VOption::set( const char *name, std::vector<VImage> value )
static_cast< int >( value.size() ) );
array = vips_value_get_array_image( &pair->value, NULL );

for( i = 0; i < value.size(); i++ ) {
for( std::vector<double>::size_type i = 0; i < value.size(); i++ ) {
VipsImage *vips_image = value[i].get_image();

array[i] = vips_image;
@@ -488,10 +485,9 @@ VOption::get_operation( VipsOperation *operation )
double *array =
vips_value_get_array_double( value,
&length );
int j;

((*i)->vvector)->resize( length );
for( j = 0; j < length; j++ )
for( int j = 0; j < length; j++ )
(*((*i)->vvector))[j] = array[j];
}
else if( type == VIPS_TYPE_BLOB ) {
@@ -718,17 +714,38 @@ VImage::write_to_buffer( const char *suffix, void **buf, size_t *size,
const char *operation_name;
VipsBlob *blob;

/* Save with the new target API if we can. Fall back to the older
* mechanism in case the saver we need has not been converted yet.
*
* We need to hide any errors from this first phase.
*/
vips__filename_split8( suffix, filename, option_string );
if( !(operation_name = vips_foreign_find_save_buffer( filename )) ) {

vips_error_freeze();
operation_name = vips_foreign_find_save_target( filename );
vips_error_thaw();

if( operation_name ) {
VTarget target = VTarget::new_to_memory();

call_option_string( operation_name, option_string,
(options ? options : VImage::option())->
set( "in", *this )->
set( "target", target ) );

g_object_get( target.get_target(), "blob", &blob, NULL );
}
else if( (operation_name = vips_foreign_find_save_buffer( filename )) ) {
call_option_string( operation_name, option_string,
(options ? options : VImage::option())->
set( "in", *this )->
set( "buffer", &blob ) );
}
else {
delete options;
throw VError();
}

call_option_string( operation_name, option_string,
(options ? options : VImage::option())->
set( "in", *this )->
set( "buffer", &blob ) );

if( blob ) {
if( buf ) {
*buf = VIPS_AREA( blob )->data;
@@ -767,6 +784,7 @@ std::vector<VImage>
VImage::bandsplit( VOption *options ) const
{
std::vector<VImage> b;
b.reserve(bands());

for( int i = 0; i < bands(); i++ )
b.push_back( extract_band( i ) );

@@ -1,5 +1,5 @@
// bodies for vips operations
// Wed May 12 11:30:00 AM CEST 2021
// Mon Nov 1 03:31:09 PM CET 2021
// this file is generated automatically, do not edit!

VImage VImage::CMC2LCh( VOption *options ) const
@@ -1262,6 +1262,34 @@ VImage VImage::gifload_source( VSource source, VOption *options )
return( out );
}

void VImage::gifsave( const char *filename, VOption *options ) const
{
call( "gifsave",
(options ? options : VImage::option())->
set( "in", *this )->
set( "filename", filename ) );
}

VipsBlob *VImage::gifsave_buffer( VOption *options ) const
{
VipsBlob *buffer;

call( "gifsave_buffer",
(options ? options : VImage::option())->
set( "in", *this )->
set( "buffer", &buffer ) );

return( buffer );
}

void VImage::gifsave_target( VTarget target, VOption *options ) const
{
call( "gifsave_target",
(options ? options : VImage::option())->
set( "in", *this )->
set( "target", target ) );
}

VImage VImage::globalbalance( VOption *options ) const
{
VImage out;

@@ -77,6 +77,9 @@ class MetadataWorker : public Napi::AsyncWorker {
if (image.get_typeof("heif-compression") == VIPS_TYPE_REF_STRING) {
baton->compression = image.get_string("heif-compression");
}
if (image.get_typeof(VIPS_META_RESOLUTION_UNIT) == VIPS_TYPE_REF_STRING) {
baton->resolutionUnit = image.get_string(VIPS_META_RESOLUTION_UNIT);
}
if (image.get_typeof("openslide.level-count") == VIPS_TYPE_REF_STRING) {
int const levels = std::stoi(image.get_string("openslide.level-count"));
for (int l = 0; l < levels; l++) {
@@ -198,6 +201,9 @@ class MetadataWorker : public Napi::AsyncWorker {
if (!baton->compression.empty()) {
info.Set("compression", baton->compression);
}
if (!baton->resolutionUnit.empty()) {
info.Set("resolutionUnit", baton->resolutionUnit == "in" ? "inch" : baton->resolutionUnit);
}
if (!baton->levels.empty()) {
int i = 0;
Napi::Array levels = Napi::Array::New(env, static_cast<size_t>(baton->levels.size()));

@@ -40,6 +40,7 @@ struct MetadataBaton {
std::vector<int> delay;
int pagePrimary;
std::string compression;
std::string resolutionUnit;
std::vector<std::pair<int, int>> levels;
int subifds;
std::vector<double> background;

@@ -182,7 +182,8 @@ namespace sharp {
0.0, 0.0, 0.0, 1.0));
}

VImage Modulate(VImage image, double const brightness, double const saturation, int const hue) {
VImage Modulate(VImage image, double const brightness, double const saturation,
int const hue, double const lightness) {
if (HasAlpha(image)) {
// Separate alpha channel
VImage alpha = image[image.bands() - 1];
@@ -190,7 +191,7 @@ namespace sharp {
.colourspace(VIPS_INTERPRETATION_LCH)
.linear(
{ brightness, saturation, 1},
{ 0.0, 0.0, static_cast<double>(hue) }
{ lightness, 0.0, static_cast<double>(hue) }
)
.colourspace(VIPS_INTERPRETATION_sRGB)
.bandjoin(alpha);
@@ -199,7 +200,7 @@ namespace sharp {
.colourspace(VIPS_INTERPRETATION_LCH)
.linear(
{ brightness, saturation, 1 },
{ 0.0, 0.0, static_cast<double>(hue) }
{ lightness, 0.0, static_cast<double>(hue) }
)
.colourspace(VIPS_INTERPRETATION_sRGB);
}
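Restating what the linear() call above does per pixel in LCh space (this is only a rewording of the code, not an additional contract): L' = brightness × L + lightness, C' = saturation × C, h' = h + hue. The new lightness parameter is therefore an additive offset on CIE lightness, while brightness remains a multiplier, and hue stays an additive rotation of the hue angle.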
@@ -307,4 +308,98 @@ namespace sharp {
return image;
}

/*
* Split and crop each frame, reassemble, and update pageHeight.
*/
VImage CropMultiPage(VImage image, int left, int top, int width, int height,
int nPages, int *pageHeight) {
if (top == 0 && height == *pageHeight) {
// Fast path; no need to adjust the height of the multi-page image
return image.extract_area(left, 0, width, image.height());
} else {
std::vector<VImage> pages;
pages.reserve(nPages);

// Split the image into cropped frames
for (int i = 0; i < nPages; i++) {
pages.push_back(
image.extract_area(left, *pageHeight * i + top, width, height));
}

// Reassemble the frames into a tall, thin image
VImage assembled = VImage::arrayjoin(pages,
VImage::option()->set("across", 1));

// Update the page height
*pageHeight = height;

return assembled;
}
}

/*
* Split into frames, embed each frame, reassemble, and update pageHeight.
*/
VImage EmbedMultiPage(VImage image, int left, int top, int width, int height,
std::vector<double> background, int nPages, int *pageHeight) {
if (top == 0 && height == *pageHeight) {
// Fast path; no need to adjust the height of the multi-page image
return image.embed(left, 0, width, image.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
} else if (left == 0 && width == image.width()) {
// Fast path; no need to adjust the width of the multi-page image
std::vector<VImage> pages;
pages.reserve(nPages);

// Rearrange the tall image into a vertical grid
image = image.grid(*pageHeight, nPages, 1);

// Do the embed on the wide image
image = image.embed(0, top, image.width(), height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));

// Split the wide image into frames
for (int i = 0; i < nPages; i++) {
pages.push_back(
image.extract_area(width * i, 0, width, height));
}

// Reassemble the frames into a tall, thin image
VImage assembled = VImage::arrayjoin(pages,
VImage::option()->set("across", 1));

// Update the page height
*pageHeight = height;

return assembled;
} else {
std::vector<VImage> pages;
pages.reserve(nPages);

// Split the image into frames
for (int i = 0; i < nPages; i++) {
pages.push_back(
image.extract_area(0, *pageHeight * i, image.width(), *pageHeight));
}

// Embed each frame in the target size
for (int i = 0; i < nPages; i++) {
pages[i] = pages[i].embed(left, top, width, height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}

// Reassemble the frames into a tall, thin image
VImage assembled = VImage::arrayjoin(pages,
VImage::option()->set("across", 1));

// Update the page height
*pageHeight = height;

return assembled;
}
}

} // namespace sharp

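The second fast path in EmbedMultiPage above leans on libvips' grid operator: for a "toilet roll" of, say, 3 pages each 100 pixels tall, grid(100, 3, 1) lays the pages out 3-across and 1-down, so a single embed can pad every page's top and bottom at once before the frames are cut apart and re-joined vertically. A minimal sketch of that layout step only, with illustrative names and numbers:

// Illustrative only: 3 pages of 100px each, stacked vertically in tallImage
int pageHeight = 100;
int nPages = 3;
// Rearrange into nPages tiles across, 1 down (now 3x wider, pageHeight tall)
VImage strip = tallImage.grid(pageHeight, nPages, 1);
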
@@ -98,15 +98,28 @@ namespace sharp {
VImage Recomb(VImage image, std::unique_ptr<double[]> const &matrix);

/*
* Modulate brightness, saturation and hue
* Modulate brightness, saturation, hue and lightness
*/
VImage Modulate(VImage image, double const brightness, double const saturation, int const hue);
VImage Modulate(VImage image, double const brightness, double const saturation,
int const hue, double const lightness);

/*
* Ensure the image is in a given colourspace
*/
VImage EnsureColourspace(VImage image, VipsInterpretation colourspace);

/*
* Split and crop each frame, reassemble, and update pageHeight.
*/
VImage CropMultiPage(VImage image, int left, int top, int width, int height,
int nPages, int *pageHeight);

/*
* Split into frames, embed each frame, reassemble, and update pageHeight.
*/
VImage EmbedMultiPage(VImage image, int left, int top, int width, int height,
std::vector<double> background, int nPages, int *pageHeight);

} // namespace sharp

#endif // SRC_OPERATIONS_H_

623 src/pipeline.cc
@@ -69,15 +69,24 @@ class PipelineWorker : public Napi::AsyncWorker {
std::tie(image, inputImageType) = sharp::OpenInput(baton->input);
image = sharp::EnsureColourspace(image, baton->colourspaceInput);

int nPages = baton->input->pages;
if (nPages == -1) {
// Resolve the number of pages if we need to render until the end of the document
nPages = image.get_typeof(VIPS_META_N_PAGES) != 0
? image.get_int(VIPS_META_N_PAGES) - baton->input->page
: 1;
}

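A quick worked example of the page-count resolution above, with illustrative numbers only: for an animated input whose loader reports n-pages = 10, opened with page 2 and pages = -1 ("to the end of the document"), the block resolves nPages to 10 - 2 = 8, i.e. eight frames remain to be processed.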
// Get pre-resize page height
int pageHeight = sharp::GetPageHeight(image);

// Calculate angle of rotation
VipsAngle rotation;
bool flip = FALSE;
bool flop = FALSE;
if (baton->useExifOrientation) {
// Rotate and flip image according to Exif orientation
bool flip;
bool flop;
std::tie(rotation, flip, flop) = CalculateExifRotationAndFlip(sharp::ExifOrientation(image));
baton->flip = baton->flip || flip;
baton->flop = baton->flop || flop;
} else {
rotation = CalculateAngleRotation(baton->angle);
}
@@ -86,17 +95,27 @@ class PipelineWorker : public Napi::AsyncWorker {
if (baton->rotateBeforePreExtract) {
if (rotation != VIPS_ANGLE_D0) {
image = image.rot(rotation);
if (flip) {
image = image.flip(VIPS_DIRECTION_VERTICAL);
flip = FALSE;
}
if (flop) {
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
flop = FALSE;
}
image = sharp::RemoveExifOrientation(image);
}
if (baton->rotationAngle != 0.0) {
MultiPageUnsupported(nPages, "Rotate");
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground, FALSE);
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
}
}

// Trim
if (baton->trimThreshold > 0.0) {
MultiPageUnsupported(nPages, "Trim");
image = sharp::Trim(image, baton->trimThreshold);
baton->trimOffsetLeft = image.xoffset();
baton->trimOffsetTop = image.yoffset();
@@ -104,201 +123,194 @@ class PipelineWorker : public Napi::AsyncWorker {

// Pre extraction
if (baton->topOffsetPre != -1) {
image = image.extract_area(baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre);
image = nPages > 1
? sharp::CropMultiPage(image,
baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, nPages, &pageHeight)
: image.extract_area(baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre);
}

// Get pre-resize image width and height
int inputWidth = image.width();
int inputHeight = image.height();
if (!baton->rotateBeforePreExtract &&
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270)) {
// Swap input output width and height when rotating by 90 or 270 degrees
std::swap(inputWidth, inputHeight);

// Is there just one page? Shrink to inputHeight instead
if (nPages == 1) {
pageHeight = inputHeight;
}

// If withoutEnlargement is specified,
// Override target width and height if exceeds respective value from input file
if (baton->withoutEnlargement) {
if (baton->width > inputWidth) {
baton->width = inputWidth;
}
if (baton->height > inputHeight) {
baton->height = inputHeight;
}
// If withoutReduction is specified,
// Override target width and height if less than respective value from input file
if (baton->withoutReduction) {
if (baton->width < inputWidth) {
baton->width = inputWidth;
}
if (baton->height < inputHeight) {
baton->height = inputHeight;
}
}

// Scaling calculations
double xfactor = 1.0;
double yfactor = 1.0;
double hshrink;
double vshrink;
int targetResizeWidth = baton->width;
int targetResizeHeight = baton->height;
if (baton->width > 0 && baton->height > 0) {
// Fixed width and height
xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
switch (baton->canvas) {
case Canvas::CROP:
if (xfactor < yfactor) {
targetResizeHeight = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
yfactor = xfactor;
} else {
targetResizeWidth = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
xfactor = yfactor;
}
break;
case Canvas::EMBED:
if (xfactor > yfactor) {
targetResizeHeight = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
yfactor = xfactor;
} else {
targetResizeWidth = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
xfactor = yfactor;
}
break;
case Canvas::MAX:
if (xfactor > yfactor) {
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
yfactor = xfactor;
} else {
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
xfactor = yfactor;
}
break;
case Canvas::MIN:
if (xfactor < yfactor) {
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
yfactor = xfactor;
} else {
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
xfactor = yfactor;
}
break;
case Canvas::IGNORE_ASPECT:
if (!baton->rotateBeforePreExtract &&
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270)) {
std::swap(xfactor, yfactor);
}
break;
}
} else if (baton->width > 0) {
// Fixed width
xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
if (baton->canvas == Canvas::IGNORE_ASPECT) {
targetResizeHeight = baton->height = inputHeight;
} else {
// Auto height
yfactor = xfactor;
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / yfactor));
}
} else if (baton->height > 0) {
// Fixed height
yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
if (baton->canvas == Canvas::IGNORE_ASPECT) {
targetResizeWidth = baton->width = inputWidth;
} else {
// Auto width
xfactor = yfactor;
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / xfactor));
}
} else {
// Identity transform
baton->width = inputWidth;
baton->height = inputHeight;
}

// Calculate integral box shrink
int xshrink = std::max(1, static_cast<int>(floor(xfactor)));
int yshrink = std::max(1, static_cast<int>(floor(yfactor)));
// Swap input output width and height when rotating by 90 or 270 degrees
bool swap = !baton->rotateBeforePreExtract && (rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270);

// Calculate residual float affine transformation
double xresidual = static_cast<double>(xshrink) / xfactor;
double yresidual = static_cast<double>(yshrink) / yfactor;
// Shrink to pageHeight, so we work for multi-page images
std::tie(hshrink, vshrink) = sharp::ResolveShrink(
inputWidth, pageHeight, targetResizeWidth, targetResizeHeight,
baton->canvas, swap, baton->withoutEnlargement);

// If integral x and y shrink are equal, try to use shrink-on-load for JPEG and WebP,
// but not when applying gamma correction, pre-resize extract, trim or input colourspace
int shrink_on_load = 1;
// The jpeg preload shrink.
int jpegShrinkOnLoad = 1;

int shrink_on_load_factor = 1;
// Leave at least a factor of two for the final resize step, when fastShrinkOnLoad: false
// for more consistent results and avoid occasional small image shifting
if (!baton->fastShrinkOnLoad) {
shrink_on_load_factor = 2;
}
if (
xshrink == yshrink && xshrink >= 2 * shrink_on_load_factor &&
(inputImageType == sharp::ImageType::JPEG || inputImageType == sharp::ImageType::WEBP) &&
// WebP, PDF, SVG scale
double scale = 1.0;

// Try to reload input using shrink-on-load for JPEG, WebP, SVG and PDF, when:
// - the width or height parameters are specified;
// - gamma correction doesn't need to be applied;
// - trimming or pre-resize extract isn't required;
// - input colourspace is not specified;
bool const shouldPreShrink = (targetResizeWidth > 0 || targetResizeHeight > 0) &&
baton->gamma == 0 && baton->topOffsetPre == -1 && baton->trimThreshold == 0.0 &&
baton->colourspaceInput == VIPS_INTERPRETATION_LAST &&
image.width() > 3 && image.height() > 3 && baton->input->pages == 1
) {
if (xshrink >= 8 * shrink_on_load_factor) {
xfactor = xfactor / 8;
yfactor = yfactor / 8;
shrink_on_load = 8;
} else if (xshrink >= 4 * shrink_on_load_factor) {
xfactor = xfactor / 4;
yfactor = yfactor / 4;
shrink_on_load = 4;
} else if (xshrink >= 2 * shrink_on_load_factor) {
xfactor = xfactor / 2;
yfactor = yfactor / 2;
shrink_on_load = 2;
baton->colourspaceInput == VIPS_INTERPRETATION_LAST;

if (shouldPreShrink) {
// The common part of the shrink: the bit by which both axes must be shrunk
double shrink = std::min(hshrink, vshrink);

if (inputImageType == sharp::ImageType::JPEG) {
// Leave at least a factor of two for the final resize step, when fastShrinkOnLoad: false
// for more consistent results and avoid occasional small image shifting
int factor = baton->fastShrinkOnLoad ? 1 : 2;
if (shrink >= 8 * factor) {
jpegShrinkOnLoad = 8;
} else if (shrink >= 4 * factor) {
jpegShrinkOnLoad = 4;
} else if (shrink >= 2 * factor) {
jpegShrinkOnLoad = 2;
}
} else if (inputImageType == sharp::ImageType::WEBP ||
inputImageType == sharp::ImageType::SVG ||
inputImageType == sharp::ImageType::PDF) {
scale = 1.0 / shrink;
}
}
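A worked example of the pre-shrink selection above, with illustrative numbers only: resizing a 4000x3000 JPEG to fit 400x300 gives hshrink = vshrink = 10, so shrink = 10; with fastShrinkOnLoad enabled (factor 1) this satisfies shrink >= 8, so jpegShrinkOnLoad becomes 8 and libjpeg decodes at roughly 500x375, leaving only a small residual reduction for the kernel-based resize. For a WebP, SVG or PDF input the same shrink of 10 would instead be expressed as scale = 1.0 / 10 = 0.1.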
// Help ensure a final kernel-based reduction to prevent shrink aliasing
if (shrink_on_load > 1 && (xresidual == 1.0 || yresidual == 1.0)) {
shrink_on_load = shrink_on_load / 2;
xfactor = xfactor * 2;
yfactor = yfactor * 2;
}
if (shrink_on_load > 1) {
// Reload input using shrink-on-load

// Reload input using shrink-on-load, it'll be an integer shrink
// factor for jpegload*, a double scale factor for webpload*,
// pdfload* and svgload*
if (jpegShrinkOnLoad > 1) {
vips::VOption *option = VImage::option()
->set("access", baton->input->access)
->set("shrink", shrink_on_load)
->set("shrink", jpegShrinkOnLoad)
->set("fail", baton->input->failOnError);
if (baton->input->buffer != nullptr) {
// Reload JPEG buffer
VipsBlob *blob = vips_blob_new(nullptr, baton->input->buffer, baton->input->bufferLength);
if (inputImageType == sharp::ImageType::JPEG) {
// Reload JPEG buffer
image = VImage::jpegload_buffer(blob, option);
} else {
// Reload WebP buffer
image = VImage::webpload_buffer(blob, option);
}
image = VImage::jpegload_buffer(blob, option);
vips_area_unref(reinterpret_cast<VipsArea*>(blob));
} else {
if (inputImageType == sharp::ImageType::JPEG) {
// Reload JPEG file
image = VImage::jpegload(const_cast<char*>(baton->input->file.data()), option);
// Reload JPEG file
image = VImage::jpegload(const_cast<char*>(baton->input->file.data()), option);
}
} else if (scale != 1.0) {
vips::VOption *option = VImage::option()
->set("access", baton->input->access)
->set("scale", scale)
->set("fail", baton->input->failOnError);
if (inputImageType == sharp::ImageType::WEBP) {
option->set("n", baton->input->pages);
option->set("page", baton->input->page);

if (baton->input->buffer != nullptr) {
// Reload WebP buffer
VipsBlob *blob = vips_blob_new(nullptr, baton->input->buffer, baton->input->bufferLength);
image = VImage::webpload_buffer(blob, option);
vips_area_unref(reinterpret_cast<VipsArea*>(blob));
} else {
// Reload WebP file
image = VImage::webpload(const_cast<char*>(baton->input->file.data()), option);
}
}
// Recalculate integral shrink and double residual
int const shrunkOnLoadWidth = image.width();
int const shrunkOnLoadHeight = image.height();
if (!baton->rotateBeforePreExtract &&
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270)) {
// Swap when rotating by 90 or 270 degrees
xfactor = static_cast<double>(shrunkOnLoadWidth) / static_cast<double>(targetResizeHeight);
yfactor = static_cast<double>(shrunkOnLoadHeight) / static_cast<double>(targetResizeWidth);
} else {
xfactor = static_cast<double>(shrunkOnLoadWidth) / static_cast<double>(targetResizeWidth);
yfactor = static_cast<double>(shrunkOnLoadHeight) / static_cast<double>(targetResizeHeight);
} else if (inputImageType == sharp::ImageType::SVG) {
option->set("unlimited", baton->input->unlimited);
option->set("dpi", baton->input->density);

if (baton->input->buffer != nullptr) {
// Reload SVG buffer
VipsBlob *blob = vips_blob_new(nullptr, baton->input->buffer, baton->input->bufferLength);
image = VImage::svgload_buffer(blob, option);
vips_area_unref(reinterpret_cast<VipsArea*>(blob));
} else {
// Reload SVG file
image = VImage::svgload(const_cast<char*>(baton->input->file.data()), option);
}

sharp::SetDensity(image, baton->input->density);
} else if (inputImageType == sharp::ImageType::PDF) {
option->set("n", baton->input->pages);
option->set("page", baton->input->page);
option->set("dpi", baton->input->density);

if (baton->input->buffer != nullptr) {
// Reload PDF buffer
VipsBlob *blob = vips_blob_new(nullptr, baton->input->buffer, baton->input->bufferLength);
image = VImage::pdfload_buffer(blob, option);
vips_area_unref(reinterpret_cast<VipsArea*>(blob));
} else {
// Reload PDF file
image = VImage::pdfload(const_cast<char*>(baton->input->file.data()), option);
}

sharp::SetDensity(image, baton->input->density);
}
}

// Any pre-shrinking may already have been done
int thumbWidth = image.width();
int thumbHeight = image.height();

// After pre-shrink, but before the main shrink stage
// Reuse the initial pageHeight if we didn't pre-shrink
int preshrunkPageHeight = shouldPreShrink ? sharp::GetPageHeight(image) : pageHeight;

if (baton->fastShrinkOnLoad && jpegShrinkOnLoad > 1) {
// JPEG shrink-on-load rounds the output dimensions down, which
// may cause incorrect dimensions when fastShrinkOnLoad is enabled
// Just recalculate vshrink / hshrink on the main image instead of
// the pre-shrunk image when this is the case
hshrink = static_cast<double>(thumbWidth) / (static_cast<double>(inputWidth) / hshrink);
vshrink = static_cast<double>(preshrunkPageHeight) / (static_cast<double>(pageHeight) / vshrink);
} else {
// Shrink to preshrunkPageHeight, so we work for multi-page images
std::tie(hshrink, vshrink) = sharp::ResolveShrink(
thumbWidth, preshrunkPageHeight, targetResizeWidth, targetResizeHeight,
baton->canvas, swap, baton->withoutEnlargement);
}

int targetHeight = static_cast<int>(std::rint(static_cast<double>(preshrunkPageHeight) / vshrink));
int targetPageHeight = targetHeight;

// In toilet-roll mode, we must adjust vshrink so that we exactly hit
// preshrunkPageHeight or we'll have pixels straddling pixel boundaries
if (thumbHeight > preshrunkPageHeight) {
targetHeight *= nPages;
vshrink = static_cast<double>(thumbHeight) / targetHeight;
}

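A worked example of the toilet-roll adjustment above, with illustrative numbers only: with 4 pages of preshrunkPageHeight = 100 (so thumbHeight = 400) and vshrink = 3, targetHeight is rint(100 / 3) = 33; targetHeight then becomes 33 × 4 = 132 and vshrink is recomputed as 400 / 132 ≈ 3.03, so every page lands on an exact 33-pixel boundary instead of straddling fractional rows.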
// Ensure we're using a device-independent colour space
char const *processingProfile = image.interpretation() == VIPS_INTERPRETATION_RGB16 ? "p3" : "srgb";
if (
sharp::HasProfile(image) &&
image.interpretation() != VIPS_INTERPRETATION_LABS &&
image.interpretation() != VIPS_INTERPRETATION_GREY16
) {
// Convert to sRGB using embedded profile
// Convert to sRGB/P3 using embedded profile
try {
image = image.icc_transform("srgb", VImage::option()
image = image.icc_transform(processingProfile, VImage::option()
->set("embedded", TRUE)
->set("depth", image.interpretation() == VIPS_INTERPRETATION_RGB16 ? 16 : 8)
->set("intent", VIPS_INTENT_PERCEPTUAL));
@@ -306,7 +318,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Ignore failure of embedded profile
}
} else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
image = image.icc_transform("srgb", VImage::option()
image = image.icc_transform(processingProfile, VImage::option()
->set("input_profile", "cmyk")
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
@@ -340,13 +352,14 @@ class PipelineWorker : public Napi::AsyncWorker {
image = image.colourspace(VIPS_INTERPRETATION_B_W);
}

bool const shouldResize = xfactor != 1.0 || yfactor != 1.0;
bool const shouldResize = hshrink != 1.0 || vshrink != 1.0;
bool const shouldBlur = baton->blurSigma != 0.0;
bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 || baton->hue != 0.0;
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 ||
baton->hue != 0.0 || baton->lightness != 0.0;
bool const shouldApplyClahe = baton->claheWidth != 0 && baton->claheHeight != 0;

if (shouldComposite && !sharp::HasAlpha(image)) {
@@ -373,35 +386,33 @@ class PipelineWorker : public Napi::AsyncWorker {
) {
throw vips::VError("Unknown kernel");
}
// Ensure shortest edge is at least 1 pixel
if (image.width() / xfactor < 0.5) {
xfactor = 2 * image.width();
baton->width = 1;
}
if (image.height() / yfactor < 0.5) {
yfactor = 2 * image.height();
baton->height = 1;
}
image = image.resize(1.0 / xfactor, VImage::option()
->set("vscale", 1.0 / yfactor)
image = image.resize(1.0 / hshrink, VImage::option()
->set("vscale", 1.0 / vshrink)
->set("kernel", kernel));
}

// Rotate post-extract 90-angle
if (!baton->rotateBeforePreExtract && rotation != VIPS_ANGLE_D0) {
image = image.rot(rotation);
image = sharp::RemoveExifOrientation(image);
if (!baton->rotateBeforePreExtract && rotation != VIPS_ANGLE_D0) {
image = image.rot(rotation);
if (flip) {
image = image.flip(VIPS_DIRECTION_VERTICAL);
flip = FALSE;
}
if (flop) {
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
flop = FALSE;
}
image = sharp::RemoveExifOrientation(image);
}


// Flip (mirror about Y axis)
if (baton->flip) {
if (baton->flip || flip) {
image = image.flip(VIPS_DIRECTION_VERTICAL);
image = sharp::RemoveExifOrientation(image);
}

// Flop (mirror about X axis)
if (baton->flop) {
if (baton->flop || flop) {
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
image = sharp::RemoveExifOrientation(image);
}
@@ -419,52 +430,68 @@ class PipelineWorker : public Napi::AsyncWorker {
image = image.copy(VImage::option()->set("interpretation", baton->colourspace));
}

inputWidth = image.width();
inputHeight = nPages > 1 ? targetPageHeight : image.height();

// Resolve dimensions
if (baton->width <= 0) {
baton->width = inputWidth;
}
if (baton->height <= 0) {
baton->height = inputHeight;
}

// Crop/embed
if (image.width() != baton->width || image.height() != baton->height) {
if (baton->canvas == Canvas::EMBED) {
if (inputWidth != baton->width || inputHeight != baton->height) {
if (baton->canvas == sharp::Canvas::EMBED) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground, shouldPremultiplyAlpha);

// Embed

// Calculate where to position the embeded image if gravity specified, else center.
// Calculate where to position the embedded image if gravity specified, else center.
int left;
int top;

left = static_cast<int>(round((baton->width - image.width()) / 2));
top = static_cast<int>(round((baton->height - image.height()) / 2));
left = static_cast<int>(round((baton->width - inputWidth) / 2));
top = static_cast<int>(round((baton->height - inputHeight) / 2));

int width = std::max(image.width(), baton->width);
int height = std::max(image.height(), baton->height);
int width = std::max(inputWidth, baton->width);
int height = std::max(inputHeight, baton->height);
std::tie(left, top) = sharp::CalculateEmbedPosition(
image.width(), image.height(), baton->width, baton->height, baton->position);
inputWidth, inputHeight, baton->width, baton->height, baton->position);

image = image.embed(left, top, width, height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
image = nPages > 1
? sharp::EmbedMultiPage(image,
left, top, width, height, background, nPages, &targetPageHeight)
: image.embed(left, top, width, height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
} else if (baton->canvas == sharp::Canvas::CROP) {
if (baton->width > inputWidth) {
baton->width = inputWidth;
}
if (baton->height > inputHeight) {
baton->height = inputHeight;
}

} else if (
baton->canvas != Canvas::IGNORE_ASPECT &&
(image.width() > baton->width || image.height() > baton->height)
) {
// Crop/max/min
// Crop
if (baton->position < 9) {
// Gravity-based crop
int left;
int top;
std::tie(left, top) = sharp::CalculateCrop(
image.width(), image.height(), baton->width, baton->height, baton->position);
int width = std::min(image.width(), baton->width);
int height = std::min(image.height(), baton->height);
image = image.extract_area(left, top, width, height);
inputWidth, inputHeight, baton->width, baton->height, baton->position);
int width = std::min(inputWidth, baton->width);
int height = std::min(inputHeight, baton->height);

image = nPages > 1
? sharp::CropMultiPage(image,
left, top, width, height, nPages, &targetPageHeight)
: image.extract_area(left, top, width, height);
} else {
// Attention-based or Entropy-based crop
if (baton->width > image.width()) {
baton->width = image.width();
}
if (baton->height > image.height()) {
baton->height = image.height();
}
MultiPageUnsupported(nPages, "Resize strategy");
image = image.tilecache(VImage::option()
->set("access", VIPS_ACCESS_RANDOM)
->set("threaded", TRUE));
@@ -479,21 +506,32 @@ class PipelineWorker : public Napi::AsyncWorker {

// Rotate post-extract non-90 angle
if (!baton->rotateBeforePreExtract && baton->rotationAngle != 0.0) {
MultiPageUnsupported(nPages, "Rotate");
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground, shouldPremultiplyAlpha);
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
}

// Post extraction
if (baton->topOffsetPost != -1) {
image = image.extract_area(
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost);
if (nPages > 1) {
image = sharp::CropMultiPage(image,
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost,
nPages, &targetPageHeight);

// heightPost is used in the info object, so update to reflect the number of pages
baton->heightPost *= nPages;
} else {
image = image.extract_area(
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost);
}
}

// Affine transform
if (baton->affineMatrix.size() > 0) {
MultiPageUnsupported(nPages, "Affine");
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground, shouldPremultiplyAlpha);
image = image.affine(baton->affineMatrix, VImage::option()->set("background", background)
->set("idx", baton->affineIdx)
->set("idy", baton->affineIdy)
@@ -505,14 +543,17 @@ class PipelineWorker : public Napi::AsyncWorker {
// Extend edges
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground);
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground, shouldPremultiplyAlpha);

// Embed
baton->width = image.width() + baton->extendLeft + baton->extendRight;
baton->height = image.height() + baton->extendTop + baton->extendBottom;
baton->height = (nPages > 1 ? targetPageHeight : image.height()) + baton->extendTop + baton->extendBottom;

image = image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background));
image = nPages > 1
? sharp::EmbedMultiPage(image,
baton->extendLeft, baton->extendTop, baton->width, baton->height, background, nPages, &targetPageHeight)
: image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background));
}
// Median - must happen before blurring, due to the utility of blurring after thresholding
if (shouldApplyMedian) {
@@ -542,7 +583,7 @@ class PipelineWorker : public Napi::AsyncWorker {
}

if (shouldModulate) {
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue);
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue, baton->lightness);
}

// Sharpen
@@ -715,9 +756,10 @@ class PipelineWorker : public Napi::AsyncWorker {
// Convert colourspace, pass the current known interpretation so libvips doesn't have to guess
image = image.colourspace(baton->colourspace, VImage::option()->set("source_space", image.interpretation()));
// Transform colours from embedded profile to output profile
if (baton->withMetadata && sharp::HasProfile(image)) {
image = image.icc_transform(vips_enum_nick(VIPS_TYPE_INTERPRETATION, baton->colourspace),
VImage::option()->set("embedded", TRUE));
if (baton->withMetadata && sharp::HasProfile(image) && baton->withMetadataIcc.empty()) {
image = image.icc_transform("srgb", VImage::option()
->set("embedded", TRUE)
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
}

@@ -726,7 +768,8 @@ class PipelineWorker : public Napi::AsyncWorker {
image = image.icc_transform(
const_cast<char*>(baton->withMetadataIcc.data()),
VImage::option()
->set("input_profile", "srgb")
->set("input_profile", processingProfile)
->set("embedded", TRUE)
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
// Override EXIF Orientation tag
@@ -750,16 +793,11 @@ class PipelineWorker : public Napi::AsyncWorker {
baton->width = image.width();
baton->height = image.height();

bool const supportsGifOutput = vips_type_find("VipsOperation", "magicksave") != 0 &&
vips_type_find("VipsOperation", "magicksave_buffer") != 0;

image = sharp::SetAnimationProperties(
image,
baton->pageHeight,
baton->delay,
baton->loop);
image, nPages, targetPageHeight, baton->delay, baton->loop);

// Output
sharp::SetTimeout(image, baton->timeoutSeconds);
if (baton->fileOut.empty()) {
// Buffer output
if (baton->formatOut == "jpeg" || (baton->formatOut == "input" && inputImageType == sharp::ImageType::JPEG)) {
@@ -787,9 +825,24 @@ class PipelineWorker : public Napi::AsyncWorker {
} else {
baton->channels = std::min(baton->channels, 3);
}
} else if (baton->formatOut == "jp2" || (baton->formatOut == "input"
&& inputImageType == sharp::ImageType::JP2)) {
// Write JP2 to Buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JP2);
VipsArea *area = reinterpret_cast<VipsArea*>(image.jp2ksave_buffer(VImage::option()
->set("Q", baton->jp2Quality)
->set("lossless", baton->jp2Lossless)
->set("subsample_mode", baton->jp2ChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("tile_height", baton->jp2TileHeight)
->set("tile_width", baton->jp2TileWidth)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "jp2";
} else if (baton->formatOut == "png" || (baton->formatOut == "input" &&
(inputImageType == sharp::ImageType::PNG || (inputImageType == sharp::ImageType::GIF && !supportsGifOutput) ||
inputImageType == sharp::ImageType::SVG))) {
(inputImageType == sharp::ImageType::PNG || inputImageType == sharp::ImageType::SVG))) {
// Write PNG to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::PNG);
VipsArea *area = reinterpret_cast<VipsArea*>(image.pngsave_buffer(VImage::option()
@@ -799,7 +852,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("effort", baton->pngEffort)
->set("bitdepth", sharp::Is16Bit(image.interpretation()) ? 16 : baton->pngBitdepth)
->set("dither", baton->pngDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
@@ -816,7 +870,7 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("lossless", baton->webpLossless)
->set("near_lossless", baton->webpNearLossless)
->set("smart_subsample", baton->webpSmartSubsample)
->set("reduction_effort", baton->webpReductionEffort)
->set("effort", baton->webpEffort)
->set("alpha_q", baton->webpAlphaQuality)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
@@ -824,14 +878,14 @@ class PipelineWorker : public Napi::AsyncWorker {
vips_area_unref(area);
baton->formatOut = "webp";
} else if (baton->formatOut == "gif" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::GIF && supportsGifOutput)) {
(baton->formatOut == "input" && inputImageType == sharp::ImageType::GIF)) {
// Write GIF to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::GIF);
VipsArea *area = reinterpret_cast<VipsArea*>(image.magicksave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.gifsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("optimize_gif_frames", TRUE)
->set("optimize_gif_transparency", TRUE)
->set("format", "gif")));
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("dither", baton->gifDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -859,7 +913,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("tile_height", baton->tiffTileHeight)
->set("tile_width", baton->tiffTileWidth)
->set("xres", baton->tiffXres)
->set("yres", baton->tiffYres)));
->set("yres", baton->tiffYres)
->set("resunit", baton->tiffResolutionUnit)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -868,11 +923,12 @@ class PipelineWorker : public Napi::AsyncWorker {
} else if (baton->formatOut == "heif" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::HEIF)) {
// Write HEIF to buffer
image = sharp::RemoveAnimationProperties(image);
VipsArea *area = reinterpret_cast<VipsArea*>(image.heifsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->heifQuality)
->set("compression", baton->heifCompression)
->set("speed", baton->heifSpeed)
->set("effort", baton->heifEffort)
->set("subsample_mode", baton->heifChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("lossless", baton->heifLossless)));
@@ -917,13 +973,14 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const isWebp = sharp::IsWebp(baton->fileOut);
bool const isGif = sharp::IsGif(baton->fileOut);
bool const isTiff = sharp::IsTiff(baton->fileOut);
bool const isJp2 = sharp::IsJp2(baton->fileOut);
bool const isHeif = sharp::IsHeif(baton->fileOut);
bool const isDz = sharp::IsDz(baton->fileOut);
bool const isDzZip = sharp::IsDzZip(baton->fileOut);
bool const isV = sharp::IsV(baton->fileOut);
bool const mightMatchInput = baton->formatOut == "input";
bool const willMatchInput = mightMatchInput &&
!(isJpeg || isPng || isWebp || isGif || isTiff || isHeif || isDz || isDzZip || isV);
!(isJpeg || isPng || isWebp || isGif || isTiff || isJp2 || isHeif || isDz || isDzZip || isV);

if (baton->formatOut == "jpeg" || (mightMatchInput && isJpeg) ||
(willMatchInput && inputImageType == sharp::ImageType::JPEG)) {
@@ -943,9 +1000,20 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("optimize_coding", baton->jpegOptimiseCoding));
baton->formatOut = "jpeg";
baton->channels = std::min(baton->channels, 3);
} else if (baton->formatOut == "jp2" || (mightMatchInput && isJp2) ||
(willMatchInput && (inputImageType == sharp::ImageType::JP2))) {
// Write JP2 to file
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JP2);
image.jp2ksave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("Q", baton->jp2Quality)
->set("lossless", baton->jp2Lossless)
->set("subsample_mode", baton->jp2ChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("tile_height", baton->jp2TileHeight)
->set("tile_width", baton->jp2TileWidth));
baton->formatOut = "jp2";
} else if (baton->formatOut == "png" || (mightMatchInput && isPng) || (willMatchInput &&
(inputImageType == sharp::ImageType::PNG || (inputImageType == sharp::ImageType::GIF && !supportsGifOutput) ||
inputImageType == sharp::ImageType::SVG))) {
(inputImageType == sharp::ImageType::PNG || inputImageType == sharp::ImageType::SVG))) {
// Write PNG to file
sharp::AssertImageTypeDimensions(image, sharp::ImageType::PNG);
image.pngsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
@@ -955,7 +1023,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("bitdepth", sharp::Is16Bit(image.interpretation()) ? 16 : baton->pngBitdepth)
->set("effort", baton->pngEffort)
->set("dither", baton->pngDither));
baton->formatOut = "png";
} else if (baton->formatOut == "webp" || (mightMatchInput && isWebp) ||
@@ -968,18 +1037,18 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("lossless", baton->webpLossless)
->set("near_lossless", baton->webpNearLossless)
->set("smart_subsample", baton->webpSmartSubsample)
->set("reduction_effort", baton->webpReductionEffort)
->set("effort", baton->webpEffort)
->set("alpha_q", baton->webpAlphaQuality));
baton->formatOut = "webp";
} else if (baton->formatOut == "gif" || (mightMatchInput && isGif) ||
(willMatchInput && inputImageType == sharp::ImageType::GIF && supportsGifOutput)) {
(willMatchInput && inputImageType == sharp::ImageType::GIF)) {
// Write GIF to file
sharp::AssertImageTypeDimensions(image, sharp::ImageType::GIF);
image.magicksave(const_cast<char*>(baton->fileOut.data()), VImage::option()
image.gifsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("optimize_gif_frames", TRUE)
->set("optimize_gif_transparency", TRUE)
->set("format", "gif"));
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("dither", baton->gifDither));
baton->formatOut = "gif";
} else if (baton->formatOut == "tiff" || (mightMatchInput && isTiff) ||
(willMatchInput && inputImageType == sharp::ImageType::TIFF)) {
@@ -1003,16 +1072,18 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("tile_height", baton->tiffTileHeight)
->set("tile_width", baton->tiffTileWidth)
->set("xres", baton->tiffXres)
->set("yres", baton->tiffYres));
->set("yres", baton->tiffYres)
->set("resunit", baton->tiffResolutionUnit));
baton->formatOut = "tiff";
} else if (baton->formatOut == "heif" || (mightMatchInput && isHeif) ||
(willMatchInput && inputImageType == sharp::ImageType::HEIF)) {
// Write HEIF to file
image = sharp::RemoveAnimationProperties(image);
image.heifsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->heifQuality)
->set("compression", baton->heifCompression)
->set("speed", baton->heifSpeed)
->set("effort", baton->heifEffort)
->set("subsample_mode", baton->heifChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("lossless", baton->heifLossless));
@@ -1037,7 +1108,7 @@ class PipelineWorker : public Napi::AsyncWorker {
{"lossless", baton->webpLossless ? "TRUE" : "FALSE"},
{"near_lossless", baton->webpNearLossless ? "TRUE" : "FALSE"},
{"smart_subsample", baton->webpSmartSubsample ? "TRUE" : "FALSE"},
{"reduction_effort", std::to_string(baton->webpReductionEffort)}
{"effort", std::to_string(baton->webpEffort)}
};
suffix = AssembleSuffixString(".webp", options);
} else {
@@ -1186,6 +1257,12 @@ class PipelineWorker : public Napi::AsyncWorker {
Napi::FunctionReference debuglog;
Napi::FunctionReference queueListener;

void MultiPageUnsupported(int const pages, std::string op) {
if (pages > 1) {
throw vips::VError(op + " is not supported for multi-page images");
}
}

/*
Calculate the angle of rotation and need-to-flip for the given Exif orientation
By default, returns zero, i.e. no rotation.
@@ -1276,15 +1353,15 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
// Canvas option
std::string canvas = sharp::AttrAsStr(options, "canvas");
if (canvas == "crop") {
baton->canvas = Canvas::CROP;
baton->canvas = sharp::Canvas::CROP;
} else if (canvas == "embed") {
baton->canvas = Canvas::EMBED;
baton->canvas = sharp::Canvas::EMBED;
} else if (canvas == "max") {
baton->canvas = Canvas::MAX;
baton->canvas = sharp::Canvas::MAX;
} else if (canvas == "min") {
baton->canvas = Canvas::MIN;
baton->canvas = sharp::Canvas::MIN;
} else if (canvas == "ignore_aspect") {
baton->canvas = Canvas::IGNORE_ASPECT;
baton->canvas = sharp::Canvas::IGNORE_ASPECT;
}
// Tint chroma
baton->tintA = sharp::AttrAsDouble(options, "tintA");
@@ -1307,6 +1384,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
}
// Resize options
baton->withoutEnlargement = sharp::AttrAsBool(options, "withoutEnlargement");
baton->withoutReduction = sharp::AttrAsBool(options, "withoutReduction");
baton->position = sharp::AttrAsInt32(options, "position");
baton->resizeBackground = sharp::AttrAsVectorOfDouble(options, "resizeBackground");
baton->kernel = sharp::AttrAsStr(options, "kernel");
@@ -1328,6 +1406,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->brightness = sharp::AttrAsDouble(options, "brightness");
baton->saturation = sharp::AttrAsDouble(options, "saturation");
baton->hue = sharp::AttrAsInt32(options, "hue");
baton->lightness = sharp::AttrAsDouble(options, "lightness");
baton->medianSize = sharp::AttrAsUint32(options, "medianSize");
baton->sharpenSigma = sharp::AttrAsDouble(options, "sharpenSigma");
baton->sharpenFlat = sharp::AttrAsDouble(options, "sharpenFlat");
@@ -1364,7 +1443,6 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->affineOdx = sharp::AttrAsDouble(options, "affineOdx");
baton->affineOdy = sharp::AttrAsDouble(options, "affineOdy");
baton->affineInterpolator = vips::VInterpolate::new_from_name(sharp::AttrAsStr(options, "affineInterpolator").data());

baton->removeAlpha = sharp::AttrAsBool(options, "removeAlpha");
baton->ensureAlpha = sharp::AttrAsDouble(options, "ensureAlpha");
if (options.Has("boolean")) {
@@ -1415,6 +1493,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
std::string k = sharp::AttrAsStr(mdStrKeys, i);
baton->withMetadataStrs.insert(std::make_pair(k, sharp::AttrAsStr(mdStrs, k)));
}
baton->timeoutSeconds = sharp::AttrAsUint32(options, "timeoutSeconds");
// Format-specific
baton->jpegQuality = sharp::AttrAsUint32(options, "jpegQuality");
baton->jpegProgressive = sharp::AttrAsBool(options, "jpegProgressive");
@@ -1429,14 +1508,23 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->pngAdaptiveFiltering = sharp::AttrAsBool(options, "pngAdaptiveFiltering");
baton->pngPalette = sharp::AttrAsBool(options, "pngPalette");
baton->pngQuality = sharp::AttrAsUint32(options, "pngQuality");
baton->pngColours = sharp::AttrAsUint32(options, "pngColours");
baton->pngEffort = sharp::AttrAsUint32(options, "pngEffort");
baton->pngBitdepth = sharp::AttrAsUint32(options, "pngBitdepth");
baton->pngDither = sharp::AttrAsDouble(options, "pngDither");
baton->jp2Quality = sharp::AttrAsUint32(options, "jp2Quality");
baton->jp2Lossless = sharp::AttrAsBool(options, "jp2Lossless");
baton->jp2TileHeight = sharp::AttrAsUint32(options, "jp2TileHeight");
baton->jp2TileWidth = sharp::AttrAsUint32(options, "jp2TileWidth");
baton->jp2ChromaSubsampling = sharp::AttrAsStr(options, "jp2ChromaSubsampling");
baton->webpQuality = sharp::AttrAsUint32(options, "webpQuality");
baton->webpAlphaQuality = sharp::AttrAsUint32(options, "webpAlphaQuality");
baton->webpLossless = sharp::AttrAsBool(options, "webpLossless");
baton->webpNearLossless = sharp::AttrAsBool(options, "webpNearLossless");
baton->webpSmartSubsample = sharp::AttrAsBool(options, "webpSmartSubsample");
baton->webpReductionEffort = sharp::AttrAsUint32(options, "webpReductionEffort");
baton->webpEffort = sharp::AttrAsUint32(options, "webpEffort");
baton->gifBitdepth = sharp::AttrAsUint32(options, "gifBitdepth");
baton->gifEffort = sharp::AttrAsUint32(options, "gifEffort");
baton->gifDither = sharp::AttrAsDouble(options, "gifDither");
baton->tiffQuality = sharp::AttrAsUint32(options, "tiffQuality");
baton->tiffPyramid = sharp::AttrAsBool(options, "tiffPyramid");
baton->tiffBitdepth = sharp::AttrAsUint32(options, "tiffBitdepth");
@@ -1445,6 +1533,9 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->tiffTileHeight = sharp::AttrAsUint32(options, "tiffTileHeight");
baton->tiffXres = sharp::AttrAsDouble(options, "tiffXres");
baton->tiffYres = sharp::AttrAsDouble(options, "tiffYres");
if (baton->tiffXres == 1.0 && baton->tiffYres == 1.0 && baton->withMetadataDensity > 0) {
|
||||
baton->tiffXres = baton->tiffYres = baton->withMetadataDensity / 25.4;
|
||||
}
|
||||
// tiff compression options
|
||||
baton->tiffCompression = static_cast<VipsForeignTiffCompression>(
|
||||
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_COMPRESSION,
|
||||
@@ -1452,30 +1543,28 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
|
||||
baton->tiffPredictor = static_cast<VipsForeignTiffPredictor>(
|
||||
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_PREDICTOR,
|
||||
sharp::AttrAsStr(options, "tiffPredictor").data()));
|
||||
baton->tiffResolutionUnit = static_cast<VipsForeignTiffResunit>(
|
||||
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_TIFF_RESUNIT,
|
||||
sharp::AttrAsStr(options, "tiffResolutionUnit").data()));
|
||||
|
||||
baton->heifQuality = sharp::AttrAsUint32(options, "heifQuality");
|
||||
baton->heifLossless = sharp::AttrAsBool(options, "heifLossless");
|
||||
baton->heifCompression = static_cast<VipsForeignHeifCompression>(
|
||||
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_HEIF_COMPRESSION,
|
||||
sharp::AttrAsStr(options, "heifCompression").data()));
|
||||
baton->heifSpeed = sharp::AttrAsUint32(options, "heifSpeed");
|
||||
baton->heifEffort = sharp::AttrAsUint32(options, "heifEffort");
|
||||
baton->heifChromaSubsampling = sharp::AttrAsStr(options, "heifChromaSubsampling");
|
||||
|
||||
// Raw output
|
||||
baton->rawDepth = static_cast<VipsBandFormat>(
|
||||
vips_enum_from_nick(nullptr, VIPS_TYPE_BAND_FORMAT,
|
||||
sharp::AttrAsStr(options, "rawDepth").data()));
|
||||
|
||||
// Animated output
|
||||
if (sharp::HasAttr(options, "pageHeight")) {
|
||||
baton->pageHeight = sharp::AttrAsUint32(options, "pageHeight");
|
||||
}
|
||||
// Animated output properties
|
||||
if (sharp::HasAttr(options, "loop")) {
|
||||
baton->loop = sharp::AttrAsUint32(options, "loop");
|
||||
}
|
||||
if (sharp::HasAttr(options, "delay")) {
|
||||
baton->delay = sharp::AttrAsInt32Vector(options, "delay");
|
||||
}
|
||||
|
||||
// Tile output
|
||||
baton->tileSize = sharp::AttrAsUint32(options, "tileSize");
|
||||
baton->tileOverlap = sharp::AttrAsUint32(options, "tileOverlap");
|
||||
|
||||
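The paired `speed`/`effort` and `reduction_effort`/`effort` lines above are the two columns of the split diff: the older per-format knobs are being superseded by a single `effort` option. A minimal JavaScript sketch of how this surfaces at the API level, assuming the option names used by the tests later in this change set; file names are placeholders:

```js
const sharp = require('sharp');

// Higher effort spends more CPU time for (usually) smaller output.
await sharp('input.png')
  .webp({ effort: 4 })   // formerly reductionEffort
  .toFile('output.webp');

await sharp('input.png')
  .avif({ effort: 0 })   // formerly speed; effort 0 favours encode speed
  .toFile('output.avif');
```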
@@ -27,14 +27,6 @@

Napi::Value pipeline(const Napi::CallbackInfo& info);

enum class Canvas {
CROP,
EMBED,
MAX,
MIN,
IGNORE_ASPECT
};

struct Composite {
sharp::InputDescriptor *input;
VipsBlendMode mode;
@@ -75,7 +67,7 @@ struct PipelineBaton {
int width;
int height;
int channels;
Canvas canvas;
sharp::Canvas canvas;
int position;
std::vector<double> resizeBackground;
bool hasCropOffset;
@@ -95,6 +87,7 @@ struct PipelineBaton {
double brightness;
double saturation;
int hue;
double lightness;
int medianSize;
double sharpenSigma;
double sharpenFlat;
@@ -126,6 +119,7 @@ struct PipelineBaton {
int extendRight;
std::vector<double> extendBackground;
bool withoutEnlargement;
bool withoutReduction;
std::vector<double> affineMatrix;
std::vector<double> affineBackground;
double affineIdx;
@@ -146,14 +140,23 @@ struct PipelineBaton {
bool pngAdaptiveFiltering;
bool pngPalette;
int pngQuality;
int pngColours;
int pngEffort;
int pngBitdepth;
double pngDither;
int jp2Quality;
bool jp2Lossless;
int jp2TileHeight;
int jp2TileWidth;
std::string jp2ChromaSubsampling;
int webpQuality;
int webpAlphaQuality;
bool webpNearLossless;
bool webpLossless;
bool webpSmartSubsample;
int webpReductionEffort;
int webpEffort;
int gifBitdepth;
int gifEffort;
double gifDither;
int tiffQuality;
VipsForeignTiffCompression tiffCompression;
VipsForeignTiffPredictor tiffPredictor;
@@ -164,9 +167,10 @@ struct PipelineBaton {
int tiffTileWidth;
double tiffXres;
double tiffYres;
VipsForeignTiffResunit tiffResolutionUnit;
int heifQuality;
VipsForeignHeifCompression heifCompression;
int heifSpeed;
int heifEffort;
std::string heifChromaSubsampling;
bool heifLossless;
VipsBandFormat rawDepth;
@@ -176,6 +180,7 @@ struct PipelineBaton {
double withMetadataDensity;
std::string withMetadataIcc;
std::unordered_map<std::string, std::string> withMetadataStrs;
int timeoutSeconds;
std::unique_ptr<double[]> convKernel;
int convKernelWidth;
int convKernelHeight;
@@ -189,7 +194,6 @@ struct PipelineBaton {
double ensureAlpha;
VipsInterpretation colourspaceInput;
VipsInterpretation colourspace;
int pageHeight;
std::vector<int> delay;
int loop;
int tileSize;
@@ -210,7 +214,7 @@ struct PipelineBaton {
topOffsetPre(-1),
topOffsetPost(-1),
channels(0),
canvas(Canvas::CROP),
canvas(sharp::Canvas::CROP),
position(0),
resizeBackground{ 0.0, 0.0, 0.0, 255.0 },
hasCropOffset(false),
@@ -227,6 +231,7 @@ struct PipelineBaton {
brightness(1.0),
saturation(1.0),
hue(0),
lightness(0),
medianSize(0),
sharpenSigma(0.0),
sharpenFlat(1.0),
@@ -256,6 +261,7 @@ struct PipelineBaton {
extendRight(0),
extendBackground{ 0.0, 0.0, 0.0, 255.0 },
withoutEnlargement(false),
withoutReduction(false),
affineMatrix{ 1.0, 0.0, 0.0, 1.0 },
affineBackground{ 0.0, 0.0, 0.0, 255.0 },
affineIdx(0),
@@ -276,14 +282,20 @@ struct PipelineBaton {
pngAdaptiveFiltering(false),
pngPalette(false),
pngQuality(100),
pngColours(256),
pngEffort(7),
pngBitdepth(8),
pngDither(1.0),
jp2Quality(80),
jp2Lossless(false),
jp2TileHeight(512),
jp2TileWidth(512),
jp2ChromaSubsampling("4:4:4"),
webpQuality(80),
webpAlphaQuality(100),
webpNearLossless(false),
webpLossless(false),
webpSmartSubsample(false),
webpReductionEffort(4),
webpEffort(4),
tiffQuality(80),
tiffCompression(VIPS_FOREIGN_TIFF_COMPRESSION_JPEG),
tiffPredictor(VIPS_FOREIGN_TIFF_PREDICTOR_HORIZONTAL),
@@ -294,15 +306,17 @@ struct PipelineBaton {
tiffTileWidth(256),
tiffXres(1.0),
tiffYres(1.0),
tiffResolutionUnit(VIPS_FOREIGN_TIFF_RESUNIT_INCH),
heifQuality(50),
heifCompression(VIPS_FOREIGN_HEIF_COMPRESSION_AV1),
heifSpeed(5),
heifEffort(4),
heifChromaSubsampling("4:4:4"),
heifLossless(false),
rawDepth(VIPS_FORMAT_UCHAR),
withMetadata(false),
withMetadataOrientation(-1),
withMetadataDensity(0.0),
timeoutSeconds(0),
convKernelWidth(0),
convKernelHeight(0),
convKernelScale(0.0),
@@ -315,8 +329,6 @@ struct PipelineBaton {
ensureAlpha(-1.0),
colourspaceInput(VIPS_INTERPRETATION_LAST),
colourspace(VIPS_INTERPRETATION_LAST),
pageHeight(0),
delay{-1},
loop(-1),
tileSize(256),
tileOverlap(0),
src/sharp.cc (16 changed lines)
@@ -13,6 +13,7 @@
// limitations under the License.

#include <napi.h>
#include <cstdlib>
#include <vips/vips8>

#include "common.h"
@@ -21,6 +22,14 @@
#include "utilities.h"
#include "stats.h"

#if defined(_MSC_VER) && _MSC_VER >= 1400 // MSVC 2005/8
static void empty_invalid_parameter_handler(const wchar_t* expression,
const wchar_t* function, const wchar_t* file, unsigned int line,
uintptr_t reserved) {
// No-op.
}
#endif

static void* sharp_vips_init(void*) {
g_setenv("VIPS_MIN_STACK_SIZE", "2m", FALSE);
vips_init("sharp");
@@ -34,6 +43,13 @@ Napi::Object init(Napi::Env env, Napi::Object exports) {
g_log_set_handler("VIPS", static_cast<GLogLevelFlags>(G_LOG_LEVEL_WARNING),
static_cast<GLogFunc>(sharp::VipsWarningCallback), nullptr);

// Tell the CRT to not exit the application when an invalid parameter is
// passed. The main issue is that invalid FDs will trigger this behaviour.
// See: https://github.com/libvips/libvips/pull/2571.
#if defined(_MSC_VER) && _MSC_VER >= 1400 // MSVC 2005/8
_set_invalid_parameter_handler(empty_invalid_parameter_handler);
#endif

// Methods available to JavaScript
exports.Set("metadata", Napi::Function::New(env, metadata));
exports.Set("pipeline", Napi::Function::New(env, pipeline));

@@ -115,7 +115,7 @@ Napi::Value format(const Napi::CallbackInfo& info) {
Napi::Object format = Napi::Object::New(env);
for (std::string const f : {
"jpeg", "png", "webp", "tiff", "magick", "openslide", "dz",
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips"
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips", "jp2k"
}) {
// Input
Napi::Boolean hasInputFile =
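The extra `"jp2k"` entry in the format map is what the new JP2 tests further down rely on. A hedged sketch of a runtime capability check, mirroring the test's use of `sharp.format.jp2k`; file names are placeholders:

```js
const sharp = require('sharp');

// Mirrors the check used in test/unit/jp2.js: the jp2k capability flags are
// only truthy when libvips was compiled with OpenJPEG support.
if (sharp.format.jp2k.input.buffer) {
  await sharp('input.jpg').jp2({ quality: 80 }).toFile('output.jp2');
} else {
  console.warn('JP2 output requires libvips with support for OpenJPEG');
}
```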
@@ -10,12 +10,12 @@
"devDependencies": {
"@squoosh/cli": "0.7.2",
"@squoosh/lib": "0.4.0",
"async": "3.2.1",
"async": "3.2.3",
"benchmark": "2.1.4",
"gm": "1.23.1",
"imagemagick": "0.1.3",
"jimp": "0.16.1",
"mapnik": "4.5.8",
"mapnik": "4.5.9",
"semver": "7.3.5"
},
"license": "Apache-2.0",
BIN test/fixtures/expected/clahe-11-25-14.jpg (vendored): Before 28 KiB, After 28 KiB
BIN test/fixtures/expected/clahe-5-5-0.jpg (vendored): Before 40 KiB, After 40 KiB
BIN test/fixtures/expected/composite-cutout.png (vendored): Before 175 KiB, After 180 KiB
BIN test/fixtures/expected/embed-animated-height.webp (vendored, new file): After 13 KiB
BIN test/fixtures/expected/embed-animated-width.webp (vendored, new file): After 13 KiB
BIN test/fixtures/expected/extend-equal-single.webp (vendored, new file): After 8.0 KiB
BIN test/fixtures/expected/extract-lch.jpg (vendored): Before 13 KiB, After 13 KiB
BIN test/fixtures/expected/gravity-center-height.webp (vendored, new file): After 6.8 KiB
BIN test/fixtures/expected/gravity-center-width.webp (vendored, new file): After 7.6 KiB
BIN test/fixtures/expected/hilutite.jpg (vendored): Before 424 KiB, After 424 KiB
BIN test/fixtures/expected/icc-cmyk.jpg (vendored): Before 943 KiB, After 943 KiB
BIN test/fixtures/expected/median_1.jpg (vendored): Before 20 KiB
BIN test/fixtures/expected/median_3.jpg (vendored): Before 833 B
BIN test/fixtures/expected/median_5.jpg (vendored): Before 640 B
BIN test/fixtures/expected/median_color.jpg (vendored): Before 12 KiB
BIN test/fixtures/expected/resize-crop-extract.jpg (vendored): Before 1.4 KiB, After 1.4 KiB
BIN test/fixtures/expected/rotate-mirror-extract.jpg (vendored, new file): After 1.4 KiB
BIN test/fixtures/expected/svg72.png (vendored): Before 1.8 KiB, After 373 B
BIN test/fixtures/expected/tint-sepia.jpg (vendored): Before 14 KiB, After 14 KiB
test/fixtures/index.js (vendored, 4 changed lines)
@@ -92,6 +92,8 @@ module.exports = {
inputPngRGBWithAlpha: getPath('2569067123_aca715a2ee_o.png'), // http://www.flickr.com/photos/grizdave/2569067123/ (same as inputJpg)
inputPngImageInAlpha: getPath('image-in-alpha.png'), // https://github.com/lovell/sharp/issues/1597
inputPngSolidAlpha: getPath('with-alpha.png'), // https://github.com/lovell/sharp/issues/1599
inputPngP3: getPath('p3.png'), // https://github.com/lovell/sharp/issues/2862
inputPngPalette: getPath('swiss.png'), // https://github.com/randy408/libspng/issues/188

inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
inputWebPWithTransparency: getPath('5_webp_a.webp'), // http://www.gstatic.com/webp/gallery3/5_webp_a.webp
@@ -104,6 +106,8 @@ module.exports = {
inputTiffUncompressed: getPath('uncompressed_tiff.tiff'), // https://code.google.com/archive/p/imagetestsuite/wikis/TIFFTestSuite.wiki file: 0c84d07e1b22b76f24cccc70d8788e4a.tif
inputTiff8BitDepth: getPath('8bit_depth.tiff'),
inputTifftagPhotoshop: getPath('tifftag-photoshop.tiff'), // https://github.com/lovell/sharp/issues/1600

inputJp2: getPath('relax.jp2'), // https://www.fnordware.com/j2k/relax.jp2
inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
inputGifGreyPlusAlpha: getPath('grey-plus-alpha.gif'), // http://i.imgur.com/gZ5jlmE.gif
inputGifAnimated: getPath('rotating-squares.gif'), // CC0 https://loading.io/spinner/blocks/-rotating-squares-preloader-gif
BIN test/fixtures/p3.png (vendored, new file): After 610 B
BIN test/fixtures/relax.jp2 (vendored, new file)
BIN test/fixtures/swiss.png (vendored, new file): After 2.6 KiB
@@ -8,7 +8,7 @@ fi
curl -s -o ./test/leak/libvips.supp https://raw.githubusercontent.com/libvips/libvips/master/suppressions/valgrind.supp

for test in ./test/unit/*.js; do
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind \
G_SLICE=always-malloc G_DEBUG=gc-friendly VIPS_LEAK=1 valgrind \
--suppressions=test/leak/libvips.supp \
--suppressions=test/leak/sharp.supp \
--gen-suppressions=yes \

@@ -225,14 +225,10 @@
fun:FcInitLoadConfigAndFonts
}
{
leak_fontconfig_doContent
leak_fontconfig_XML_ParseBuffer
Memcheck:Leak
match-leak-kinds: definite
fun:malloc
...
fun:doContent
fun:doProlog
fun:prologInitProcessor
fun:XML_ParseBuffer
obj:*/libfontconfig.so.*
}
@@ -646,6 +642,13 @@
...
fun:_ZN4node9inspector5Agent5StartEPNS_12NodePlatformEPKcRKNS_12DebugOptionsE
}
{
leak_nodejs_node9inspector5Agent5StartERKSsRKNS
Memcheck:Leak
match-leak-kinds: possible
...
fun:_ZN4node9inspector5Agent5StartERKSsRKNS_12DebugOptionsESt10shared_ptrINS_15ExclusiveAccessINS_8HostPortENS_9MutexBaseINS_16LibuvMutexTraitsEEEEEEb
}
{
leak_nodejs_node12NodePlatform_TracingController
Memcheck:Leak
@@ -155,6 +155,15 @@ describe('Affine transform', () => {
fixtures.assertSimilar(fixtures.expected('affine-background-all-offsets-expected.jpg'), data, done);
});
});

it('Animated image rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.affine([1, 1, 1, 1])
.toBuffer(),
/Affine is not supported for multi-page images/
)
);

describe('Interpolations', () => {
const input = fixtures.inputJpg320x240;
const inputWidth = 320;
@@ -3,7 +3,7 @@
const assert = require('assert');

const sharp = require('../../');
const { inputAvif, inputJpg } = require('../fixtures');
const { inputAvif, inputJpg, inputGifAnimated } = require('../fixtures');

describe('AVIF', () => {
it('called without options does not throw an error', () => {
@@ -17,10 +17,9 @@ describe('AVIF', () => {
.resize(32)
.jpeg()
.toBuffer();
const metadata = await sharp(data)
const { size, ...metadata } = await sharp(data)
.metadata();
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
assert.deepStrictEqual(metadata, {
channels: 3,
chromaSubsampling: '4:2:0',
density: 72,
@@ -28,7 +27,7 @@ describe('AVIF', () => {
format: 'jpeg',
hasAlpha: false,
hasProfile: false,
height: 13,
height: 14,
isProgressive: false,
space: 'srgb',
width: 32
@@ -38,20 +37,19 @@ describe('AVIF', () => {
it('can convert JPEG to AVIF', async () => {
const data = await sharp(inputJpg)
.resize(32)
.avif()
.avif({ effort: 0 })
.toBuffer();
const metadata = await sharp(data)
const { size, ...metadata } = await sharp(data)
.metadata();
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
assert.deepStrictEqual(metadata, {
channels: 3,
compression: 'av1',
depth: 'uchar',
format: 'heif',
hasAlpha: false,
hasProfile: false,
height: 26,
isProgressive: false,
pageHeight: 26,
pagePrimary: 0,
pages: 1,
space: 'srgb',
@@ -63,22 +61,44 @@ describe('AVIF', () => {
const data = await sharp(inputAvif)
.resize(32)
.toBuffer();
const metadata = await sharp(data)
const { size, ...metadata } = await sharp(data)
.metadata();
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
assert.deepStrictEqual(metadata, {
channels: 3,
compression: 'av1',
depth: 'uchar',
format: 'heif',
hasAlpha: false,
hasProfile: false,
height: 12,
height: 14,
isProgressive: false,
pageHeight: 12,
pagePrimary: 0,
pages: 1,
space: 'srgb',
width: 32
});
});

it('can convert animated GIF to non-animated AVIF', async () => {
const data = await sharp(inputGifAnimated, { animated: true })
.resize(10)
.avif({ effort: 0 })
.toBuffer();
const { size, ...metadata } = await sharp(data)
.metadata();
assert.deepStrictEqual(metadata, {
channels: 4,
compression: 'av1',
depth: 'uchar',
format: 'heif',
hasAlpha: true,
hasProfile: false,
height: 300,
isProgressive: false,
pagePrimary: 0,
pages: 1,
space: 'srgb',
width: 10
});
});
});
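A compact usage sketch of the AVIF paths exercised above, with file names assumed; note that writing an animated input to AVIF yields a single, non-animated image:

```js
const sharp = require('sharp');

// All frames of the GIF are laid out vertically and written as one
// non-animated AVIF (pages: 1 in the resulting metadata).
const avif = await sharp('animated.gif', { animated: true })
  .resize(10)
  .avif({ effort: 0 })
  .toBuffer();
```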
@@ -3,10 +3,11 @@
const detectLibc = require('detect-libc');
const sharp = require('../../');

const usingCache = detectLibc.family !== detectLibc.MUSL;
const libcFamily = detectLibc.familySync();
const usingCache = libcFamily !== detectLibc.MUSL;
const usingSimd = !process.env.G_DEBUG;
const concurrency =
detectLibc.family === detectLibc.MUSL || process.arch === 'arm'
libcFamily === detectLibc.MUSL || process.arch === 'arm'
? 1
: undefined;
@@ -105,6 +105,25 @@ describe('Colour space conversion', function () {
});
});

it('Convert P3 to sRGB', async () => {
const [r, g, b] = await sharp(fixtures.inputPngP3)
.raw()
.toBuffer();
assert.strictEqual(r, 255);
assert.strictEqual(g, 0);
assert.strictEqual(b, 0);
});

it('Passthrough P3', async () => {
const [r, g, b] = await sharp(fixtures.inputPngP3)
.withMetadata({ icc: 'p3' })
.raw()
.toBuffer();
assert.strictEqual(r, 234);
assert.strictEqual(g, 51);
assert.strictEqual(b, 34);
});

it('Invalid pipelineColourspace input', function () {
assert.throws(function () {
sharp(fixtures.inputJpg)
@@ -247,7 +247,7 @@ describe('composite', () => {
sharp(fixtures.inputJpg)
.resize(300, 300)
.composite([{
input: Buffer.from('<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'),
input: Buffer.from('<svg width="200" height="200"><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'),
density: 96,
blend: 'dest-in',
cutout: true
@@ -6,16 +6,30 @@ const sharp = require('../../');
const fixtures = require('../fixtures');

describe('Extend', function () {
it('extend all sides equally via a single value', function (done) {
sharp(fixtures.inputJpg)
.resize(120)
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(118, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal-single.jpg'), data, done);
});
describe('extend all sides equally via a single value', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(120)
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(118, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal-single.jpg'), data, done);
});
});

it('Animated WebP', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(120)
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(140 * 9, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal-single.webp'), data, done);
});
});
});

it('extend all sides equally with RGB', function (done) {
@@ -124,4 +138,30 @@ describe('Extend', function () {
fixtures.assertSimilar(fixtures.expected('extend-2channel.png'), data, done);
});
});

it('Premultiply background when compositing', async () => {
const background = '#bf1942cc';
const data = await sharp({
create: {
width: 1, height: 1, channels: 4, background: '#fff0'
}
})
.composite([{
input: {
create: {
width: 1, height: 1, channels: 4, background
}
}
}])
.extend({
left: 1, background
})
.raw()
.toBuffer();
const [r1, g1, b1, a1, r2, g2, b2, a2] = data;
assert.strictEqual(true, Math.abs(r2 - r1) < 2);
assert.strictEqual(true, Math.abs(g2 - g1) < 2);
assert.strictEqual(true, Math.abs(b2 - b1) < 2);
assert.strictEqual(true, Math.abs(a2 - a1) < 2);
});
});
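The animated cases above depend on loading every frame of the input; a short sketch (input name assumed):

```js
const sharp = require('sharp');

// pages: -1 loads all frames, so extend() pads every frame of the animation;
// the reported height is frameHeight * pageCount.
const padded = await sharp('animated.webp', { pages: -1 })
  .resize(120)
  .extend(10)
  .webp()
  .toBuffer();
```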
@@ -39,10 +39,35 @@ describe('Partial image extraction', function () {
});
});

describe('Animated WebP', function () {
it('Before resize', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.extract({ left: 0, top: 30, width: 80, height: 20 })
.resize(320, 80)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
assert.strictEqual(80 * 9, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-center-height.webp'), data, done);
});
});

it('After resize', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(320, 320)
.extract({ left: 0, top: 120, width: 320, height: 80 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
assert.strictEqual(80 * 9, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-center-height.webp'), data, done);
});
});
});

it('TIFF', function (done) {
sharp(fixtures.inputTiff)
.extract({ left: 34, top: 63, width: 341, height: 529 })
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(341, info.width);
@@ -143,6 +168,16 @@ describe('Partial image extraction', function () {
});
});

it('Rotate with EXIF mirroring then extract', function (done) {
sharp(fixtures.inputJpgWithLandscapeExif7)
.rotate()
.extract({ left: 0, top: 208, width: 60, height: 40 })
.toBuffer(function (err, data) {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('rotate-mirror-extract.jpg'), data, done);
});
});

describe('Invalid parameters', function () {
describe('using the legacy extract(top,left,width,height) syntax', function () {
it('String top', function () {
test/unit/gif.js (111 changed lines)
@@ -42,7 +42,7 @@ describe('GIF input', () => {
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual(sharp.format.magick.input.buffer ? 'gif' : 'png', info.format);
assert.strictEqual('gif', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(4, info.channels);
@@ -55,40 +55,35 @@ describe('GIF input', () => {
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual(sharp.format.magick.input.buffer ? 'gif' : 'png', info.format);
assert.strictEqual('gif', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(2400, info.height);
assert.strictEqual(4, info.channels);
})
);

if (!sharp.format.magick.output.buffer) {
it('GIF buffer output should fail due to missing ImageMagick', () => {
assert.throws(
() => sharp().gif(),
/GIF output requires libvips with support for ImageMagick/
);
});
it('GIF with reduced colours, no dither, low effort reduces file size', async () => {
const original = await sharp(fixtures.inputJpg)
.resize(120, 80)
.gif()
.toBuffer();

it('GIF file output should fail due to missing ImageMagick', () => {
assert.rejects(
async () => await sharp().toFile('test.gif'),
/GIF output requires libvips with support for ImageMagick/
);
});
}
const reduced = await sharp(fixtures.inputJpg)
.resize(120, 80)
.gif({
colours: 128,
dither: 0,
effort: 1
})
.toBuffer();

it('invalid pageHeight throws', () => {
assert.throws(() => {
sharp().gif({ pageHeight: 0 });
});
assert.strictEqual(true, reduced.length < original.length);
});

it('invalid loop throws', () => {
assert.throws(() => {
sharp().gif({ loop: -1 });
});

assert.throws(() => {
sharp().gif({ loop: 65536 });
});
@@ -96,43 +91,61 @@ describe('GIF input', () => {

it('invalid delay throws', () => {
assert.throws(() => {
sharp().gif({ delay: [-1] });
sharp().gif({ delay: -1 });
});

assert.throws(() => {
sharp().gif({ delay: [65536] });
});
});

it('invalid colour throws', () => {
assert.throws(() => {
sharp().gif({ colours: 1 });
});
assert.throws(() => {
sharp().gif({ colours: 'fail' });
});
});

it('invalid effort throws', () => {
assert.throws(() => {
sharp().gif({ effort: 0 });
});
assert.throws(() => {
sharp().gif({ effort: 'fail' });
});
});

it('invalid dither throws', () => {
assert.throws(() => {
sharp().gif({ dither: 1.1 });
});
assert.throws(() => {
sharp().gif({ effort: 'fail' });
});
});

it('should work with streams when only animated is set', function (done) {
if (sharp.format.magick.output.buffer) {
fs.createReadStream(fixtures.inputGifAnimated)
.pipe(sharp({ animated: true }))
.gif()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('gif', info.format);
fixtures.assertSimilar(fixtures.inputGifAnimated, data, done);
});
} else {
done();
}
fs.createReadStream(fixtures.inputGifAnimated)
.pipe(sharp({ animated: true }))
.gif()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('gif', info.format);
fixtures.assertSimilar(fixtures.inputGifAnimated, data, done);
});
});

it('should work with streams when only pages is set', function (done) {
if (sharp.format.magick.output.buffer) {
fs.createReadStream(fixtures.inputGifAnimated)
.pipe(sharp({ pages: -1 }))
.gif()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('gif', info.format);
fixtures.assertSimilar(fixtures.inputGifAnimated, data, done);
});
} else {
done();
}
fs.createReadStream(fixtures.inputGifAnimated)
.pipe(sharp({ pages: -1 }))
.gif()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('gif', info.format);
fixtures.assertSimilar(fixtures.inputGifAnimated, data, done);
});
});
});
@@ -50,6 +50,21 @@ describe('HEIF', () => {
sharp().heif({ compression: 1 });
});
});
it('valid effort does not throw an error', () => {
assert.doesNotThrow(() => {
sharp().heif({ speed: 6 });
});
});
it('out of range effort should throw an error', () => {
assert.throws(() => {
sharp().heif({ effort: 10 });
});
});
it('invalid effort should throw an error', () => {
assert.throws(() => {
sharp().heif({ effort: 'fail' });
});
});
it('valid speed does not throw an error', () => {
assert.doesNotThrow(() => {
sharp().heif({ speed: 6 });
@@ -57,12 +72,12 @@ describe('HEIF', () => {
});
it('out of range speed should throw an error', () => {
assert.throws(() => {
sharp().heif({ speed: 9 });
sharp().heif({ speed: 10 });
});
});
it('invalid speed should throw an error', () => {
assert.throws(() => {
sharp().heif({ compression: 'fail' });
sharp().heif({ speed: 'fail' });
});
});
it('invalid chromaSubsampling should throw an error', () => {
test/unit/io.js (116 changed lines)
@@ -1,6 +1,7 @@
'use strict';

const fs = require('fs');
const path = require('path');
const assert = require('assert');
const rimraf = require('rimraf');

@@ -200,6 +201,21 @@ describe('Input/output', function () {
readable.pipe(pipeline).pipe(writable);
});

it('Stream should emit close event', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(outputJpg);
const pipeline = sharp().resize(320, 240);
let closeEventEmitted = false;
pipeline.on('close', function () {
closeEventEmitted = true;
});
writable.on('close', function () {
assert.strictEqual(true, closeEventEmitted);
rimraf(outputJpg, done);
});
readable.pipe(pipeline).pipe(writable);
});

it('Handle Stream to Stream error ', function (done) {
const pipeline = sharp().resize(320, 240);
let anErrorWasEmitted = false;
@@ -297,6 +313,21 @@ describe('Input/output', function () {
});
});

it('Support output to tif format', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 240)
.toFormat('tif')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});

it('Fail when output File is input File', function (done) {
sharp(fixtures.inputJpg).toFile(fixtures.inputJpg, function (err) {
assert(err instanceof Error);
@@ -316,6 +347,48 @@ describe('Input/output', function () {
});
});

it('Fail when output File is input File (relative output, absolute input)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(fixtures.inputJpg).toFile(relativePath, function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});

it('Fail when output File is input File via Promise (relative output, absolute input)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(fixtures.inputJpg).toFile(relativePath).then(function (data) {
assert(false);
done();
}).catch(function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});

it('Fail when output File is input File (relative input, absolute output)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(relativePath).toFile(fixtures.inputJpg, function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});

it('Fail when output File is input File via Promise (relative input, absolute output)', function (done) {
const relativePath = path.relative(process.cwd(), fixtures.inputJpg);
sharp(relativePath).toFile(fixtures.inputJpg).then(function (data) {
assert(false);
done();
}).catch(function (err) {
assert(err instanceof Error);
assert.strictEqual('Cannot use same file for input and output', err.message);
done();
});
});

it('Fail when output File is empty', function (done) {
sharp(fixtures.inputJpg).toFile('', function (err) {
assert(err instanceof Error);
@@ -488,19 +561,6 @@ describe('Input/output', function () {
});
});

it('Autoconvert GIF input to PNG output', function (done) {
sharp(fixtures.inputGif)
.resize(320, 80)
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual(sharp.format.magick.input.buffer ? 'gif' : 'png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(outputZoinks, done);
});
});

it('Force JPEG format for PNG input', function (done) {
sharp(fixtures.inputPng)
.resize(320, 80)
@@ -588,6 +648,19 @@ describe('Input/output', function () {
});
});

describe('Switch off safety limits for PNG/SVG input', () => {
it('Valid', () => {
assert.doesNotThrow(() => {
sharp({ unlimited: true });
});
});
it('Invalid', () => {
assert.throws(() => {
sharp({ unlimited: -1 });
}, /Expected boolean for unlimited but received -1 of type number/);
});
});

describe('Limit pixel count of input image', () => {
it('Invalid fails - negative', () => {
assert.throws(() => {
@@ -611,7 +684,9 @@ describe('Input/output', function () {
sharp(fixtures.inputJpg)
.metadata()
.then(({ width, height }) =>
sharp(fixtures.inputJpg, { limitInputPixels: width * height }).toBuffer()
sharp(fixtures.inputJpg, { limitInputPixels: width * height })
.resize(2)
.toBuffer()
)
);

@@ -722,6 +797,19 @@ describe('Input/output', function () {
});
});

it('Fails when writing to missing directory', async () => {
const create = {
width: 8,
height: 8,
channels: 3,
background: { r: 0, g: 0, b: 0 }
};
await assert.rejects(
() => sharp({ create }).toFile('does-not-exist/out.jpg'),
/unable to open for write/
);
});

describe('create new image', function () {
it('RGB', function (done) {
const create = {
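Among the additions above is an `unlimited` input option that switches off the PNG/SVG safety limits. A brief sketch, with the input path assumed:

```js
const sharp = require('sharp');

// Only do this for trusted input: it removes the decoder safety limits
// that normally reject extremely large PNG/SVG images.
const image = sharp('huge-but-trusted.png', { unlimited: true });
await image.resize(320).toBuffer();
```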
test/unit/jp2.js (new file, 99 lines)
@@ -0,0 +1,99 @@
'use strict';

const fs = require('fs');
const assert = require('assert');

const sharp = require('../../');
const fixtures = require('../fixtures');

describe('JP2 output', () => {
if (!sharp.format.jp2k.input.buffer) {
it('JP2 output should fail due to missing OpenJPEG', () => {
assert.rejects(() =>
sharp(fixtures.inputJpg)
.jp2()
.toBuffer(),
/JP2 output requires libvips with support for OpenJPEG/
);
});

it('JP2 file output should fail due to missing OpenJPEG', () => {
assert.rejects(async () => await sharp().toFile('test.jp2'),
/JP2 output requires libvips with support for OpenJPEG/
);
});
} else {
it('JP2 Buffer to PNG Buffer', () => {
sharp(fs.readFileSync(fixtures.inputJp2))
.resize(8, 15)
.png()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(15, info.height);
assert.strictEqual(4, info.channels);
});
});

it('JP2 quality', function (done) {
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ quality: 70 })
.toBuffer(function (err, buffer70) {
if (err) throw err;
sharp(fixtures.inputJp2)
.resize(320, 240)
.toBuffer(function (err, buffer80) {
if (err) throw err;
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ quality: 90 })
.toBuffer(function (err, buffer90) {
if (err) throw err;
assert(buffer70.length < buffer80.length);
assert(buffer80.length < buffer90.length);
done();
});
});
});
});

it('Without chroma subsampling generates larger file', function (done) {
// First generate with chroma subsampling (default)
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ chromaSubsampling: '4:2:0' })
.toBuffer(function (err, withChromaSubsamplingData, withChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withChromaSubsamplingData.length > 0);
assert.strictEqual(withChromaSubsamplingData.length, withChromaSubsamplingInfo.size);
assert.strictEqual('jp2', withChromaSubsamplingInfo.format);
assert.strictEqual(320, withChromaSubsamplingInfo.width);
assert.strictEqual(240, withChromaSubsamplingInfo.height);
// Then generate without
sharp(fixtures.inputJp2)
.resize(320, 240)
.jp2({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, withoutChromaSubsamplingData, withoutChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withoutChromaSubsamplingData.length > 0);
assert.strictEqual(withoutChromaSubsamplingData.length, withoutChromaSubsamplingInfo.size);
assert.strictEqual('jp2', withoutChromaSubsamplingInfo.format);
assert.strictEqual(320, withoutChromaSubsamplingInfo.width);
assert.strictEqual(240, withoutChromaSubsamplingInfo.height);
assert.strictEqual(true, withChromaSubsamplingData.length <= withoutChromaSubsamplingData.length);
done();
});
});
});

it('Invalid JP2 chromaSubsampling value throws error', function () {
assert.throws(function () {
sharp().jpeg({ chromaSubsampling: '4:2:2' });
});
});
}
});
@@ -76,6 +76,27 @@ describe('libvips binaries', function () {
});
});

describe('integrity', function () {
it('reads value from environment variable', function () {
const prev = process.env.npm_package_config_integrity_platform_arch;
process.env.npm_package_config_integrity_platform_arch = 'sha512-test';

const integrity = libvips.integrity('platform-arch');
assert.strictEqual('sha512-test', integrity);

process.env.npm_package_config_integrity_platform_arch = prev;
});
it('reads value from package.json', function () {
const prev = process.env.npm_package_config_integrity_linux_x64;
delete process.env.npm_package_config_integrity_linux_x64;

const integrity = libvips.integrity('linux-x64');
assert.strictEqual('sha512-', integrity.substr(0, 7));

process.env.npm_package_config_integrity_linux_x64 = prev;
});
});

describe('safe directory creation', function () {
before(function () {
mockFS({
@@ -5,68 +5,46 @@ const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');

describe('Median filter', function () {
it('1x1 window', function (done) {
sharp(fixtures.inputJpgThRandom)
describe('Median filter', () => {
it('1x1 window', async () => {
const [r, g, b] = await sharp(fixtures.inputSvgSmallViewBox)
.median(1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
fixtures.assertSimilar(fixtures.expected('median_1.jpg'), data, done);
});
.raw()
.toBuffer();

assert.deepStrictEqual({ r: 0, g: 0, b: 0 }, { r, g, b });
});

it('3x3 window', function (done) {
sharp(fixtures.inputJpgThRandom)
it('3x3 window', async () => {
const [r, g, b] = await sharp(fixtures.inputSvgSmallViewBox)
.median(3)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
fixtures.assertSimilar(fixtures.expected('median_3.jpg'), data, done);
});
});
it('5x5 window', function (done) {
sharp(fixtures.inputJpgThRandom)
.median(5)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
fixtures.assertSimilar(fixtures.expected('median_5.jpg'), data, done);
});
.raw()
.toBuffer();

assert.deepStrictEqual({ r: 255, g: 0, b: 127 }, { r, g, b });
});

it('color image', function (done) {
sharp(fixtures.inputJpgRandom)
.median(5)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
fixtures.assertSimilar(fixtures.expected('median_color.jpg'), data, done);
});
it('7x7 window', async () => {
const [r, g, b] = await sharp(fixtures.inputSvgSmallViewBox)
.median(7)
.raw()
.toBuffer();

assert.deepStrictEqual({ r: 255, g: 19, b: 146 }, { r, g, b });
});

it('no windows size', function (done) {
sharp(fixtures.inputJpgThRandom)
it('default window (3x3)', async () => {
const [r, g, b] = await sharp(fixtures.inputSvgSmallViewBox)
.median()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
fixtures.assertSimilar(fixtures.expected('median_3.jpg'), data, done);
});
.raw()
.toBuffer();

assert.deepStrictEqual({ r: 255, g: 0, b: 127 }, { r, g, b });
});
it('invalid radius', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).median(0.1);

it('invalid radius', () => {
assert.throws(() => {
sharp().median(0.1);
});
});
});
@@ -99,6 +99,7 @@ describe('Image metadata', function () {
assert.strictEqual(1, metadata.orientation);
assert.strictEqual('undefined', typeof metadata.exif);
assert.strictEqual('undefined', typeof metadata.icc);
assert.strictEqual('inch', metadata.resolutionUnit);
done();
});
});
@@ -194,6 +195,29 @@ describe('Image metadata', function () {

it('Animated WebP', () =>
sharp(fixtures.inputWebPAnimated)
.metadata()
.then(({
format, width, height, space, channels, depth,
isProgressive, pages, loop, delay, hasProfile,
hasAlpha
}) => {
assert.strictEqual(format, 'webp');
assert.strictEqual(width, 80);
assert.strictEqual(height, 80);
assert.strictEqual(space, 'srgb');
assert.strictEqual(channels, 4);
assert.strictEqual(depth, 'uchar');
assert.strictEqual(isProgressive, false);
assert.strictEqual(pages, 9);
assert.strictEqual(loop, 0);
assert.deepStrictEqual(delay, [120, 120, 90, 120, 120, 90, 120, 90, 30]);
assert.strictEqual(hasProfile, false);
assert.strictEqual(hasAlpha, true);
})
);

it('Animated WebP with all pages', () =>
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.metadata()
.then(({
format, width, height, space, channels, depth,
@@ -202,7 +226,7 @@ describe('Image metadata', function () {
}) => {
assert.strictEqual(format, 'webp');
assert.strictEqual(width, 80);
assert.strictEqual(height, 80);
assert.strictEqual(height, 720);
assert.strictEqual(space, 'srgb');
assert.strictEqual(channels, 4);
assert.strictEqual(depth, 'uchar');
@@ -221,8 +245,8 @@ describe('Image metadata', function () {
.metadata()
.then(({
format, width, height, space, channels, depth,
isProgressive, pages, pageHeight, loop, delay,
hasProfile, hasAlpha
isProgressive, pages, loop, delay, hasProfile,
hasAlpha
}) => {
assert.strictEqual(format, 'webp');
assert.strictEqual(width, 370);
@@ -232,7 +256,6 @@ describe('Image metadata', function () {
assert.strictEqual(depth, 'uchar');
assert.strictEqual(isProgressive, false);
assert.strictEqual(pages, 10);
assert.strictEqual(pageHeight, 285);
assert.strictEqual(loop, 3);
assert.deepStrictEqual(delay, [...Array(9).fill(3000), 15000]);
assert.strictEqual(hasProfile, false);
@@ -285,8 +308,8 @@ describe('Image metadata', function () {
.metadata()
.then(({
format, width, height, space, channels, depth,
isProgressive, pages, pageHeight, loop, delay,
background, hasProfile, hasAlpha
isProgressive, pages, loop, delay, background,
hasProfile, hasAlpha
}) => {
assert.strictEqual(format, 'gif');
assert.strictEqual(width, 80);
@@ -296,7 +319,6 @@ describe('Image metadata', function () {
assert.strictEqual(depth, 'uchar');
assert.strictEqual(isProgressive, false);
assert.strictEqual(pages, 30);
assert.strictEqual(pageHeight, 80);
assert.strictEqual(loop, 0);
assert.deepStrictEqual(delay, Array(30).fill(30));
assert.deepStrictEqual(background, { r: 0, g: 0, b: 0 });
@@ -310,8 +332,8 @@ describe('Image metadata', function () {
.metadata()
.then(({
format, width, height, space, channels, depth,
isProgressive, pages, pageHeight, loop, delay,
hasProfile, hasAlpha
isProgressive, pages, loop, delay, hasProfile,
hasAlpha
}) => {
assert.strictEqual(format, 'gif');
assert.strictEqual(width, 370);
@@ -321,7 +343,6 @@ describe('Image metadata', function () {
assert.strictEqual(depth, 'uchar');
assert.strictEqual(isProgressive, false);
assert.strictEqual(pages, 10);
assert.strictEqual(pageHeight, 285);
assert.strictEqual(loop, 2);
assert.deepStrictEqual(delay, [...Array(9).fill(3000), 15000]);
assert.strictEqual(hasProfile, false);
@@ -522,7 +543,7 @@ describe('Image metadata', function () {
assert.strictEqual('Relative', profile.intent);
assert.strictEqual('Printer', profile.deviceClass);
});
fixtures.assertSimilar(output, fixtures.path('expected/icc-cmyk.jpg'), { threshold: 0 }, done);
fixtures.assertSimilar(output, fixtures.expected('icc-cmyk.jpg'), { threshold: 0 }, done);
});
});

@@ -533,7 +554,7 @@ describe('Image metadata', function () {
.withMetadata({ icc: fixtures.path('hilutite.icm') })
.toFile(output, function (err, info) {
if (err) throw err;
fixtures.assertMaxColourDistance(output, fixtures.path('expected/hilutite.jpg'), 9);
fixtures.assertMaxColourDistance(output, fixtures.expected('hilutite.jpg'), 9);
done();
});
});
@@ -737,7 +758,6 @@ describe('Image metadata', function () {
depth: 'uchar',
isProgressive: false,
pages: 1,
pageHeight: 858,
pagePrimary: 0,
compression: 'av1',
hasProfile: false,
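Two metadata changes are visible in this hunk set: `pageHeight` is no longer reported, and a `resolutionUnit` field is added. A small sketch, with the input name assumed:

```js
const sharp = require('sharp');

const { resolutionUnit, pages, delay } = await sharp('input.jpg').metadata();
// resolutionUnit is reported as 'inch' in the test above;
// pages and delay describe multi-page (animated) inputs.
console.log(resolutionUnit, pages, delay);
```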
@@ -18,7 +18,9 @@ describe('Modulate', function () {
{ saturation: null },
{ hue: '50deg' },
{ hue: 1.5 },
{ hue: null }
{ hue: null },
{ lightness: '+50' },
{ lightness: null }
].forEach(function (options) {
it('should throw', function () {
assert.throws(function () {
@@ -108,6 +110,22 @@ describe('Modulate', function () {
assert.deepStrictEqual({ r: 127, g: 83, b: 81 }, { r, g, b });
});

it('should be able to lighten', async () => {
const [r, g, b] = await sharp({
create: {
width: 1,
height: 1,
channels: 3,
background: { r: 153, g: 68, b: 68 }
}
})
.modulate({ lightness: 10 })
.raw()
.toBuffer();

assert.deepStrictEqual({ r: 182, g: 93, b: 92 }, { r, g, b });
});

it('should be able to modulate all channels', async () => {
const [r, g, b] = await sharp({
create: {
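`modulate()` gains a `lightness` option, an additive lightness offset as opposed to the multiplicative `brightness`. A short sketch, input assumed:

```js
const sharp = require('sharp');

const { data } = await sharp('input.jpg')
  .modulate({ lightness: 10 }) // adds to the lightness channel rather than multiplying
  .raw()
  .toBuffer({ resolveWithObject: true });
```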
@@ -112,6 +112,28 @@ describe('PNG', function () {
});
});

it('16-bit grey+alpha PNG roundtrip', async () => {
const after = await sharp(fixtures.inputPng16BitGreyAlpha)
.toColourspace('grey16')
.toBuffer();

const [alphaMeanBefore, alphaMeanAfter] = (
await Promise.all([
sharp(fixtures.inputPng16BitGreyAlpha).stats(),
sharp(after).stats()
])
)
.map(stats => stats.channels[1].mean);

assert.strictEqual(alphaMeanAfter, alphaMeanBefore);
});

it('palette decode/encode roundtrip', () =>
sharp(fixtures.inputPngPalette)
.png({ effort: 1, palette: true })
.toBuffer()
);

it('Valid PNG libimagequant palette value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().png({ palette: false });
@@ -127,8 +149,8 @@ describe('PNG', function () {
it('Valid PNG libimagequant quality value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ quality: 80 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ quality: 100 }).toBuffer()
sharp(inputPngBuffer).resize(10).png({ effort: 1, quality: 80 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ effort: 1, quality: 100 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
@@ -140,6 +162,12 @@ describe('PNG', function () {
});
});

it('Invalid effort value throws error', () => {
assert.throws(() => {
sharp().png({ effort: 0.1 });
});
});

it('Valid PNG libimagequant colours value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
@@ -148,6 +148,42 @@ describe('Resize fit=contain', function () {
});
});

describe('Animated WebP', function () {
it('Width only', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(320, 240, {
fit: 'contain',
background: { r: 255, g: 0, b: 0 }
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240 * 9, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-animated-width.webp'), data, done);
});
});

it('Height only', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(240, 320, {
fit: 'contain',
background: { r: 255, g: 0, b: 0 }
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(240, info.width);
assert.strictEqual(320 * 9, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-animated-height.webp'), data, done);
});
});
});

it('Invalid position values should fail', function () {
[-1, 8.1, 9, 1000000, false, 'vallejo'].forEach(function (position) {
assert.throws(function () {

@@ -269,6 +269,30 @@ describe('Resize fit=cover', function () {
});
});

describe('Animated WebP', function () {
it('Width only', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(80, 320, { fit: sharp.fit.cover })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(80, info.width);
assert.strictEqual(320 * 9, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-center-width.webp'), data, done);
});
});

it('Height only', function (done) {
sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize(320, 80, { fit: sharp.fit.cover })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
assert.strictEqual(80 * 9, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-center-height.webp'), data, done);
});
});
});

describe('Entropy-based strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
@@ -323,6 +347,18 @@ describe('Resize fit=cover', function () {
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});

it('Animated image rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.resize({
width: 100,
height: 8,
position: sharp.strategy.entropy
})
.toBuffer(),
/Resize strategy is not supported for multi-page images/
)
);
});

describe('Attention strategy', function () {
@@ -379,5 +415,17 @@ describe('Resize fit=cover', function () {
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});

it('Animated image rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.resize({
width: 100,
height: 8,
position: sharp.strategy.attention
})
.toBuffer(),
/Resize strategy is not supported for multi-page images/
)
);
});
});

@@ -357,6 +357,127 @@ describe('Resize dimensions', function () {
});
});

it('Do enlarge when input width is less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize({
width: 2800,
withoutReduction: true
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2800, info.width);
assert.strictEqual(2225, info.height);
done();
});
});

it('Do enlarge when input height is less than output height', function (done) {
sharp(fixtures.inputJpg)
.resize({
height: 2300,
withoutReduction: true
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2300, info.height);
done();
});
});

it('Do not crop when fit = cover and withoutReduction = true and width >= outputWidth, and height < outputHeight', function (done) {
sharp(fixtures.inputJpg)
.resize({
width: 3000,
height: 1000,
withoutReduction: true
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(2225, info.height);
done();
});
});

it('Do not crop when fit = cover and withoutReduction = true and width < outputWidth, and height >= outputHeight', function (done) {
sharp(fixtures.inputJpg)
.resize({
width: 1500,
height: 2226,
withoutReduction: true
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2226, info.height);
done();
});
});

it('Do enlarge when input width is less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize({
width: 2800,
withoutReduction: false
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2800, info.width);
assert.strictEqual(2286, info.height);
done();
});
});

it('Do not resize when both withoutEnlargement and withoutReduction are true', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320, { fit: 'fill', withoutEnlargement: true, withoutReduction: true })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});

it('Do not reduce size when fit = outside and withoutReduction are true and height > outputHeight and width > outputWidth', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320, { fit: 'outside', withoutReduction: true })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});

it('Do resize when fit = outside and withoutReduction are true and input height > height and input width > width ', function (done) {
sharp(fixtures.inputJpg)
.resize(3000, 3000, { fit: 'outside', withoutReduction: true })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3674, info.width);
assert.strictEqual(3000, info.height);
done();
});
});

it('fit=fill, downscale width and height', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320, { fit: 'fill' })
@@ -605,6 +726,40 @@ describe('Resize dimensions', function () {
});
});

it('Ensure embedded shortest edge (height) is at least 1 pixel', function () {
return sharp({
create: {
width: 200,
height: 1,
channels: 3,
background: 'red'
}
})
.resize({ width: 50, height: 50, fit: sharp.fit.contain })
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(50, output.info.width);
assert.strictEqual(50, output.info.height);
});
});

it('Ensure embedded shortest edge (width) is at least 1 pixel', function () {
return sharp({
create: {
width: 1,
height: 200,
channels: 3,
background: 'red'
}
})
.resize({ width: 50, height: 50, fit: sharp.fit.contain })
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(50, output.info.width);
assert.strictEqual(50, output.info.height);
});
});

it('Skip shrink-on-load where one dimension <4px', async () => {
const jpeg = await sharp({
create: {

@@ -272,6 +272,34 @@ describe('Rotation', function () {
});
});

it('Animated image rotate-then-extract rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.rotate(1)
.extract({
top: 1,
left: 1,
width: 10,
height: 10
})
.toBuffer(),
/Rotate is not supported for multi-page images/
)
);

it('Animated image extract-then-rotate rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.extract({
top: 1,
left: 1,
width: 10,
height: 10
})
.rotate(1)
.toBuffer(),
/Rotate is not supported for multi-page images/
)
);

it('Flip - vertical', function (done) {
sharp(fixtures.inputJpg)
.resize(320)

@@ -74,6 +74,27 @@ describe('SVG input', function () {
});
});

it('Convert SVG to PNG utilizing scale-on-load', function (done) {
const size = 1024;
sharp(fixtures.inputSvgSmallViewBox)
.resize(size)
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(size, info.width);
assert.strictEqual(size, info.height);
fixtures.assertSimilar(fixtures.expected('circle.png'), data, function (err) {
if (err) throw err;
sharp(data).metadata(function (err, info) {
if (err) throw err;
assert.strictEqual(72, info.density);
done();
});
});
});
});

it('Convert SVG to PNG at 14.4DPI', function (done) {
sharp(fixtures.inputSvg, { density: 14.4 })
.toFormat('png')

@@ -188,6 +188,26 @@ describe('TIFF', function () {
)
);

it('TIFF imputes xres and yres from withMetadataDensity if not explicitly provided', async () => {
const data = await sharp(fixtures.inputTiff)
.resize(8, 8)
.tiff()
.withMetadata({ density: 600 })
.toBuffer();
const { density } = await sharp(data).metadata();
assert.strictEqual(600, density);
});

it('TIFF uses xres and yres over withMetadataDensity if explicitly provided', async () => {
const data = await sharp(fixtures.inputTiff)
.resize(8, 8)
.tiff({ xres: 1000, yres: 1000 })
.withMetadata({ density: 600 })
.toBuffer();
const { density } = await sharp(data).metadata();
assert.strictEqual(25400, density);
});

it('TIFF invalid xres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ xres: '1000.0' });
@@ -268,6 +288,30 @@ describe('TIFF', function () {
});
});

it('TIFF resolutionUnit of inch (default)', async () => {
const data = await sharp({ create: { width: 8, height: 8, channels: 3, background: 'red' } })
.tiff()
.toBuffer();
const { resolutionUnit } = await sharp(data).metadata();
assert.strictEqual(resolutionUnit, 'inch');
});

it('TIFF resolutionUnit of inch', async () => {
const data = await sharp({ create: { width: 8, height: 8, channels: 3, background: 'red' } })
.tiff({ resolutionUnit: 'inch' })
.toBuffer();
const { resolutionUnit } = await sharp(data).metadata();
assert.strictEqual(resolutionUnit, 'inch');
});

it('TIFF resolutionUnit of cm', async () => {
const data = await sharp({ create: { width: 8, height: 8, channels: 3, background: 'red' } })
.tiff({ resolutionUnit: 'cm' })
.toBuffer();
const { resolutionUnit } = await sharp(data).metadata();
assert.strictEqual(resolutionUnit, 'cm');
});

it('TIFF deflate compression with horizontal predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
@@ -363,6 +407,12 @@ describe('TIFF', function () {
});
});

it('TIFF invalid resolutionUnit option throws', function () {
assert.throws(function () {
sharp().tiff({ resolutionUnit: 'none' });
});
});

it('TIFF horizontal predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'horizontal' });

@@ -667,7 +667,7 @@ describe('Tile', function () {
sharp(fixtures.inputJpg)
.webp({
quality: 1,
reductionEffort: 0
effort: 0
})
.tile({
layout: 'google'
@@ -830,7 +830,7 @@ describe('Tile', function () {
});
});

it('IIIF layout', function (done) {
it('IIIFv2 layout', function (done) {
const name = 'output.iiif.info';
const directory = fixtures.path(name);
rimraf(directory, function () {
@@ -848,6 +848,7 @@ describe('Tile', function () {
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
const infoJson = require(path.join(directory, 'info.json'));
assert.strictEqual('http://iiif.io/api/image/2/context.json', infoJson['@context']);
assert.strictEqual(`${id}/${name}`, infoJson['@id']);
fs.stat(path.join(directory, '0,0,256,256', '256,', '0', 'default.jpg'), function (err, stat) {
if (err) throw err;
@@ -859,6 +860,37 @@ describe('Tile', function () {
});
});

it('IIIFv3 layout', function (done) {
const name = 'output.iiif3.info';
const directory = fixtures.path(name);
rimraf(directory, function () {
const id = 'https://sharp.test.com/iiif3';
sharp(fixtures.inputJpg)
.tile({
layout: 'iiif3',
id
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
const infoJson = require(path.join(directory, 'info.json'));
assert.strictEqual('http://iiif.io/api/image/3/context.json', infoJson['@context']);
assert.strictEqual('ImageService3', infoJson.type);
assert.strictEqual(`${id}/${name}`, infoJson.id);
fs.stat(path.join(directory, '0,0,256,256', '256,256', '0', 'default.jpg'), function (err, stat) {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
done();
});
});
});
});

it('Write to ZIP container using file extension', function (done) {
const container = fixtures.path('output.dz.container.zip');
const extractTo = fixtures.path('output.dz.container');

test/unit/timeout.js (new file, 26 lines)
@@ -0,0 +1,26 @@
'use strict';

const assert = require('assert');

const sharp = require('../../');
const fixtures = require('../fixtures');

describe('Timeout', function () {
it('Will timeout after 1s when performing slow blur operation', () => assert.rejects(
() => sharp(fixtures.inputJpg)
.blur(100)
.timeout({ seconds: 1 })
.toBuffer(),
/timeout: [0-9]+% complete/
));

it('invalid object', () => assert.throws(
() => sharp().timeout('fail'),
/Expected object for options but received fail of type string/
));

it('invalid seconds', () => assert.throws(
() => sharp().timeout({ seconds: 'fail' }),
/Expected integer between 0 and 3600 for seconds but received fail of type string/
));
});
@@ -120,6 +120,14 @@ describe('Trim borders', function () {
)
);

it('Animated image rejects', () =>
assert.rejects(() => sharp(fixtures.inputGifAnimated, { animated: true })
.trim()
.toBuffer(),
/Trim is not supported for multi-page images/
)
);

describe('Invalid thresholds', function () {
[-1, 'fail', {}].forEach(function (threshold) {
it(JSON.stringify(threshold), function () {

@@ -131,4 +131,13 @@ describe('Utilities', function () {
assert.strictEqual('string', typeof sharp.versions.vips);
});
});

describe('Vendor', function () {
it('Contains expected attributes', function () {
assert.strictEqual('object', typeof sharp.vendor);
assert.strictEqual('string', typeof sharp.vendor.current);
assert.strictEqual(true, Array.isArray(sharp.vendor.installed));
assert.strictEqual(true, sharp.vendor.installed.length > 0);
});
});
});

@@ -35,7 +35,7 @@ describe('WebP', function () {

it('should work for webp alpha quality', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ alphaQuality: 80, reductionEffort: 0 })
.webp({ alphaQuality: 80, effort: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
@@ -46,7 +46,7 @@ describe('WebP', function () {

it('should work for webp lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ lossless: true, reductionEffort: 0 })
.webp({ lossless: true, effort: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
@@ -57,7 +57,7 @@ describe('WebP', function () {

it('should work for webp near-lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50, reductionEffort: 0 })
.webp({ nearLossless: true, quality: 50, effort: 0 })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
@@ -68,7 +68,7 @@ describe('WebP', function () {

it('should use near-lossless when both lossless and nearLossless are specified', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50, lossless: true, reductionEffort: 0 })
.webp({ nearLossless: true, quality: 50, lossless: true, effort: 0 })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
@@ -99,37 +99,37 @@ describe('WebP', function () {
});
});

it('should produce a smaller file size with increased reductionEffort', () =>
it('should produce a smaller file size with increased effort', () =>
sharp(fixtures.inputJpg)
.resize(320, 240)
.webp()
.toBuffer()
.then(reductionEffort4 =>
.then(effort4 =>
sharp(fixtures.inputJpg)
.resize(320, 240)
.webp({ reductionEffort: 6 })
.webp({ effort: 6 })
.toBuffer()
.then(reductionEffort6 => {
assert.strictEqual(true, reductionEffort4.length > reductionEffort6.length);
.then(effort6 => {
assert.strictEqual(true, effort4.length > effort6.length);
})
)
);

it('invalid reductionEffort throws', () => {
it('invalid effort throws', () => {
assert.throws(() => {
sharp().webp({ effort: true });
});
});

it('invalid reductionEffort (deprecated) throws', () => {
assert.throws(() => {
sharp().webp({ reductionEffort: true });
});
});

it('out of range reductionEffort throws', () => {
it('out of range effort throws', () => {
assert.throws(() => {
sharp().webp({ reductionEffort: -1 });
});
});

it('invalid pageHeight throws', () => {
assert.throws(() => {
sharp().webp({ pageHeight: 0 });
sharp().webp({ effort: -1 });
});
});

@@ -145,7 +145,7 @@ describe('WebP', function () {

it('invalid delay throws', () => {
assert.throws(() => {
sharp().webp({ delay: [-1] });
sharp().webp({ delay: -1 });
});

assert.throws(() => {
@@ -153,16 +153,13 @@ describe('WebP', function () {
});
});

it('should double the number of frames with default delay', async () => {
const original = await sharp(fixtures.inputWebPAnimated, { pages: -1 }).metadata();
it('should repeat a single delay for all frames', async () => {
const updated = await sharp(fixtures.inputWebPAnimated, { pages: -1 })
.webp({ pageHeight: original.pageHeight / 2 })
.webp({ delay: 100 })
.toBuffer()
.then(data => sharp(data, { pages: -1 }).metadata());

assert.strictEqual(updated.pages, original.pages * 2);
assert.strictEqual(updated.pageHeight, original.pageHeight / 2);
assert.deepStrictEqual(updated.delay, [...original.delay, ...Array(9).fill(120)]);
assert.deepStrictEqual(updated.delay, Array(updated.pages).fill(100));
});

it('should limit animation loop', async () => {
@@ -186,10 +183,19 @@ describe('WebP', function () {
assert.deepStrictEqual(updated.delay, expectedDelay);
});

it('should preserve delay between frames', async () => {
const updated = await sharp(fixtures.inputWebPAnimated, { pages: -1 })
.webp()
.toBuffer()
.then(data => sharp(data, { pages: -1 }).metadata());

assert.deepStrictEqual(updated.delay, [120, 120, 90, 120, 120, 90, 120, 90, 30]);
});

it('should work with streams when only animated is set', function (done) {
fs.createReadStream(fixtures.inputWebPAnimated)
.pipe(sharp({ animated: true }))
.webp({ lossless: true, reductionEffort: 0 })
.webp({ lossless: true, effort: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
@@ -201,7 +207,7 @@ describe('WebP', function () {
it('should work with streams when only pages is set', function (done) {
fs.createReadStream(fixtures.inputWebPAnimated)
.pipe(sharp({ pages: -1 }))
.webp({ lossless: true, reductionEffort: 0 })
.webp({ lossless: true, effort: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
@@ -209,4 +215,26 @@ describe('WebP', function () {
fixtures.assertSimilar(fixtures.inputWebPAnimated, data, done);
});
});

it('should resize animated image to page height', async () => {
const updated = await sharp(fixtures.inputWebPAnimated, { pages: -1 })
.resize({ height: 570 })
.webp({ effort: 0 })
.toBuffer()
.then(data => sharp(data, { pages: -1 }).metadata());

assert.strictEqual(updated.height, 570 * 9);
assert.strictEqual(updated.pageHeight, 570);
});

it('should take page parameter into account when animated is set', async () => {
const updated = await sharp(fixtures.inputWebPAnimated, { animated: true, page: 2 })
.resize({ height: 570 })
.webp({ effort: 0 })
.toBuffer()
.then(data => sharp(data, { pages: -1 }).metadata());

assert.strictEqual(updated.height, 570 * 7);
assert.strictEqual(updated.pageHeight, 570);
});
});