Compare commits
98 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b9d4c30a9f | ||
|
|
148760fe55 | ||
|
|
98ed237734 | ||
|
|
b55e58f31e | ||
|
|
0af070ed93 | ||
|
|
9fbb4fcaef | ||
|
|
6008ff8a08 | ||
|
|
cd5e11bd50 | ||
|
|
08d6822265 | ||
|
|
8b8a815fbb | ||
|
|
a44da850c1 | ||
|
|
c5ef4677b1 | ||
|
|
f8a430bdd3 | ||
|
|
cd419a261b | ||
|
|
d7776e3b98 | ||
|
|
1eefd4e562 | ||
|
|
0a16d26ec7 | ||
|
|
fc03fba602 | ||
|
|
c87fe512b4 | ||
|
|
2eaab59c48 | ||
|
|
4ec883eaa0 | ||
|
|
0063df4d4f | ||
|
|
6c61ad256f | ||
|
|
b90474affa | ||
|
|
34cbc6dec3 | ||
|
|
bb8de0cc26 | ||
|
|
863e37455a | ||
|
|
6f0e6f2e65 | ||
|
|
ebf4ccd124 | ||
|
|
b96c8e8ba4 | ||
|
|
42d2f07e44 | ||
|
|
a2988c9edc | ||
|
|
24b3344937 | ||
|
|
9608f219bd | ||
|
|
4798d9da64 | ||
|
|
8d8c6b70eb | ||
|
|
9e2207f376 | ||
|
|
802f560b9b | ||
|
|
a532659b0f | ||
|
|
25c6da2bcd | ||
|
|
02f855d57a | ||
|
|
c150263ef1 | ||
|
|
9f79f80a93 | ||
|
|
069803b83d | ||
|
|
f79760b4f2 | ||
|
|
aa5f0f4e40 | ||
|
|
286a322622 | ||
|
|
6d404f4d2c | ||
|
|
bdc50e1d6e | ||
|
|
a9bd0e79f8 | ||
|
|
a1e464cc5e | ||
|
|
081debd055 | ||
|
|
ef849fd639 | ||
|
|
a42a975c46 | ||
|
|
e8273580af | ||
|
|
5be36c2deb | ||
|
|
6cda090ce2 | ||
|
|
eac6e8b261 | ||
|
|
844deaf480 | ||
|
|
efbb0c22fd | ||
|
|
da0b594900 | ||
|
|
78dada9126 | ||
|
|
15f5cd4671 | ||
|
|
9eb2e94404 | ||
|
|
e40b068628 | ||
|
|
2c46528269 | ||
|
|
584807b4f5 | ||
|
|
a7fa7014ef | ||
|
|
f92e33fbff | ||
|
|
0f1e7ef6f6 | ||
|
|
89e204d824 | ||
|
|
2a71f1830f | ||
|
|
def99a294a | ||
|
|
9d760f3958 | ||
|
|
0265d305fe | ||
|
|
a472aea025 | ||
|
|
01ffa80338 | ||
|
|
789d4851ea | ||
|
|
4490a93430 | ||
|
|
ac0dc10bd5 | ||
|
|
5740f4545e | ||
|
|
a9d692fb43 | ||
|
|
df971207b8 | ||
|
|
3a64a0529a | ||
|
|
76cda885fb | ||
|
|
1a563360c6 | ||
|
|
ca22af203f | ||
|
|
9fa516e849 | ||
|
|
12f472126d | ||
|
|
18be09f1d7 | ||
|
|
b3c3290f90 | ||
|
|
123f95c85a | ||
|
|
5b0fba4c01 | ||
|
|
37f7ccfff4 | ||
|
|
51811d06e2 | ||
|
|
181731f8f4 | ||
|
|
ae79d26ead | ||
|
|
eacb8337fa |
1
.github/CONTRIBUTING.md
vendored
@@ -33,6 +33,7 @@ To test C++ changes, you can compile the module using `npm install --build-from-
|
||||
|
||||
Please add JavaScript [unit tests](https://github.com/lovell/sharp/tree/main/test/unit) to cover your new feature.
|
||||
A test coverage report for the JavaScript code is generated in the `coverage/lcov-report` directory.
|
||||
Please also update the [TypeScript definitions](https://github.com/lovell/sharp/tree/main/lib/index.d.ts), along with the [type definition tests](https://github.com/lovell/sharp/tree/main/test/types/sharp.test-d.ts).
|
||||
|
||||
Where possible, the functional tests use gradient-based perceptual hashes
|
||||
based on [dHash](http://www.hackerfactor.com/blog/index.php?/archives/529-Kind-of-Like-That.html)
|
||||
|
||||
4
.github/workflows/ci-darwin-arm64v8.yml
vendored
@@ -22,13 +22,13 @@ jobs:
|
||||
run:
|
||||
shell: /usr/bin/arch -arch arm64e /bin/bash -l {0}
|
||||
steps:
|
||||
- name: Dependencies
|
||||
- name: Dependencies (Node.js)
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.nodejs_version }}
|
||||
architecture: ${{ matrix.nodejs_arch }}
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Install
|
||||
run: npm install --build-from-source --unsafe-perm
|
||||
- name: Test
|
||||
|
||||
9
.github/workflows/ci.yml
vendored
@@ -78,14 +78,19 @@ jobs:
|
||||
- name: Dependencies (Linux musl)
|
||||
if: contains(matrix.container, 'alpine')
|
||||
run: apk add build-base git python3 font-noto --update-cache
|
||||
- name: Dependencies (macOS, Windows)
|
||||
- name: Dependencies (Python 3.10 - macOS, Windows)
|
||||
if: contains(matrix.os, 'macos') || contains(matrix.os, 'windows')
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: Dependencies (Node.js - macOS, Windows)
|
||||
if: contains(matrix.os, 'macos') || contains(matrix.os, 'windows')
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.nodejs_version }}
|
||||
architecture: ${{ matrix.nodejs_arch }}
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Fix working directory ownership
|
||||
if: matrix.container
|
||||
run: chown root.root .
|
||||
|
||||
7
.mocharc.jsonc
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"parallel": true,
|
||||
"slow": 1000,
|
||||
"timeout": 30000,
|
||||
"require": "./test/beforeEach.js",
|
||||
"spec": "./test/unit/*.js"
|
||||
}
|
||||
@@ -102,7 +102,7 @@ covers reporting bugs, requesting features and submitting code changes.
|
||||
|
||||
## Licensing
|
||||
|
||||
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Lovell Fuller and contributors.
|
||||
Copyright 2013 Lovell Fuller and others.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -179,7 +179,7 @@
|
||||
],
|
||||
'xcode_settings': {
|
||||
'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',
|
||||
'MACOSX_DEPLOYMENT_TARGET': '10.9',
|
||||
'MACOSX_DEPLOYMENT_TARGET': '10.13',
|
||||
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
|
||||
'GCC_ENABLE_CPP_RTTI': 'YES',
|
||||
'OTHER_CPLUSPLUSFLAGS': [
|
||||
|
||||
@@ -74,7 +74,7 @@ covers reporting bugs, requesting features and submitting code changes.
|
||||
|
||||
### Licensing
|
||||
|
||||
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Lovell Fuller and contributors.
|
||||
Copyright 2013 Lovell Fuller and others.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## removeAlpha
|
||||
|
||||
Remove alpha channel, if any. This is a no-op if the image does not have an alpha channel.
|
||||
|
||||
See also [flatten][1].
|
||||
See also [flatten](/api-operation#flatten).
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp('rgba.png')
|
||||
.removeAlpha()
|
||||
.toFile('rgb.png', function(err, info) {
|
||||
@@ -16,61 +13,62 @@ sharp('rgba.png')
|
||||
});
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## ensureAlpha
|
||||
|
||||
Ensure the output image has an alpha transparency channel.
|
||||
If missing, the added alpha channel will have the specified
|
||||
transparency level, defaulting to fully-opaque (1).
|
||||
This is a no-op if the image already has an alpha channel.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `alpha` **[number][2]** alpha transparency level (0=fully-transparent, 1=fully-opaque) (optional, default `1`)
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid alpha transparency level
|
||||
|
||||
```javascript
|
||||
**Since**: 0.21.2
|
||||
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [alpha] | <code>number</code> | <code>1</code> | alpha transparency level (0=fully-transparent, 1=fully-opaque) |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// rgba.png will be a 4 channel image with a fully-opaque alpha channel
|
||||
await sharp('rgb.jpg')
|
||||
.ensureAlpha()
|
||||
.toFile('rgba.png')
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// rgba is a 4 channel image with a fully-transparent alpha channel
|
||||
const rgba = await sharp(rgb)
|
||||
.ensureAlpha(0)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][3]** Invalid alpha transparency level
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.21.2
|
||||
|
||||
## extractChannel
|
||||
|
||||
Extract a single channel from a multi-channel image.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `channel` **([number][2] | [string][4])** zero-indexed channel/band number to extract, or `red`, `green`, `blue` or `alpha`.
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid channel
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| channel | <code>number</code> \| <code>string</code> | zero-indexed channel/band number to extract, or `red`, `green`, `blue` or `alpha`. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// green.jpg is a greyscale image containing the green channel of the input
|
||||
await sharp(input)
|
||||
.extractChannel('green')
|
||||
.toFile('green.jpg');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// red1 is the red value of the first pixel, red2 the second pixel etc.
|
||||
const [red1, red2, ...] = await sharp(input)
|
||||
.extractChannel(0)
|
||||
@@ -78,45 +76,46 @@ const [red1, red2, ...] = await sharp(input)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][3]** Invalid channel
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## joinChannel
|
||||
|
||||
Join one or more channels to the image.
|
||||
The meaning of the added channels depends on the output colourspace, set with `toColourspace()`.
|
||||
By default the output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
|
||||
Channel ordering follows vips convention:
|
||||
|
||||
* sRGB: 0: Red, 1: Green, 2: Blue, 3: Alpha.
|
||||
* CMYK: 0: Magenta, 1: Cyan, 2: Yellow, 3: Black, 4: Alpha.
|
||||
- sRGB: 0: Red, 1: Green, 2: Blue, 3: Alpha.
|
||||
- CMYK: 0: Magenta, 1: Cyan, 2: Yellow, 3: Black, 4: Alpha.
|
||||
|
||||
Buffers may be any of the image formats supported by sharp.
|
||||
For raw pixel input, the `options` object should contain a `raw` attribute, which follows the format of the attribute of the same name in the `sharp()` constructor.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `images` **([Array][5]<([string][4] | [Buffer][6])> | [string][4] | [Buffer][6])** one or more images (file paths, Buffers).
|
||||
* `options` **[Object][7]** image options, see `sharp()` constructor.
|
||||
**Throws**:
|
||||
|
||||
<!---->
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| images | <code>Array.<(string\|Buffer)></code> \| <code>string</code> \| <code>Buffer</code> | one or more images (file paths, Buffers). |
|
||||
| options | <code>Object</code> | image options, see `sharp()` constructor. |
|
||||
|
||||
* Throws **[Error][3]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## bandbool
|
||||
|
||||
Perform a bitwise boolean operation on all input image channels (bands) to produce a single channel output image.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `boolOp` **[string][4]** one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively.
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| boolOp | <code>string</code> | one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp('3-channel-rgb-input.png')
|
||||
.bandbool(sharp.bool.and)
|
||||
.toFile('1-channel-output.png', function (err, info) {
|
||||
@@ -124,22 +123,4 @@ sharp('3-channel-rgb-input.png')
|
||||
// If `I(1,1) = [247, 170, 14] = [0b11110111, 0b10101010, 0b00001111]`
|
||||
// then `O(1,1) = 0b11110111 & 0b10101010 & 0b00001111 = 0b00000010 = 2`.
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][3]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
[1]: /api-operation#flatten
|
||||
|
||||
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
|
||||
|
||||
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
|
||||
|
||||
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
|
||||
|
||||
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
|
||||
|
||||
[6]: https://nodejs.org/api/buffer.html
|
||||
|
||||
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
```
|
||||
@@ -1,28 +1,26 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## tint
|
||||
|
||||
Tint the image using the provided chroma while preserving the image luminance.
|
||||
An alpha channel may be present and will be unchanged by the operation.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `rgb` **([string][1] | [Object][2])** parsed by the [color][3] module to extract chroma values.
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameter
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| rgb | <code>string</code> \| <code>Object</code> | parsed by the [color](https://www.npmjs.org/package/color) module to extract chroma values. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.tint({ r: 255, g: 240, b: 16 })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameter
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## greyscale
|
||||
|
||||
Convert to 8-bit greyscale; 256 shades of grey.
|
||||
This is a linear operation. If the input image is in a non-linear colour space such as sRGB, use `gamma()` with `greyscale()` for the best results.
|
||||
By default the output image will be web-friendly sRGB and contain three (identical) color channels.
|
||||
@@ -30,44 +28,50 @@ This may be overridden by other sharp operations such as `toColourspace('b-w')`,
|
||||
which will produce an output image containing one color channel.
|
||||
An alpha channel may be present, and will be unchanged by the operation.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `greyscale` **[Boolean][5]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default |
|
||||
| --- | --- | --- |
|
||||
| [greyscale] | <code>Boolean</code> | <code>true</code> |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input).greyscale().toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## grayscale
|
||||
|
||||
Alternative spelling of `greyscale`.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `grayscale` **[Boolean][5]** (optional, default `true`)
|
||||
|
||||
Returns **Sharp** 
|
||||
| Param | Type | Default |
|
||||
| --- | --- | --- |
|
||||
| [grayscale] | <code>Boolean</code> | <code>true</code> |
|
||||
|
||||
|
||||
|
||||
## pipelineColourspace
|
||||
|
||||
Set the pipeline colourspace.
|
||||
|
||||
The input image will be converted to the provided colourspace at the start of the pipeline.
|
||||
All operations will use this colourspace before converting to the output colourspace, as defined by [toColourspace][6].
|
||||
All operations will use this colourspace before converting to the output colourspace, as defined by [toColourspace](#toColourspace).
|
||||
|
||||
This feature is experimental and has not yet been fully-tested with all operations.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `colourspace` **[string][1]?** pipeline colourspace e.g. `rgb16`, `scrgb`, `lab`, `grey16` [...][7]
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
**Since**: 0.29.0
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [colourspace] | <code>string</code> | pipeline colourspace e.g. `rgb16`, `scrgb`, `lab`, `grey16` [...](https://github.com/libvips/libvips/blob/41cff4e9d0838498487a00623462204eb10ee5b8/libvips/iofuncs/enumtypes.c#L774) |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Run pipeline in 16 bits per channel RGB while converting final result to 8 bits per channel sRGB.
|
||||
await sharp(input)
|
||||
.pipelineColourspace('rgb16')
|
||||
@@ -75,76 +79,54 @@ await sharp(input)
|
||||
.toFile('16bpc-pipeline-to-8bpc-output.png')
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.29.0
|
||||
|
||||
## pipelineColorspace
|
||||
|
||||
Alternative spelling of `pipelineColourspace`.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `colorspace` **[string][1]?** pipeline colorspace.
|
||||
**Throws**:
|
||||
|
||||
<!---->
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [colorspace] | <code>string</code> | pipeline colorspace. |
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## toColourspace
|
||||
|
||||
Set the output colourspace.
|
||||
By default output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `colourspace` **[string][1]?** output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][8]
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [colourspace] | <code>string</code> | output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/libvips/libvips/blob/3c0bfdf74ce1dc37a6429bed47fa76f16e2cd70a/libvips/iofuncs/enumtypes.c#L777-L794) |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Output 16 bits per pixel RGB
|
||||
await sharp(input)
|
||||
.toColourspace('rgb16')
|
||||
.toFile('16-bpp.png')
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## toColorspace
|
||||
|
||||
Alternative spelling of `toColourspace`.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `colorspace` **[string][1]?** output colorspace.
|
||||
**Throws**:
|
||||
|
||||
<!---->
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
|
||||
|
||||
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
|
||||
[3]: https://www.npmjs.org/package/color
|
||||
|
||||
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
|
||||
|
||||
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
|
||||
|
||||
[6]: #tocolourspace
|
||||
|
||||
[7]: https://github.com/libvips/libvips/blob/41cff4e9d0838498487a00623462204eb10ee5b8/libvips/iofuncs/enumtypes.c#L774
|
||||
|
||||
[8]: https://github.com/libvips/libvips/blob/3c0bfdf74ce1dc37a6429bed47fa76f16e2cd70a/libvips/iofuncs/enumtypes.c#L777-L794
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [colorspace] | <code>string</code> | output colorspace. |
|
||||
@@ -1,7 +1,4 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## composite
|
||||
|
||||
Composite image(s) over the processed (resized, extracted etc.) image.
|
||||
|
||||
The images to composite must be the same size or smaller than the processed image.
|
||||
@@ -17,52 +14,53 @@ The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
|
||||
`hard-light`, `soft-light`, `difference`, `exclusion`.
|
||||
|
||||
More information about blend modes can be found at
|
||||
[https://www.libvips.org/API/current/libvips-conversion.html#VipsBlendMode][1]
|
||||
and [https://www.cairographics.org/operators/][2]
|
||||
https://www.libvips.org/API/current/libvips-conversion.html#VipsBlendMode
|
||||
and https://www.cairographics.org/operators/
|
||||
|
||||
### Parameters
|
||||
|
||||
* `images` **[Array][3]<[Object][4]>** Ordered list of images to composite
|
||||
**Throws**:
|
||||
|
||||
* `images[].input` **([Buffer][5] | [String][6])?** Buffer containing image data, String containing the path to an image file, or Create object (see below)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
* `images[].input.create` **[Object][4]?** describes a blank overlay to be created.
|
||||
**Since**: 0.22.0
|
||||
|
||||
* `images[].input.create.width` **[Number][7]?** 
|
||||
* `images[].input.create.height` **[Number][7]?** 
|
||||
* `images[].input.create.channels` **[Number][7]?** 3-4
|
||||
* `images[].input.create.background` **([String][6] | [Object][4])?** parsed by the [color][8] module to extract values for red, green, blue and alpha.
|
||||
* `images[].input.text` **[Object][4]?** describes a new text image to be created.
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| images | <code>Array.<Object></code> | | Ordered list of images to composite |
|
||||
| [images[].input] | <code>Buffer</code> \| <code>String</code> | | Buffer containing image data, String containing the path to an image file, or Create object (see below) |
|
||||
| [images[].input.create] | <code>Object</code> | | describes a blank overlay to be created. |
|
||||
| [images[].input.create.width] | <code>Number</code> | | |
|
||||
| [images[].input.create.height] | <code>Number</code> | | |
|
||||
| [images[].input.create.channels] | <code>Number</code> | | 3-4 |
|
||||
| [images[].input.create.background] | <code>String</code> \| <code>Object</code> | | parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. |
|
||||
| [images[].input.text] | <code>Object</code> | | describes a new text image to be created. |
|
||||
| [images[].input.text.text] | <code>string</code> | | text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`. |
|
||||
| [images[].input.text.font] | <code>string</code> | | font name to render with. |
|
||||
| [images[].input.text.fontfile] | <code>string</code> | | absolute filesystem path to a font file that can be used by `font`. |
|
||||
| [images[].input.text.width] | <code>number</code> | <code>0</code> | integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. |
|
||||
| [images[].input.text.height] | <code>number</code> | <code>0</code> | integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. |
|
||||
| [images[].input.text.align] | <code>string</code> | <code>"'left'"</code> | text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). |
|
||||
| [images[].input.text.justify] | <code>boolean</code> | <code>false</code> | set this to true to apply justification to the text. |
|
||||
| [images[].input.text.dpi] | <code>number</code> | <code>72</code> | the resolution (size) at which to render the text. Does not take effect if `height` is specified. |
|
||||
| [images[].input.text.rgba] | <code>boolean</code> | <code>false</code> | set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. |
|
||||
| [images[].input.text.spacing] | <code>number</code> | <code>0</code> | text line height in points. Will use the font line height if none is specified. |
|
||||
| [images[].blend] | <code>String</code> | <code>'over'</code> | how to blend this image with the image below. |
|
||||
| [images[].gravity] | <code>String</code> | <code>'centre'</code> | gravity at which to place the overlay. |
|
||||
| [images[].top] | <code>Number</code> | | the pixel offset from the top edge. |
|
||||
| [images[].left] | <code>Number</code> | | the pixel offset from the left edge. |
|
||||
| [images[].tile] | <code>Boolean</code> | <code>false</code> | set to true to repeat the overlay image across the entire image with the given `gravity`. |
|
||||
| [images[].premultiplied] | <code>Boolean</code> | <code>false</code> | set to true to avoid premultipling the image below. Equivalent to the `--premultiplied` vips option. |
|
||||
| [images[].density] | <code>Number</code> | <code>72</code> | number representing the DPI for vector overlay image. |
|
||||
| [images[].raw] | <code>Object</code> | | describes overlay when using raw pixel data. |
|
||||
| [images[].raw.width] | <code>Number</code> | | |
|
||||
| [images[].raw.height] | <code>Number</code> | | |
|
||||
| [images[].raw.channels] | <code>Number</code> | | |
|
||||
| [images[].animated] | <code>boolean</code> | <code>false</code> | Set to `true` to read all frames/pages of an animated image. |
|
||||
| [images[].failOn] | <code>string</code> | <code>"'warning'"</code> | @see [constructor parameters](/api-constructor#parameters) |
|
||||
| [images[].limitInputPixels] | <code>number</code> \| <code>boolean</code> | <code>268402689</code> | @see [constructor parameters](/api-constructor#parameters) |
|
||||
|
||||
* `images[].input.text.text` **[string][6]?** text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
|
||||
* `images[].input.text.font` **[string][6]?** font name to render with.
|
||||
* `images[].input.text.fontfile` **[string][6]?** absolute filesystem path to a font file that can be used by `font`.
|
||||
* `images[].input.text.width` **[number][7]** integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. (optional, default `0`)
|
||||
* `images[].input.text.height` **[number][7]** integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. (optional, default `0`)
|
||||
* `images[].input.text.align` **[string][6]** text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). (optional, default `'left'`)
|
||||
* `images[].input.text.justify` **[boolean][9]** set this to true to apply justification to the text. (optional, default `false`)
|
||||
* `images[].input.text.dpi` **[number][7]** the resolution (size) at which to render the text. Does not take effect if `height` is specified. (optional, default `72`)
|
||||
* `images[].input.text.rgba` **[boolean][9]** set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. (optional, default `false`)
|
||||
* `images[].input.text.spacing` **[number][7]** text line height in points. Will use the font line height if none is specified. (optional, default `0`)
|
||||
* `images[].blend` **[String][6]** how to blend this image with the image below. (optional, default `'over'`)
|
||||
* `images[].gravity` **[String][6]** gravity at which to place the overlay. (optional, default `'centre'`)
|
||||
* `images[].top` **[Number][7]?** the pixel offset from the top edge.
|
||||
* `images[].left` **[Number][7]?** the pixel offset from the left edge.
|
||||
* `images[].tile` **[Boolean][9]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
|
||||
* `images[].premultiplied` **[Boolean][9]** set to true to avoid premultipling the image below. Equivalent to the `--premultiplied` vips option. (optional, default `false`)
|
||||
* `images[].density` **[Number][7]** number representing the DPI for vector overlay image. (optional, default `72`)
|
||||
* `images[].raw` **[Object][4]?** describes overlay when using raw pixel data.
|
||||
|
||||
* `images[].raw.width` **[Number][7]?** 
|
||||
* `images[].raw.height` **[Number][7]?** 
|
||||
* `images[].raw.channels` **[Number][7]?** 
|
||||
* `images[].animated` **[boolean][9]** Set to `true` to read all frames/pages of an animated image. (optional, default `false`)
|
||||
* `images[].failOn` **[string][6]** @see [constructor parameters][10] (optional, default `'warning'`)
|
||||
* `images[].limitInputPixels` **([number][7] | [boolean][9])** @see [constructor parameters][10] (optional, default `268402689`)
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
await sharp(background)
|
||||
.composite([
|
||||
{ input: layer1, gravity: 'northwest' },
|
||||
@@ -70,16 +68,16 @@ await sharp(background)
|
||||
])
|
||||
.toFile('combined.png');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp('input.gif', { animated: true })
|
||||
.composite([
|
||||
{ input: 'overlay.png', tile: true, blend: 'saturate' }
|
||||
])
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp('input.png')
|
||||
.rotate(180)
|
||||
.resize(300)
|
||||
@@ -94,34 +92,4 @@ sharp('input.png')
|
||||
// onto orange background, composited with overlay.png with SE gravity,
|
||||
// sharpened, with metadata, 90% quality WebP image data. Phew!
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][11]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.22.0
|
||||
|
||||
[1]: https://www.libvips.org/API/current/libvips-conversion.html#VipsBlendMode
|
||||
|
||||
[2]: https://www.cairographics.org/operators/
|
||||
|
||||
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
|
||||
|
||||
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
|
||||
[5]: https://nodejs.org/api/buffer.html
|
||||
|
||||
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
|
||||
|
||||
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
|
||||
|
||||
[8]: https://www.npmjs.org/package/color
|
||||
|
||||
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
|
||||
|
||||
[10]: /api-constructor#parameters
|
||||
|
||||
[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
|
||||
```
|
||||
@@ -1,7 +1,9 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## Sharp
|
||||
|
||||
**Emits**: <code>Sharp#event:info</code>, <code>Sharp#event:warning</code>
|
||||
<a name="new_Sharp_new"></a>
|
||||
|
||||
### new
|
||||
Constructor factory to create an instance of `sharp`, to which further methods are chained.
|
||||
|
||||
JPEG, PNG, WebP, GIF, AVIF or TIFF format image data can be streamed out from this object.
|
||||
@@ -9,64 +11,57 @@ When using Stream based output, derived attributes are available from the `info`
|
||||
|
||||
Non-critical problems encountered during processing are emitted as `warning` events.
|
||||
|
||||
Implements the [stream.Duplex][1] class.
|
||||
Implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class.
|
||||
|
||||
### Parameters
|
||||
**Throws**:
|
||||
|
||||
* `input` **([Buffer][2] | [Uint8Array][3] | [Uint8ClampedArray][4] | [Int8Array][5] | [Uint16Array][6] | [Int16Array][7] | [Uint32Array][8] | [Int32Array][9] | [Float32Array][10] | [Float64Array][11] | [string][12])?** if present, can be
|
||||
a Buffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
|
||||
a TypedArray containing raw pixel image data, or
|
||||
a String containing the filesystem path to an JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
|
||||
JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
|
||||
* `options` **[Object][13]?** if present, is an Object with optional attributes.
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
* `options.failOn` **[string][12]** level of sensitivity to invalid images, one of (in order of sensitivity): 'none' (least), 'truncated', 'error' or 'warning' (most), highers level imply lower levels. (optional, default `'warning'`)
|
||||
* `options.limitInputPixels` **([number][14] | [boolean][15])** Do not process input images where the number of pixels
|
||||
(width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
|
||||
An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). (optional, default `268402689`)
|
||||
* `options.unlimited` **[boolean][15]** Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF). (optional, default `false`)
|
||||
* `options.sequentialRead` **[boolean][15]** Set this to `true` to use sequential rather than random access where possible.
|
||||
This can reduce memory usage and might improve performance on some systems. (optional, default `false`)
|
||||
* `options.density` **[number][14]** number representing the DPI for vector images in the range 1 to 100000. (optional, default `72`)
|
||||
* `options.pages` **[number][14]** number of pages to extract for multi-page input (GIF, WebP, AVIF, TIFF, PDF), use -1 for all pages. (optional, default `1`)
|
||||
* `options.page` **[number][14]** page number to start extracting from for multi-page input (GIF, WebP, AVIF, TIFF, PDF), zero based. (optional, default `0`)
|
||||
* `options.subifd` **[number][14]** subIFD (Sub Image File Directory) to extract for OME-TIFF, defaults to main image. (optional, default `-1`)
|
||||
* `options.level` **[number][14]** level to extract from a multi-level input (OpenSlide), zero based. (optional, default `0`)
|
||||
* `options.animated` **[boolean][15]** Set to `true` to read all frames/pages of an animated image (equivalent of setting `pages` to `-1`). (optional, default `false`)
|
||||
* `options.raw` **[Object][13]?** describes raw pixel input image data. See `raw()` for pixel ordering.
|
||||
|
||||
* `options.raw.width` **[number][14]?** integral number of pixels wide.
|
||||
* `options.raw.height` **[number][14]?** integral number of pixels high.
|
||||
* `options.raw.channels` **[number][14]?** integral number of channels, between 1 and 4.
|
||||
* `options.raw.premultiplied` **[boolean][15]?** specifies that the raw input has already been premultiplied, set to `true`
|
||||
to avoid sharp premultiplying the image. (optional, default `false`)
|
||||
* `options.create` **[Object][13]?** describes a new image to be created.
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [input] | <code>Buffer</code> \| <code>ArrayBuffer</code> \| <code>Uint8Array</code> \| <code>Uint8ClampedArray</code> \| <code>Int8Array</code> \| <code>Uint16Array</code> \| <code>Int16Array</code> \| <code>Uint32Array</code> \| <code>Int32Array</code> \| <code>Float32Array</code> \| <code>Float64Array</code> \| <code>string</code> | | if present, can be a Buffer / ArrayBuffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or a TypedArray containing raw pixel image data, or a String containing the filesystem path to an JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file. JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present. |
|
||||
| [options] | <code>Object</code> | | if present, is an Object with optional attributes. |
|
||||
| [options.failOn] | <code>string</code> | <code>"'warning'"</code> | when to abort processing of invalid pixel data, one of (in order of sensitivity): 'none' (least), 'truncated', 'error' or 'warning' (most), highers level imply lower levels, invalid metadata will always abort. |
|
||||
| [options.limitInputPixels] | <code>number</code> \| <code>boolean</code> | <code>268402689</code> | Do not process input images where the number of pixels (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted. An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). |
|
||||
| [options.unlimited] | <code>boolean</code> | <code>false</code> | Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF). |
|
||||
| [options.sequentialRead] | <code>boolean</code> | <code>true</code> | Set this to `false` to use random access rather than sequential read. Some operations will do this automatically. |
|
||||
| [options.density] | <code>number</code> | <code>72</code> | number representing the DPI for vector images in the range 1 to 100000. |
|
||||
| [options.ignoreIcc] | <code>number</code> | <code>false</code> | should the embedded ICC profile, if any, be ignored. |
|
||||
| [options.pages] | <code>number</code> | <code>1</code> | Number of pages to extract for multi-page input (GIF, WebP, TIFF), use -1 for all pages. |
|
||||
| [options.page] | <code>number</code> | <code>0</code> | Page number to start extracting from for multi-page input (GIF, WebP, TIFF), zero based. |
|
||||
| [options.subifd] | <code>number</code> | <code>-1</code> | subIFD (Sub Image File Directory) to extract for OME-TIFF, defaults to main image. |
|
||||
| [options.level] | <code>number</code> | <code>0</code> | level to extract from a multi-level input (OpenSlide), zero based. |
|
||||
| [options.animated] | <code>boolean</code> | <code>false</code> | Set to `true` to read all frames/pages of an animated image (GIF, WebP, TIFF), equivalent of setting `pages` to `-1`. |
|
||||
| [options.raw] | <code>Object</code> | | describes raw pixel input image data. See `raw()` for pixel ordering. |
|
||||
| [options.raw.width] | <code>number</code> | | integral number of pixels wide. |
|
||||
| [options.raw.height] | <code>number</code> | | integral number of pixels high. |
|
||||
| [options.raw.channels] | <code>number</code> | | integral number of channels, between 1 and 4. |
|
||||
| [options.raw.premultiplied] | <code>boolean</code> | | specifies that the raw input has already been premultiplied, set to `true` to avoid sharp premultiplying the image. (optional, default `false`) |
|
||||
| [options.create] | <code>Object</code> | | describes a new image to be created. |
|
||||
| [options.create.width] | <code>number</code> | | integral number of pixels wide. |
|
||||
| [options.create.height] | <code>number</code> | | integral number of pixels high. |
|
||||
| [options.create.channels] | <code>number</code> | | integral number of channels, either 3 (RGB) or 4 (RGBA). |
|
||||
| [options.create.background] | <code>string</code> \| <code>Object</code> | | parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. |
|
||||
| [options.create.noise] | <code>Object</code> | | describes a noise to be created. |
|
||||
| [options.create.noise.type] | <code>string</code> | | type of generated noise, currently only `gaussian` is supported. |
|
||||
| [options.create.noise.mean] | <code>number</code> | | mean of pixels in generated noise. |
|
||||
| [options.create.noise.sigma] | <code>number</code> | | standard deviation of pixels in generated noise. |
|
||||
| [options.text] | <code>Object</code> | | describes a new text image to be created. |
|
||||
| [options.text.text] | <code>string</code> | | text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`. |
|
||||
| [options.text.font] | <code>string</code> | | font name to render with. |
|
||||
| [options.text.fontfile] | <code>string</code> | | absolute filesystem path to a font file that can be used by `font`. |
|
||||
| [options.text.width] | <code>number</code> | <code>0</code> | integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. |
|
||||
| [options.text.height] | <code>number</code> | <code>0</code> | integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. |
|
||||
| [options.text.align] | <code>string</code> | <code>"'left'"</code> | text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). |
|
||||
| [options.text.justify] | <code>boolean</code> | <code>false</code> | set this to true to apply justification to the text. |
|
||||
| [options.text.dpi] | <code>number</code> | <code>72</code> | the resolution (size) at which to render the text. Does not take effect if `height` is specified. |
|
||||
| [options.text.rgba] | <code>boolean</code> | <code>false</code> | set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. |
|
||||
| [options.text.spacing] | <code>number</code> | <code>0</code> | text line height in points. Will use the font line height if none is specified. |
|
||||
| [options.text.wrap] | <code>string</code> | <code>"'word'"</code> | word wrapping style when width is provided, one of: 'word', 'char', 'charWord' (prefer char, fallback to word) or 'none'. |
|
||||
|
||||
* `options.create.width` **[number][14]?** integral number of pixels wide.
|
||||
* `options.create.height` **[number][14]?** integral number of pixels high.
|
||||
* `options.create.channels` **[number][14]?** integral number of channels, either 3 (RGB) or 4 (RGBA).
|
||||
* `options.create.background` **([string][12] | [Object][13])?** parsed by the [color][16] module to extract values for red, green, blue and alpha.
|
||||
* `options.create.noise` **[Object][13]?** describes a noise to be created.
|
||||
|
||||
* `options.create.noise.type` **[string][12]?** type of generated noise, currently only `gaussian` is supported.
|
||||
* `options.create.noise.mean` **[number][14]?** mean of pixels in generated noise.
|
||||
* `options.create.noise.sigma` **[number][14]?** standard deviation of pixels in generated noise.
|
||||
* `options.text` **[Object][13]?** describes a new text image to be created.
|
||||
|
||||
* `options.text.text` **[string][12]?** text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
|
||||
* `options.text.font` **[string][12]?** font name to render with.
|
||||
* `options.text.fontfile` **[string][12]?** absolute filesystem path to a font file that can be used by `font`.
|
||||
* `options.text.width` **[number][14]** integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries. (optional, default `0`)
|
||||
* `options.text.height` **[number][14]** integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0. (optional, default `0`)
|
||||
* `options.text.align` **[string][12]** text alignment (`'left'`, `'centre'`, `'center'`, `'right'`). (optional, default `'left'`)
|
||||
* `options.text.justify` **[boolean][15]** set this to true to apply justification to the text. (optional, default `false`)
|
||||
* `options.text.dpi` **[number][14]** the resolution (size) at which to render the text. Does not take effect if `height` is specified. (optional, default `72`)
|
||||
* `options.text.rgba` **[boolean][15]** set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`. (optional, default `false`)
|
||||
* `options.text.spacing` **[number][14]** text line height in points. Will use the font line height if none is specified. (optional, default `0`)
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp('input.jpg')
|
||||
.resize(300, 200)
|
||||
.toFile('output.jpg', function(err) {
|
||||
@@ -74,8 +69,8 @@ sharp('input.jpg')
|
||||
// containing a scaled and cropped version of input.jpg
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Read image data from readableStream,
|
||||
// resize to 300 pixels wide,
|
||||
// emit an 'info' event with calculated dimensions
|
||||
@@ -87,8 +82,8 @@ var transformer = sharp()
|
||||
});
|
||||
readableStream.pipe(transformer).pipe(writableStream);
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Create a blank 300x200 PNG image of semi-transluent red pixels
|
||||
sharp({
|
||||
create: {
|
||||
@@ -102,13 +97,13 @@ sharp({
|
||||
.toBuffer()
|
||||
.then( ... );
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Convert an animated GIF to an animated WebP
|
||||
await sharp('in.gif', { animated: true }).toFile('out.webp');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Read a raw array of pixels and save it to a png
|
||||
const input = Uint8Array.from([255, 255, 255, 0, 0, 0]); // or Uint8ClampedArray
|
||||
const image = sharp(input, {
|
||||
@@ -122,8 +117,8 @@ const image = sharp(input, {
|
||||
});
|
||||
await image.toFile('my-two-pixels.png');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Generate RGB Gaussian noise
|
||||
await sharp({
|
||||
create: {
|
||||
@@ -138,8 +133,8 @@ await sharp({
|
||||
}
|
||||
}).toFile('noise.png');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Generate an image from text
|
||||
await sharp({
|
||||
text: {
|
||||
@@ -149,8 +144,8 @@ await sharp({
|
||||
}
|
||||
}).toFile('text_bw.png');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Generate an rgba image from text using pango markup and font
|
||||
await sharp({
|
||||
text: {
|
||||
@@ -162,19 +157,15 @@ await sharp({
|
||||
}).toFile('text_rgba.png');
|
||||
```
|
||||
|
||||
* Throws **[Error][17]** Invalid parameters
|
||||
|
||||
Returns **[Sharp][18]** 
|
||||
|
||||
## clone
|
||||
|
||||
Take a "snapshot" of the Sharp instance, returning a new instance.
|
||||
Cloned instances inherit the input of their parent instance.
|
||||
This allows multiple output Streams and therefore multiple processing pipelines to share a single input Stream.
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const pipeline = sharp().rotate();
|
||||
pipeline.clone().resize(800, 600).pipe(firstWritableStream);
|
||||
pipeline.clone().extract({ left: 20, top: 20, width: 100, height: 100 }).pipe(secondWritableStream);
|
||||
@@ -182,8 +173,8 @@ readableStream.pipe(pipeline);
|
||||
// firstWritableStream receives auto-rotated, resized readableStream
|
||||
// secondWritableStream receives auto-rotated, extracted region of readableStream
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Create a pipeline that will download an image, resize it and format it to different files
|
||||
// Using Promises to know when the pipeline is complete
|
||||
const fs = require("fs");
|
||||
@@ -228,42 +219,4 @@ Promise.all(promises)
|
||||
fs.unlinkSync("optimized-500.webp");
|
||||
} catch (e) {}
|
||||
});
|
||||
```
|
||||
|
||||
Returns **[Sharp][18]** 
|
||||
|
||||
[1]: http://nodejs.org/api/stream.html#stream_class_stream_duplex
|
||||
|
||||
[2]: https://nodejs.org/api/buffer.html
|
||||
|
||||
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array
|
||||
|
||||
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray
|
||||
|
||||
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Int8Array
|
||||
|
||||
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array
|
||||
|
||||
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Int16Array
|
||||
|
||||
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array
|
||||
|
||||
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Int32Array
|
||||
|
||||
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Float32Array
|
||||
|
||||
[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Float64Array
|
||||
|
||||
[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
|
||||
|
||||
[13]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
|
||||
[14]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
|
||||
|
||||
[15]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
|
||||
|
||||
[16]: https://www.npmjs.org/package/color
|
||||
|
||||
[17]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
|
||||
|
||||
[18]: #sharp
|
||||
```
|
||||
@@ -1,57 +1,57 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## metadata
|
||||
|
||||
Fast access to (uncached) image metadata without decoding any compressed pixel data.
|
||||
|
||||
This is taken from the header of the input image.
|
||||
It does not include operations, such as resize, to be applied to the output image.
|
||||
This is read from the header of the input image.
|
||||
It does not take into consideration any operations to be applied to the output image,
|
||||
such as resize or rotate.
|
||||
|
||||
Dimensions in the response will respect the `page` and `pages` properties of the
|
||||
[constructor parameters][1].
|
||||
[constructor parameters](/api-constructor#parameters).
|
||||
|
||||
A `Promise` is returned when `callback` is not provided.
|
||||
|
||||
* `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
|
||||
* `size`: Total size of image in bytes, for Stream and Buffer input only
|
||||
* `width`: Number of pixels wide (EXIF orientation is not taken into consideration, see example below)
|
||||
* `height`: Number of pixels high (EXIF orientation is not taken into consideration, see example below)
|
||||
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][2]
|
||||
* `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
|
||||
* `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...][3]
|
||||
* `density`: Number of pixels per inch (DPI), if present
|
||||
* `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
|
||||
* `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
|
||||
* `pages`: Number of pages/frames contained within the image, with support for TIFF, HEIF, PDF, animated GIF and animated WebP
|
||||
* `pageHeight`: Number of pixels high each page in a multi-page image will be.
|
||||
* `loop`: Number of times to loop an animated image, zero refers to a continuous loop.
|
||||
* `delay`: Delay in ms between each page in an animated image, provided as an array of integers.
|
||||
* `pagePrimary`: Number of the primary page in a HEIF image
|
||||
* `levels`: Details of each level in a multi-level image provided as an array of objects, requires libvips compiled with support for OpenSlide
|
||||
* `subifds`: Number of Sub Image File Directories in an OME-TIFF image
|
||||
* `background`: Default background colour, if present, for PNG (bKGD) and GIF images, either an RGB Object or a single greyscale value
|
||||
* `compression`: The encoder used to compress an HEIF file, `av1` (AVIF) or `hevc` (HEIC)
|
||||
* `resolutionUnit`: The unit of resolution (density), either `inch` or `cm`, if present
|
||||
* `hasProfile`: Boolean indicating the presence of an embedded ICC profile
|
||||
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
|
||||
* `orientation`: Number value of the EXIF Orientation header, if present
|
||||
* `exif`: Buffer containing raw EXIF data, if present
|
||||
* `icc`: Buffer containing raw [ICC][4] profile data, if present
|
||||
* `iptc`: Buffer containing raw IPTC data, if present
|
||||
* `xmp`: Buffer containing raw XMP data, if present
|
||||
* `tifftagPhotoshop`: Buffer containing raw TIFFTAG\_PHOTOSHOP data, if present
|
||||
- `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
|
||||
- `size`: Total size of image in bytes, for Stream and Buffer input only
|
||||
- `width`: Number of pixels wide (EXIF orientation is not taken into consideration, see example below)
|
||||
- `height`: Number of pixels high (EXIF orientation is not taken into consideration, see example below)
|
||||
- `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://www.libvips.org/API/current/VipsImage.html#VipsInterpretation)
|
||||
- `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
|
||||
- `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...](https://www.libvips.org/API/current/VipsImage.html#VipsBandFormat)
|
||||
- `density`: Number of pixels per inch (DPI), if present
|
||||
- `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
|
||||
- `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
|
||||
- `pages`: Number of pages/frames contained within the image, with support for TIFF, HEIF, PDF, animated GIF and animated WebP
|
||||
- `pageHeight`: Number of pixels high each page in a multi-page image will be.
|
||||
- `loop`: Number of times to loop an animated image, zero refers to a continuous loop.
|
||||
- `delay`: Delay in ms between each page in an animated image, provided as an array of integers.
|
||||
- `pagePrimary`: Number of the primary page in a HEIF image
|
||||
- `levels`: Details of each level in a multi-level image provided as an array of objects, requires libvips compiled with support for OpenSlide
|
||||
- `subifds`: Number of Sub Image File Directories in an OME-TIFF image
|
||||
- `background`: Default background colour, if present, for PNG (bKGD) and GIF images, either an RGB Object or a single greyscale value
|
||||
- `compression`: The encoder used to compress an HEIF file, `av1` (AVIF) or `hevc` (HEIC)
|
||||
- `resolutionUnit`: The unit of resolution (density), either `inch` or `cm`, if present
|
||||
- `hasProfile`: Boolean indicating the presence of an embedded ICC profile
|
||||
- `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
|
||||
- `orientation`: Number value of the EXIF Orientation header, if present
|
||||
- `exif`: Buffer containing raw EXIF data, if present
|
||||
- `icc`: Buffer containing raw [ICC](https://www.npmjs.com/package/icc) profile data, if present
|
||||
- `iptc`: Buffer containing raw IPTC data, if present
|
||||
- `xmp`: Buffer containing raw XMP data, if present
|
||||
- `tifftagPhotoshop`: Buffer containing raw TIFFTAG_PHOTOSHOP data, if present
|
||||
- `formatMagick`: String containing format for images loaded via *magick
|
||||
|
||||
### Parameters
|
||||
|
||||
* `callback` **[Function][5]?** called with the arguments `(err, metadata)`
|
||||
|
||||
### Examples
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [callback] | <code>function</code> | called with the arguments `(err, metadata)` |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const metadata = await sharp(input).metadata();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const image = sharp(inputJpg);
|
||||
image
|
||||
.metadata()
|
||||
@@ -65,8 +65,8 @@ image
|
||||
// data contains a WebP image half the width and height of the original JPEG
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Based on EXIF rotation metadata, get the right-side-up width and height:
|
||||
|
||||
const size = getNormalSize(await sharp(input).metadata());
|
||||
@@ -78,39 +78,38 @@ function getNormalSize({ width, height, orientation }) {
|
||||
}
|
||||
```
|
||||
|
||||
Returns **([Promise][6]<[Object][7]> | Sharp)** 
|
||||
|
||||
## stats
|
||||
|
||||
Access to pixel-derived image statistics for every channel in the image.
|
||||
A `Promise` is returned when `callback` is not provided.
|
||||
|
||||
* `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
|
||||
* `min` (minimum value in the channel)
|
||||
* `max` (maximum value in the channel)
|
||||
* `sum` (sum of all values in a channel)
|
||||
* `squaresSum` (sum of squared values in a channel)
|
||||
* `mean` (mean of the values in a channel)
|
||||
* `stdev` (standard deviation for the values in a channel)
|
||||
* `minX` (x-coordinate of one of the pixels where the minimum lies)
* `minY` (y-coordinate of one of the pixels where the minimum lies)
* `maxX` (x-coordinate of one of the pixels where the maximum lies)
* `maxY` (y-coordinate of one of the pixels where the maximum lies)
|
||||
* `isOpaque`: Is the image fully opaque? Will be `true` if the image has no alpha channel or if every pixel is fully opaque.
|
||||
* `entropy`: Histogram-based estimation of greyscale entropy, discarding alpha channel if any.
|
||||
* `sharpness`: Estimation of greyscale sharpness based on the standard deviation of a Laplacian convolution, discarding alpha channel if any.
|
||||
* `dominant`: Object containing most dominant sRGB colour based on a 4096-bin 3D histogram.
|
||||
- `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
- `min` (minimum value in the channel)
- `max` (maximum value in the channel)
- `sum` (sum of all values in a channel)
- `squaresSum` (sum of squared values in a channel)
- `mean` (mean of the values in a channel)
- `stdev` (standard deviation for the values in a channel)
- `minX` (x-coordinate of one of the pixels where the minimum lies)
- `minY` (y-coordinate of one of the pixels where the minimum lies)
- `maxX` (x-coordinate of one of the pixels where the maximum lies)
- `maxY` (y-coordinate of one of the pixels where the maximum lies)
- `isOpaque`: Is the image fully opaque? Will be `true` if the image has no alpha channel or if every pixel is fully opaque.
- `entropy`: Histogram-based estimation of greyscale entropy, discarding alpha channel if any.
- `sharpness`: Estimation of greyscale sharpness based on the standard deviation of a Laplacian convolution, discarding alpha channel if any.
- `dominant`: Object containing most dominant sRGB colour based on a 4096-bin 3D histogram.
|
||||
|
||||
**Note**: Statistics are derived from the original input image. Any operations performed on the image must first be
|
||||
written to a buffer in order to run `stats` on the result (see third example).
|
||||
|
||||
### Parameters
|
||||
|
||||
* `callback` **[Function][5]?** called with the arguments `(err, stats)`
|
||||
|
||||
### Examples
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [callback] | <code>function</code> | called with the arguments `(err, stats)` |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const image = sharp(inputJpg);
|
||||
image
|
||||
.stats()
|
||||
@@ -118,32 +117,16 @@ image
|
||||
// stats contains the channel-wise statistics array and the isOpaque value
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const { entropy, sharpness, dominant } = await sharp(input).stats();
|
||||
const { r, g, b } = dominant;
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const image = sharp(input);
|
||||
// store intermediate result
|
||||
const part = await image.extract(region).toBuffer();
|
||||
// create new instance to obtain statistics of extracted region
|
||||
const stats = await sharp(part).stats();
|
||||
```
|
||||
|
||||
Returns **[Promise][6]<[Object][7]>** 
|
||||
|
||||
[1]: /api-constructor#parameters
|
||||
|
||||
[2]: https://www.libvips.org/API/current/VipsImage.html#VipsInterpretation
|
||||
|
||||
[3]: https://www.libvips.org/API/current/VipsImage.html#VipsBandFormat
|
||||
|
||||
[4]: https://www.npmjs.com/package/icc
|
||||
|
||||
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function
|
||||
|
||||
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise
|
||||
|
||||
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
```
|
||||
@@ -1,7 +1,4 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## rotate
|
||||
|
||||
Rotate the output image by either an explicit angle
|
||||
or auto-orient based on the EXIF `Orientation` tag.
|
||||
|
||||
@@ -22,16 +19,20 @@ Previous calls to `rotate` in the same pipeline will be ignored.
|
||||
Method order is important when rotating, resizing and/or extracting regions,
|
||||
for example `.rotate(x).extract(y)` will produce a different result to `.extract(y).rotate(x)`.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `angle` **[number][1]** angle of rotation. (optional, default `auto`)
|
||||
* `options` **[Object][2]?** if present, is an Object with optional attributes.
|
||||
**Throws**:
|
||||
|
||||
* `options.background` **([string][3] | [Object][2])** parsed by the [color][4] module to extract values for red, green, blue and alpha. (optional, default `"#000000"`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [angle] | <code>number</code> | <code>auto</code> | angle of rotation. |
|
||||
| [options] | <code>Object</code> | | if present, is an Object with optional attributes. |
|
||||
| [options.background] | <code>string</code> \| <code>Object</code> | <code>"\"#000000\""</code> | parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const pipeline = sharp()
|
||||
.rotate()
|
||||
.resize(null, 200)
|
||||
@@ -42,8 +43,8 @@ const pipeline = sharp()
|
||||
});
|
||||
readableStream.pipe(pipeline);
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const rotateThenResize = await sharp(input)
|
||||
.rotate(90)
|
||||
.resize({ width: 16, height: 8, fit: 'fill' })
|
||||
@@ -54,46 +55,40 @@ const resizeThenRotate = await sharp(input)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## flip
|
||||
|
||||
Flip the image about the vertical Y axis. This always occurs before rotation, if any.
|
||||
The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `flip` **[Boolean][6]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default |
|
||||
| --- | --- | --- |
|
||||
| [flip] | <code>Boolean</code> | <code>true</code> |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input).flip().toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## flop
|
||||
|
||||
Flop the image about the horizontal X axis. This always occurs before rotation, if any.
|
||||
The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `flop` **[Boolean][6]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default |
|
||||
| --- | --- | --- |
|
||||
| [flop] | <code>Boolean</code> | <code>true</code> |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input).flop().toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## affine
|
||||
|
||||
Perform an affine transform on an image. This operation will always occur after resizing, extraction and rotation, if any.
|
||||
|
||||
You must provide an array of length 4 or a 2x2 affine transformation matrix.
|
||||
@@ -101,31 +96,34 @@ By default, new pixels are filled with a black background. You can provide a bac
|
||||
A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolator` Object e.g. `sharp.interpolator.nohalo`.
|
||||
|
||||
In the case of a 2x2 matrix, the transform is:
|
||||
|
||||
* X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
|
||||
* Y = `matrix[1, 0]` \* (x + `idx`) + `matrix[1, 1]` \* (y + `idy`) + `ody`
|
||||
- X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
|
||||
- Y = `matrix[1, 0]` \* (x + `idx`) + `matrix[1, 1]` \* (y + `idy`) + `ody`
|
||||
|
||||
where:
|
||||
- x and y are the coordinates in input image.
|
||||
- X and Y are the coordinates in output image.
|
||||
- (0,0) is the upper left corner.
|
||||
|
||||
* x and y are the coordinates in input image.
|
||||
* X and Y are the coordinates in output image.
|
||||
* (0,0) is the upper left corner.
|
||||
|
||||
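
As a quick sanity check of the coordinate mapping above (a minimal sketch, assuming `input` is any valid sharp input): the identity matrix with all offsets left at their defaults maps every (x, y) to itself, so the output pixels are unchanged.

```js
// Identity transform: X = 1*x + 0*y, Y = 0*x + 1*y, so the image passes through unchanged
const unchanged = await sharp(input)
  .affine([[1, 0], [0, 1]])
  .toBuffer();
```
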
### Parameters
|
||||
**Throws**:
|
||||
|
||||
* `matrix` **([Array][7]<[Array][7]<[number][1]>> | [Array][7]<[number][1]>)** affine transformation matrix
|
||||
* `options` **[Object][2]?** if present, is an Object with optional attributes.
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
* `options.background` **([String][3] | [Object][2])** parsed by the [color][4] module to extract values for red, green, blue and alpha. (optional, default `"#000000"`)
|
||||
* `options.idx` **[Number][1]** input horizontal offset (optional, default `0`)
|
||||
* `options.idy` **[Number][1]** input vertical offset (optional, default `0`)
|
||||
* `options.odx` **[Number][1]** output horizontal offset (optional, default `0`)
|
||||
* `options.ody` **[Number][1]** output vertical offset (optional, default `0`)
|
||||
* `options.interpolator` **[String][3]** interpolator (optional, default `sharp.interpolators.bicubic`)
|
||||
**Since**: 0.27.0
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| matrix | <code>Array.<Array.<number>></code> \| <code>Array.<number></code> | | affine transformation matrix |
|
||||
| [options] | <code>Object</code> | | if present, is an Object with optional attributes. |
|
||||
| [options.background] | <code>String</code> \| <code>Object</code> | <code>"#000000"</code> | parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. |
|
||||
| [options.idx] | <code>Number</code> | <code>0</code> | input horizontal offset |
|
||||
| [options.idy] | <code>Number</code> | <code>0</code> | input vertical offset |
|
||||
| [options.odx] | <code>Number</code> | <code>0</code> | output horizontal offset |
|
||||
| [options.ody] | <code>Number</code> | <code>0</code> | output vertical offset |
|
||||
| [options.interpolator] | <code>String</code> | <code>sharp.interpolators.bicubic</code> | interpolator |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const pipeline = sharp()
|
||||
.affine([[1, 0.3], [0.1, 0.7]], {
|
||||
background: 'white',
|
||||
@@ -140,47 +138,45 @@ inputStream
|
||||
.pipe(pipeline);
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.27.0
|
||||
|
||||
## sharpen
|
||||
|
||||
Sharpen the image.
|
||||
|
||||
When used without parameters, performs a fast, mild sharpen of the output image.
|
||||
|
||||
When a `sigma` is provided, performs a slower, more accurate sharpen of the L channel in the LAB colour space.
|
||||
Separate control over the level of sharpening in "flat" and "jagged" areas is available.
|
||||
Fine-grained control over the level of sharpening in "flat" (m1) and "jagged" (m2) areas is available.
|
||||
|
||||
See [libvips sharpen][8] operation.
|
||||
See [libvips sharpen](https://www.libvips.org/API/current/libvips-convolution.html#vips-sharpen) operation.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **([Object][2] | [number][1])?** if present, is an Object with attributes or (deprecated) a number for `options.sigma`.
|
||||
**Throws**:
|
||||
|
||||
* `options.sigma` **[number][1]?** the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.
|
||||
* `options.m1` **[number][1]** the level of sharpening to apply to "flat" areas. (optional, default `1.0`)
|
||||
* `options.m2` **[number][1]** the level of sharpening to apply to "jagged" areas. (optional, default `2.0`)
|
||||
* `options.x1` **[number][1]** threshold between "flat" and "jagged" (optional, default `2.0`)
|
||||
* `options.y2` **[number][1]** maximum amount of brightening. (optional, default `10.0`)
|
||||
* `options.y3` **[number][1]** maximum amount of darkening. (optional, default `20.0`)
|
||||
* `flat` **[number][1]?** (deprecated) see `options.m1`.
|
||||
* `jagged` **[number][1]?** (deprecated) see `options.m2`.
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> \| <code>number</code> | | if present, is an Object with attributes |
|
||||
| [options.sigma] | <code>number</code> | | the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`, between 0.000001 and 10 |
|
||||
| [options.m1] | <code>number</code> | <code>1.0</code> | the level of sharpening to apply to "flat" areas, between 0 and 1000000 |
|
||||
| [options.m2] | <code>number</code> | <code>2.0</code> | the level of sharpening to apply to "jagged" areas, between 0 and 1000000 |
|
||||
| [options.x1] | <code>number</code> | <code>2.0</code> | threshold between "flat" and "jagged", between 0 and 1000000 |
|
||||
| [options.y2] | <code>number</code> | <code>10.0</code> | maximum amount of brightening, between 0 and 1000000 |
|
||||
| [options.y3] | <code>number</code> | <code>20.0</code> | maximum amount of darkening, between 0 and 1000000 |
|
||||
| [flat] | <code>number</code> | | (deprecated) see `options.m1`. |
|
||||
| [jagged] | <code>number</code> | | (deprecated) see `options.m2`. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input).sharpen().toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input).sharpen({ sigma: 2 }).toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input)
|
||||
.sharpen({
|
||||
sigma: 2,
|
||||
@@ -193,87 +189,83 @@ const data = await sharp(input)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## median
|
||||
|
||||
Apply median filter.
|
||||
When used without parameters the default window is 3x3.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `size` **[number][1]** square mask size: size x size (optional, default `3`)
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [size] | <code>number</code> | <code>3</code> | square mask size: size x size |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input).median().toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input).median(5).toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## blur
|
||||
|
||||
Blur the image.
|
||||
|
||||
When used without parameters, performs a fast 3x3 box blur (equivalent to a box linear filter).
|
||||
|
||||
When a `sigma` is provided, performs a slower, more accurate Gaussian blur.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `sigma` **[number][1]?** a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [sigma] | <code>number</code> | a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const boxBlurred = await sharp(input)
|
||||
.blur()
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const gaussianBlurred = await sharp(input)
|
||||
.blur(5)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## flatten
|
||||
|
||||
Merge alpha transparency channel, if any, with a background, then remove the alpha channel.
|
||||
|
||||
See also [removeAlpha][9].
|
||||
See also [removeAlpha](/api-channel#removealpha).
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][2]?** 
|
||||
|
||||
* `options.background` **([string][3] | [Object][2])** background colour, parsed by the [color][4] module, defaults to black. (optional, default `{r:0,g:0,b:0}`)
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.background] | <code>string</code> \| <code>Object</code> | <code>"{r: 0, g: 0, b: 0}"</code> | background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black. |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
await sharp(rgbaInput)
|
||||
.flatten({ background: '#F0A703' })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## gamma

Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of `1/gamma`
then increasing the encoding (brighten) post-resize at a factor of `gamma`.
This can improve the perceived brightness of a resized image in non-linear colour spaces.
@@ -282,95 +274,114 @@ when applying a gamma correction.

Supply a second argument to use a different output gamma value, otherwise the first value is used in both cases.

### Parameters

* `gamma` **[number][1]** value between 1.0 and 3.0. (optional, default `2.2`)
* `gammaOut` **[number][1]?** value between 1.0 and 3.0. (optional, defaults to same as `gamma`)

**Throws**:

<!---->

- <code>Error</code> Invalid parameters

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| [gamma] | <code>number</code> | <code>2.2</code> | value between 1.0 and 3.0. |
| [gammaOut] | <code>number</code> | | value between 1.0 and 3.0. (optional, defaults to same as `gamma`) |

* Throws **[Error][5]** Invalid parameters

Returns **Sharp**
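
A minimal usage sketch of the two-argument form documented above, assuming `input` is any valid sharp input:

```js
// Resize with the default gamma of 2.2 applied pre- and post-resize
const corrected = await sharp(input)
  .resize(200)
  .gamma()
  .toBuffer();

// Use a different output gamma value
const custom = await sharp(input)
  .resize(200)
  .gamma(2.2, 3.0)
  .toBuffer();
```
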
## negate
|
||||
|
||||
Produce the "negative" of the image.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][2]?** 
|
||||
|
||||
* `options.alpha` **[Boolean][6]** Whether or not to negate any alpha channel (optional, default `true`)
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.alpha] | <code>Boolean</code> | <code>true</code> | Whether or not to negate any alpha channel |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.negate()
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.negate({ alpha: false })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## normalise
|
||||
Enhance output image contrast by stretching its luminance to cover a full dynamic range.
|
||||
|
||||
Enhance output image contrast by stretching its luminance to cover the full dynamic range.
|
||||
Uses a histogram-based approach, taking a default range of 1% to 99% to reduce sensitivity to noise at the extremes.
|
||||
|
||||
### Parameters
|
||||
Luminance values below the `lower` percentile will be underexposed by clipping to zero.
|
||||
Luminance values above the `upper` percentile will be overexposed by clipping to the max pixel value.
|
||||
|
||||
* `normalise` **[Boolean][6]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
const output = await sharp(input).normalise().toBuffer();
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.lower] | <code>number</code> | <code>1</code> | Percentile below which luminance values will be underexposed. |
|
||||
| [options.upper] | <code>number</code> | <code>99</code> | Percentile above which luminance values will be overexposed. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.normalise()
|
||||
.toBuffer();
|
||||
```
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.normalise({ lower: 0, upper: 100 })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## normalize
|
||||
|
||||
Alternative spelling of normalise.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `normalize` **[Boolean][6]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.lower] | <code>number</code> | <code>1</code> | Percentile below which luminance values will be underexposed. |
|
||||
| [options.upper] | <code>number</code> | <code>99</code> | Percentile above which luminance values will be overexposed. |
|
||||
|
||||
```javascript
|
||||
const output = await sharp(input).normalize().toBuffer();
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.normalize()
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## clahe
|
||||
|
||||
Perform contrast limiting adaptive histogram equalization
|
||||
[CLAHE][10].
|
||||
[CLAHE](https://en.wikipedia.org/wiki/Adaptive_histogram_equalization#Contrast_Limited_AHE).
|
||||
|
||||
This will, in general, enhance the clarity of the image by bringing out darker details.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][2]** 
|
||||
**Throws**:
|
||||
|
||||
* `options.width` **[number][1]** integer width of the region in pixels.
|
||||
* `options.height` **[number][1]** integer height of the region in pixels.
|
||||
* `options.maxSlope` **[number][1]** maximum value for the slope of the
|
||||
cumulative histogram. A value of 0 disables contrast limiting. Valid values
|
||||
are integers in the range 0-100 (inclusive) (optional, default `3`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
**Since**: 0.28.3
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| options | <code>Object</code> | | |
|
||||
| options.width | <code>number</code> | | Integral width of the search window, in pixels. |
|
||||
| options.height | <code>number</code> | | Integral height of the search window, in pixels. |
|
||||
| [options.maxSlope] | <code>number</code> | <code>3</code> | Integral level of brightening, between 0 and 100, where 0 disables contrast limiting. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const output = await sharp(input)
|
||||
.clahe({
|
||||
width: 3,
|
||||
@@ -379,31 +390,27 @@ const output = await sharp(input)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.28.3
|
||||
|
||||
## convolve
|
||||
|
||||
Convolve the image with the specified kernel.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `kernel` **[Object][2]** 
|
||||
**Throws**:
|
||||
|
||||
* `kernel.width` **[number][1]** width of the kernel in pixels.
|
||||
* `kernel.height` **[number][1]** height of the kernel in pixels.
|
||||
* `kernel.kernel` **[Array][7]<[number][1]>** Array of length `width*height` containing the kernel values.
|
||||
* `kernel.scale` **[number][1]** the scale of the kernel in pixels. (optional, default `sum`)
|
||||
* `kernel.offset` **[number][1]** the offset of the kernel in pixels. (optional, default `0`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| kernel | <code>Object</code> | | |
|
||||
| kernel.width | <code>number</code> | | width of the kernel in pixels. |
|
||||
| kernel.height | <code>number</code> | | height of the kernel in pixels. |
|
||||
| kernel.kernel | <code>Array.<number></code> | | Array of length `width*height` containing the kernel values. |
|
||||
| [kernel.scale] | <code>number</code> | <code>sum</code> | the scale of the kernel in pixels. |
|
||||
| [kernel.offset] | <code>number</code> | <code>0</code> | the offset of the kernel in pixels. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.convolve({
|
||||
width: 3,
|
||||
@@ -417,74 +424,74 @@ sharp(input)
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## threshold

Any pixel value greater than or equal to the threshold value will be set to 255, otherwise it will be set to 0.

### Parameters

* `threshold` **[number][1]** a value in the range 0-255 representing the level at which the threshold will be applied. (optional, default `128`)
* `options` **[Object][2]?**
* `options.greyscale` **[Boolean][6]** convert to single channel greyscale. (optional, default `true`)
* `options.grayscale` **[Boolean][6]** alternative spelling for greyscale. (optional, default `true`)

**Throws**:

<!---->

- <code>Error</code> Invalid parameters

* Throws **[Error][5]** Invalid parameters

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| [threshold] | <code>number</code> | <code>128</code> | a value in the range 0-255 representing the level at which the threshold will be applied. |
| [options] | <code>Object</code> | | |
| [options.greyscale] | <code>Boolean</code> | <code>true</code> | convert to single channel greyscale. |
| [options.grayscale] | <code>Boolean</code> | <code>true</code> | alternative spelling for greyscale. |

Returns **Sharp**
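
A minimal usage sketch of the options documented above, assuming `input` is any valid sharp input:

```js
// Greyscale output: every pixel >= 128 becomes 255, everything else becomes 0
const binary = await sharp(input)
  .threshold(128)
  .toBuffer();

// Threshold each channel independently, without converting to greyscale
const perChannel = await sharp(input)
  .threshold(200, { greyscale: false })
  .toBuffer();
```
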
## boolean

Perform a bitwise boolean operation with operand image.

This operation creates an output image where each pixel is the result of
the selected bitwise boolean `operation` between the corresponding pixels of the input images.

### Parameters

* `operand` **([Buffer][11] | [string][3])** Buffer containing image data or string containing the path to an image file.
* `operator` **[string][3]** one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively.
* `options` **[Object][2]?**
* `options.raw` **[Object][2]?** describes operand when using raw pixel data.
* `options.raw.width` **[number][1]?**
* `options.raw.height` **[number][1]?**
* `options.raw.channels` **[number][1]?**

**Throws**:

<!---->

- <code>Error</code> Invalid parameters

| Param | Type | Description |
| --- | --- | --- |
| operand | <code>Buffer</code> \| <code>string</code> | Buffer containing image data or string containing the path to an image file. |
| operator | <code>string</code> | one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively. |
| [options] | <code>Object</code> | |
| [options.raw] | <code>Object</code> | describes operand when using raw pixel data. |
| [options.raw.width] | <code>number</code> | |
| [options.raw.height] | <code>number</code> | |
| [options.raw.channels] | <code>number</code> | |

* Throws **[Error][5]** Invalid parameters

Returns **Sharp**
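
A minimal usage sketch; `operandImage`, `rawOperandBuffer` and the raw dimensions below are placeholder assumptions for illustration, not values from this documentation:

```js
// Bitwise AND of each input pixel with the corresponding operand pixel
const anded = await sharp(input)
  .boolean(operandImage, 'and')
  .toBuffer();

// Operand supplied as raw pixel data, so its dimensions must be described
const eored = await sharp(input)
  .boolean(rawOperandBuffer, 'eor', { raw: { width: 100, height: 100, channels: 3 } })
  .toBuffer();
```
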
## linear
|
||||
|
||||
Apply the linear formula `a` \* input + `b` to the image to adjust image levels.
|
||||
Apply the linear formula `a` * input + `b` to the image to adjust image levels.
|
||||
|
||||
When a single number is provided, it will be used for all image channels.
|
||||
When an array of numbers is provided, the array length must match the number of channels.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `a` **([number][1] | [Array][7]<[number][1]>)** multiplier (optional, default `[]`)
|
||||
* `b` **([number][1] | [Array][7]<[number][1]>)** offset (optional, default `[]`)
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [a] | <code>number</code> \| <code>Array.<number></code> | <code>[]</code> | multiplier |
|
||||
| [b] | <code>number</code> \| <code>Array.<number></code> | <code>[]</code> | offset |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
await sharp(input)
|
||||
.linear(0.5, 2)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
await sharp(rgbInput)
|
||||
.linear(
|
||||
[0.25, 0.5, 0.75],
|
||||
@@ -493,21 +500,23 @@ await sharp(rgbInput)
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## recomb
|
||||
|
||||
Recomb the image with the specified matrix.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `inputMatrix` **[Array][7]<[Array][7]<[number][1]>>** 3x3 Recombination matrix
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
**Since**: 0.21.1
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| inputMatrix | <code>Array.<Array.<number>></code> | 3x3 Recombination matrix |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.recomb([
|
||||
[0.3588, 0.7044, 0.1368],
|
||||
@@ -521,32 +530,25 @@ sharp(input)
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][5]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.21.1
|
||||
|
||||
## modulate
|
||||
|
||||
Transforms the image using brightness, saturation, hue rotation, and lightness.
|
||||
Brightness and lightness both operate on luminance, with the difference being that
|
||||
brightness is multiplicative whereas lightness is additive.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][2]?** 
|
||||
**Since**: 0.22.1
|
||||
|
||||
* `options.brightness` **[number][1]?** Brightness multiplier
|
||||
* `options.saturation` **[number][1]?** Saturation multiplier
|
||||
* `options.hue` **[number][1]?** Degrees for hue rotation
|
||||
* `options.lightness` **[number][1]?** Lightness addend
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [options] | <code>Object</code> | |
|
||||
| [options.brightness] | <code>number</code> | Brightness multiplier |
|
||||
| [options.saturation] | <code>number</code> | Saturation multiplier |
|
||||
| [options.hue] | <code>number</code> | Degrees for hue rotation |
|
||||
| [options.lightness] | <code>number</code> | Lightness addend |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// increase brightness by a factor of 2
|
||||
const output = await sharp(input)
|
||||
.modulate({
|
||||
@@ -554,8 +556,8 @@ const output = await sharp(input)
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// hue-rotate by 180 degrees
|
||||
const output = await sharp(input)
|
||||
.modulate({
|
||||
@@ -563,8 +565,8 @@ const output = await sharp(input)
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// increase lightness by +50
|
||||
const output = await sharp(input)
|
||||
.modulate({
|
||||
@@ -572,8 +574,8 @@ const output = await sharp(input)
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// decrease brightness and saturation while also hue-rotating by 90 degrees
|
||||
const output = await sharp(input)
|
||||
.modulate({
|
||||
@@ -582,32 +584,4 @@ const output = await sharp(input)
|
||||
hue: 90,
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.22.1
|
||||
|
||||
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
|
||||
|
||||
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
|
||||
|
||||
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
|
||||
|
||||
[4]: https://www.npmjs.org/package/color
|
||||
|
||||
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
|
||||
|
||||
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
|
||||
|
||||
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
|
||||
|
||||
[8]: https://www.libvips.org/API/current/libvips-convolution.html#vips-sharpen
|
||||
|
||||
[9]: /api-channel#removealpha
|
||||
|
||||
[10]: https://en.wikipedia.org/wiki/Adaptive_histogram_equalization#Contrast_Limited_AHE
|
||||
|
||||
[11]: https://nodejs.org/api/buffer.html
|
||||
```
|
||||
@@ -1,7 +1,4 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## toFile
|
||||
|
||||
Write output image data to a file.
|
||||
|
||||
If an explicit output format is not selected, it will be inferred from the extension,
|
||||
@@ -9,92 +6,90 @@ with JPEG, PNG, WebP, AVIF, TIFF, GIF, DZI, and libvips' V format supported.
|
||||
Note that raw pixel data is only supported for buffer output.
|
||||
|
||||
By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
See [withMetadata][1] for control over this.
|
||||
See [withMetadata](#withMetadata) for control over this.
|
||||
|
||||
The caller is responsible for ensuring directory structures and permissions exist.
|
||||
|
||||
A `Promise` is returned when `callback` is not provided.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `fileOut` **[string][2]** the path to write the image data to.
|
||||
* `callback` **[Function][3]?** called on completion with two arguments `(err, info)`.
|
||||
`info` contains the output image `format`, `size` (bytes), `width`, `height`,
|
||||
`channels` and `premultiplied` (indicating if premultiplication was used).
|
||||
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
|
||||
May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
|
||||
**Returns**: <code>Promise.<Object></code> - - when no callback is provided
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| fileOut | <code>string</code> | the path to write the image data to. |
|
||||
| [callback] | <code>function</code> | called on completion with two arguments `(err, info)`. `info` contains the output image `format`, `size` (bytes), `width`, `height`, `channels` and `premultiplied` (indicating if premultiplication was used). When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`. When using the attention crop strategy also contains `attentionX` and `attentionY`, the focal point of the cropped region. May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.toFile('output.png', (err, info) => { ... });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.toFile('output.png')
|
||||
.then(info => { ... })
|
||||
.catch(err => { ... });
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **[Promise][5]<[Object][6]>** when no callback is provided
|
||||
|
||||
## toBuffer
|
||||
|
||||
Write output to a Buffer.
|
||||
JPEG, PNG, WebP, AVIF, TIFF, GIF and raw pixel data output are supported.
|
||||
|
||||
Use [toFormat][7] or one of the format-specific functions such as [jpeg][8], [png][9] etc. to set the output format.
|
||||
Use [toFormat](#toFormat) or one of the format-specific functions such as [jpeg](#jpeg), [png](#png) etc. to set the output format.
|
||||
|
||||
If no explicit format is set, the output format will match the input image, except SVG input which becomes PNG output.
|
||||
|
||||
By default all metadata will be removed, which includes EXIF-based orientation.
|
||||
See [withMetadata][1] for control over this.
|
||||
See [withMetadata](#withMetadata) for control over this.
|
||||
|
||||
`callback`, if present, gets three arguments `(err, data, info)` where:
|
||||
|
||||
* `err` is an error, if any.
|
||||
* `data` is the output image data.
|
||||
* `info` contains the output image `format`, `size` (bytes), `width`, `height`,
|
||||
`channels` and `premultiplied` (indicating if premultiplication was used).
|
||||
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
|
||||
May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
|
||||
- `err` is an error, if any.
|
||||
- `data` is the output image data.
|
||||
- `info` contains the output image `format`, `size` (bytes), `width`, `height`,
|
||||
`channels` and `premultiplied` (indicating if premultiplication was used).
|
||||
When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
|
||||
May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
|
||||
|
||||
A `Promise` is returned when `callback` is not provided.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** 
|
||||
**Returns**: <code>Promise.<Buffer></code> - - when no callback is provided
|
||||
|
||||
* `options.resolveWithObject` **[boolean][10]?** Resolve the Promise with an Object containing `data` and `info` properties instead of resolving only with `data`.
|
||||
* `callback` **[Function][3]?** 
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| [options] | <code>Object</code> | |
|
||||
| [options.resolveWithObject] | <code>boolean</code> | Resolve the Promise with an Object containing `data` and `info` properties instead of resolving only with `data`. |
|
||||
| [callback] | <code>function</code> | |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.toBuffer((err, data, info) => { ... });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.toBuffer()
|
||||
.then(data => { ... })
|
||||
.catch(err => { ... });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.png()
|
||||
.toBuffer({ resolveWithObject: true })
|
||||
.then(({ data, info }) => { ... })
|
||||
.catch(err => { ... });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const { data, info } = await sharp('my-image.jpg')
|
||||
// output the raw pixels
|
||||
.raw()
|
||||
@@ -111,10 +106,8 @@ await sharp(pixelArray, { raw: { width, height, channels } })
|
||||
.toFile('my-changed-image.jpg');
|
||||
```
|
||||
|
||||
Returns **[Promise][5]<[Buffer][11]>** when no callback is provided
|
||||
|
||||
## withMetadata
|
||||
|
||||
Include all metadata (EXIF, XMP, IPTC) from the input image in the output image.
|
||||
This will also convert to and add a web-friendly sRGB ICC profile unless a custom
|
||||
output profile is provided.
|
||||
@@ -124,25 +117,29 @@ sRGB colour space and strip all metadata, including the removal of any ICC profi
|
||||
|
||||
EXIF metadata is unsupported for TIFF output.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** 
|
||||
**Throws**:
|
||||
|
||||
* `options.orientation` **[number][12]?** value between 1 and 8, used to update the EXIF `Orientation` tag.
|
||||
* `options.icc` **[string][2]?** filesystem path to output ICC profile, defaults to sRGB.
|
||||
* `options.exif` **[Object][6]<[Object][6]>** Object keyed by IFD0, IFD1 etc. of key/value string pairs to write as EXIF data. (optional, default `{}`)
|
||||
* `options.density` **[number][12]?** Number of pixels per inch (DPI).
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.orientation] | <code>number</code> | | value between 1 and 8, used to update the EXIF `Orientation` tag. |
|
||||
| [options.icc] | <code>string</code> | <code>"'srgb'"</code> | Filesystem path to output ICC profile, relative to `process.cwd()`, defaults to built-in sRGB. |
|
||||
| [options.exif] | <code>Object.<Object></code> | <code>{}</code> | Object keyed by IFD0, IFD1 etc. of key/value string pairs to write as EXIF data. |
|
||||
| [options.density] | <code>number</code> | | Number of pixels per inch (DPI). |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp('input.jpg')
|
||||
.withMetadata()
|
||||
.toFile('output-with-metadata.jpg')
|
||||
.then(info => { ... });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Set "IFD0-Copyright" in output EXIF metadata
|
||||
const data = await sharp(input)
|
||||
.withMetadata({
|
||||
@@ -154,65 +151,66 @@ const data = await sharp(input)
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Set output metadata to 96 DPI
|
||||
const data = await sharp(input)
|
||||
.withMetadata({ density: 96 })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## toFormat
|
||||
|
||||
Force output to a given format.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `format` **([string][2] | [Object][6])** as a string or an Object with an 'id' attribute
|
||||
* `options` **[Object][6]** output options
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> unsupported format or options
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| format | <code>string</code> \| <code>Object</code> | as a string or an Object with an 'id' attribute |
|
||||
| options | <code>Object</code> | output options |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert any input to PNG output
|
||||
const data = await sharp(input)
|
||||
.toFormat('png')
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** unsupported format or options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## jpeg
|
||||
|
||||
Use these JPEG options for output image.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
|
||||
* `options.progressive` **[boolean][10]** use progressive (interlace) scan (optional, default `false`)
|
||||
* `options.chromaSubsampling` **[string][2]** set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling (optional, default `'4:2:0'`)
|
||||
* `options.optimiseCoding` **[boolean][10]** optimise Huffman coding tables (optional, default `true`)
|
||||
* `options.optimizeCoding` **[boolean][10]** alternative spelling of optimiseCoding (optional, default `true`)
|
||||
* `options.mozjpeg` **[boolean][10]** use mozjpeg defaults, equivalent to `{ trellisQuantisation: true, overshootDeringing: true, optimiseScans: true, quantisationTable: 3 }` (optional, default `false`)
|
||||
* `options.trellisQuantisation` **[boolean][10]** apply trellis quantisation (optional, default `false`)
|
||||
* `options.overshootDeringing` **[boolean][10]** apply overshoot deringing (optional, default `false`)
|
||||
* `options.optimiseScans` **[boolean][10]** optimise progressive scans, forces progressive (optional, default `false`)
|
||||
* `options.optimizeScans` **[boolean][10]** alternative spelling of optimiseScans (optional, default `false`)
|
||||
* `options.quantisationTable` **[number][12]** quantization table to use, integer 0-8 (optional, default `0`)
|
||||
* `options.quantizationTable` **[number][12]** alternative spelling of quantisationTable (optional, default `0`)
|
||||
* `options.force` **[boolean][10]** force JPEG output, otherwise attempt to use input format (optional, default `true`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.quality] | <code>number</code> | <code>80</code> | quality, integer 1-100 |
|
||||
| [options.progressive] | <code>boolean</code> | <code>false</code> | use progressive (interlace) scan |
|
||||
| [options.chromaSubsampling] | <code>string</code> | <code>"'4:2:0'"</code> | set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling |
|
||||
| [options.optimiseCoding] | <code>boolean</code> | <code>true</code> | optimise Huffman coding tables |
|
||||
| [options.optimizeCoding] | <code>boolean</code> | <code>true</code> | alternative spelling of optimiseCoding |
|
||||
| [options.mozjpeg] | <code>boolean</code> | <code>false</code> | use mozjpeg defaults, equivalent to `{ trellisQuantisation: true, overshootDeringing: true, optimiseScans: true, quantisationTable: 3 }` |
|
||||
| [options.trellisQuantisation] | <code>boolean</code> | <code>false</code> | apply trellis quantisation |
|
||||
| [options.overshootDeringing] | <code>boolean</code> | <code>false</code> | apply overshoot deringing |
|
||||
| [options.optimiseScans] | <code>boolean</code> | <code>false</code> | optimise progressive scans, forces progressive |
|
||||
| [options.optimizeScans] | <code>boolean</code> | <code>false</code> | alternative spelling of optimiseScans |
|
||||
| [options.quantisationTable] | <code>number</code> | <code>0</code> | quantization table to use, integer 0-8 |
|
||||
| [options.quantizationTable] | <code>number</code> | <code>0</code> | alternative spelling of quantisationTable |
|
||||
| [options.force] | <code>boolean</code> | <code>true</code> | force JPEG output, otherwise attempt to use input format |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert any input to very high quality JPEG output
|
||||
const data = await sharp(input)
|
||||
.jpeg({
|
||||
@@ -221,225 +219,186 @@ const data = await sharp(input)
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Use mozjpeg to reduce output JPEG file size (slower)
|
||||
const data = await sharp(input)
|
||||
.jpeg({ mozjpeg: true })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## png
|
||||
|
||||
Use these PNG options for output image.
|
||||
|
||||
By default, PNG output is full colour at 8 or 16 bits per pixel.
|
||||
Indexed PNG input at 1, 2 or 4 bits per pixel is converted to 8 bits per pixel.
|
||||
Set `palette` to `true` for slower, indexed PNG output.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** 
|
||||
**Throws**:
|
||||
|
||||
* `options.progressive` **[boolean][10]** use progressive (interlace) scan (optional, default `false`)
|
||||
* `options.compressionLevel` **[number][12]** zlib compression level, 0 (fastest, largest) to 9 (slowest, smallest) (optional, default `6`)
|
||||
* `options.adaptiveFiltering` **[boolean][10]** use adaptive row filtering (optional, default `false`)
|
||||
* `options.palette` **[boolean][10]** quantise to a palette-based image with alpha transparency support (optional, default `false`)
|
||||
* `options.quality` **[number][12]** use the lowest number of colours needed to achieve given quality, sets `palette` to `true` (optional, default `100`)
|
||||
* `options.effort` **[number][12]** CPU effort, between 1 (fastest) and 10 (slowest), sets `palette` to `true` (optional, default `7`)
|
||||
* `options.colours` **[number][12]** maximum number of palette entries, sets `palette` to `true` (optional, default `256`)
|
||||
* `options.colors` **[number][12]** alternative spelling of `options.colours`, sets `palette` to `true` (optional, default `256`)
|
||||
* `options.dither` **[number][12]** level of Floyd-Steinberg error diffusion, sets `palette` to `true` (optional, default `1.0`)
|
||||
* `options.force` **[boolean][10]** force PNG output, otherwise attempt to use input format (optional, default `true`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.progressive] | <code>boolean</code> | <code>false</code> | use progressive (interlace) scan |
|
||||
| [options.compressionLevel] | <code>number</code> | <code>6</code> | zlib compression level, 0 (fastest, largest) to 9 (slowest, smallest) |
|
||||
| [options.adaptiveFiltering] | <code>boolean</code> | <code>false</code> | use adaptive row filtering |
|
||||
| [options.palette] | <code>boolean</code> | <code>false</code> | quantise to a palette-based image with alpha transparency support |
|
||||
| [options.quality] | <code>number</code> | <code>100</code> | use the lowest number of colours needed to achieve given quality, sets `palette` to `true` |
|
||||
| [options.effort] | <code>number</code> | <code>7</code> | CPU effort, between 1 (fastest) and 10 (slowest), sets `palette` to `true` |
|
||||
| [options.colours] | <code>number</code> | <code>256</code> | maximum number of palette entries, sets `palette` to `true` |
|
||||
| [options.colors] | <code>number</code> | <code>256</code> | alternative spelling of `options.colours`, sets `palette` to `true` |
|
||||
| [options.dither] | <code>number</code> | <code>1.0</code> | level of Floyd-Steinberg error diffusion, sets `palette` to `true` |
|
||||
| [options.force] | <code>boolean</code> | <code>true</code> | force PNG output, otherwise attempt to use input format |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert any input to full colour PNG output
|
||||
const data = await sharp(input)
|
||||
.png()
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Convert any input to indexed PNG output (slower)
|
||||
const data = await sharp(input)
|
||||
.png({ palette: true })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## webp
|
||||
|
||||
Use these WebP options for output image.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
|
||||
* `options.alphaQuality` **[number][12]** quality of alpha layer, integer 0-100 (optional, default `100`)
|
||||
* `options.lossless` **[boolean][10]** use lossless compression mode (optional, default `false`)
|
||||
* `options.nearLossless` **[boolean][10]** use near\_lossless compression mode (optional, default `false`)
|
||||
* `options.smartSubsample` **[boolean][10]** use high quality chroma subsampling (optional, default `false`)
|
||||
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 6 (slowest) (optional, default `4`)
|
||||
* `options.loop` **[number][12]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
|
||||
* `options.delay` **([number][12] | [Array][13]<[number][12]>)?** delay(s) between animation frames (in milliseconds)
|
||||
* `options.minSize` **[boolean][10]** prevent use of animation key frames to minimise file size (slow) (optional, default `false`)
|
||||
* `options.mixed` **[boolean][10]** allow mixture of lossy and lossless animation frames (slow) (optional, default `false`)
|
||||
* `options.force` **[boolean][10]** force WebP output, otherwise attempt to use input format (optional, default `true`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.quality] | <code>number</code> | <code>80</code> | quality, integer 1-100 |
|
||||
| [options.alphaQuality] | <code>number</code> | <code>100</code> | quality of alpha layer, integer 0-100 |
|
||||
| [options.lossless] | <code>boolean</code> | <code>false</code> | use lossless compression mode |
|
||||
| [options.nearLossless] | <code>boolean</code> | <code>false</code> | use near_lossless compression mode |
|
||||
| [options.smartSubsample] | <code>boolean</code> | <code>false</code> | use high quality chroma subsampling |
|
||||
| [options.effort] | <code>number</code> | <code>4</code> | CPU effort, between 0 (fastest) and 6 (slowest) |
|
||||
| [options.loop] | <code>number</code> | <code>0</code> | number of animation iterations, use 0 for infinite animation |
|
||||
| [options.delay] | <code>number</code> \| <code>Array.<number></code> | | delay(s) between animation frames (in milliseconds) |
|
||||
| [options.minSize] | <code>boolean</code> | <code>false</code> | prevent use of animation key frames to minimise file size (slow) |
|
||||
| [options.mixed] | <code>boolean</code> | <code>false</code> | allow mixture of lossy and lossless animation frames (slow) |
|
||||
| [options.force] | <code>boolean</code> | <code>true</code> | force WebP output, otherwise attempt to use input format |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert any input to lossless WebP output
|
||||
const data = await sharp(input)
|
||||
.webp({ lossless: true })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Optimise the file size of an animated WebP
|
||||
const outputWebp = await sharp(inputWebp, { animated: true })
|
||||
.webp({ effort: 6 })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## gif
|
||||
|
||||
Use these GIF options for the output image.
|
||||
|
||||
The first entry in the palette is reserved for transparency.
|
||||
|
||||
The palette of the input image will be re-used if possible.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.reoptimise` **[boolean][10]** always generate new palettes (slow), re-use existing by default (optional, default `false`)
|
||||
* `options.reoptimize` **[boolean][10]** alternative spelling of `options.reoptimise` (optional, default `false`)
|
||||
* `options.colours` **[number][12]** maximum number of palette entries, including transparency, between 2 and 256 (optional, default `256`)
|
||||
* `options.colors` **[number][12]** alternative spelling of `options.colours` (optional, default `256`)
|
||||
* `options.effort` **[number][12]** CPU effort, between 1 (fastest) and 10 (slowest) (optional, default `7`)
|
||||
* `options.dither` **[number][12]** level of Floyd-Steinberg error diffusion, between 0 (least) and 1 (most) (optional, default `1.0`)
|
||||
* `options.loop` **[number][12]** number of animation iterations, use 0 for infinite animation (optional, default `0`)
|
||||
* `options.delay` **([number][12] | [Array][13]<[number][12]>)?** delay(s) between animation frames (in milliseconds)
|
||||
* `options.force` **[boolean][10]** force GIF output, otherwise attempt to use input format (optional, default `true`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
**Since**: 0.30.0
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.reuse] | <code>boolean</code> | <code>true</code> | re-use existing palette, otherwise generate new (slow) |
|
||||
| [options.progressive] | <code>boolean</code> | <code>false</code> | use progressive (interlace) scan |
|
||||
| [options.colours] | <code>number</code> | <code>256</code> | maximum number of palette entries, including transparency, between 2 and 256 |
|
||||
| [options.colors] | <code>number</code> | <code>256</code> | alternative spelling of `options.colours` |
|
||||
| [options.effort] | <code>number</code> | <code>7</code> | CPU effort, between 1 (fastest) and 10 (slowest) |
|
||||
| [options.dither] | <code>number</code> | <code>1.0</code> | level of Floyd-Steinberg error diffusion, between 0 (least) and 1 (most) |
|
||||
| [options.interFrameMaxError] | <code>number</code> | <code>0</code> | maximum inter-frame error for transparency, between 0 (lossless) and 32 |
|
||||
| [options.interPaletteMaxError] | <code>number</code> | <code>3</code> | maximum inter-palette error for palette reuse, between 0 and 256 |
|
||||
| [options.loop] | <code>number</code> | <code>0</code> | number of animation iterations, use 0 for infinite animation |
|
||||
| [options.delay] | <code>number</code> \| <code>Array.<number></code> | | delay(s) between animation frames (in milliseconds) |
|
||||
| [options.force] | <code>boolean</code> | <code>true</code> | force GIF output, otherwise attempt to use input format |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert PNG to GIF
|
||||
await sharp(pngBuffer)
|
||||
.gif()
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Convert animated WebP to animated GIF
|
||||
await sharp('animated.webp', { animated: true })
|
||||
.toFile('animated.gif');
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Create a 128x128, cropped, non-dithered, animated thumbnail of an animated GIF
|
||||
const out = await sharp('in.gif', { animated: true })
|
||||
.resize({ width: 128, height: 128 })
|
||||
.gif({ dither: 0 })
|
||||
.toBuffer();
|
||||
```
|
||||
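A minimal sketch of the `reuse` and `progressive` options listed in the table above; the filenames are assumptions:

```js
// Generate a fresh palette rather than re-using the input's,
// and write an interlaced (progressive) GIF
await sharp('in.png')
  .gif({ reuse: false, progressive: true })
  .toFile('out.gif');
```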
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.30.0
|
||||
|
||||
## jp2
|
||||
|
||||
Use these JP2 options for the output image.
|
||||
|
||||
Requires libvips compiled with support for OpenJPEG.
|
||||
The prebuilt binaries do not include this - see
|
||||
[installing a custom libvips][14].
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
|
||||
* `options.lossless` **[boolean][10]** use lossless compression mode (optional, default `false`)
|
||||
* `options.tileWidth` **[number][12]** horizontal tile size (optional, default `512`)
|
||||
* `options.tileHeight` **[number][12]** vertical tile size (optional, default `512`)
|
||||
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
// Convert any input to lossless JP2 output
|
||||
const data = await sharp(input)
|
||||
.jp2({ lossless: true })
|
||||
.toBuffer();
|
||||
**Example**
|
||||
```js
|
||||
// Lossy file size reduction of animated GIF
|
||||
await sharp('in.gif', { animated: true })
|
||||
.gif({ interFrameMaxError: 8 })
|
||||
.toFile('optim.gif');
|
||||
```
|
||||
|
||||
```javascript
|
||||
// Convert any input to very high quality JP2 output
|
||||
const data = await sharp(input)
|
||||
.jp2({
|
||||
quality: 100,
|
||||
chromaSubsampling: '4:4:4'
|
||||
})
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.29.1
|
||||
|
||||
## tiff
|
||||
|
||||
Use these TIFF options for the output image.
|
||||
|
||||
The `density` can be set in pixels/inch via [withMetadata][1] instead of providing `xres` and `yres` in pixels/mm.
|
||||
The `density` can be set in pixels/inch via [withMetadata](#withMetadata) instead of providing `xres` and `yres` in pixels/mm.
|
||||
|
||||
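A minimal sketch of setting the output `density` in pixels/inch via `withMetadata`, as described above; the filenames are assumptions:

```js
// Write a 300 pixels/inch TIFF without providing xres/yres in pixels/mm
await sharp('input.png')
  .withMetadata({ density: 300 })
  .tiff()
  .toFile('output.tiff');
```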
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `80`)
|
||||
* `options.force` **[boolean][10]** force TIFF output, otherwise attempt to use input format (optional, default `true`)
|
||||
* `options.compression` **[string][2]** compression options: none, jpeg, deflate, packbits, ccittfax4, lzw, webp, zstd, jp2k (optional, default `'jpeg'`)
|
||||
* `options.predictor` **[string][2]** compression predictor options: none, horizontal, float (optional, default `'horizontal'`)
|
||||
* `options.pyramid` **[boolean][10]** write an image pyramid (optional, default `false`)
|
||||
* `options.tile` **[boolean][10]** write a tiled tiff (optional, default `false`)
|
||||
* `options.tileWidth` **[number][12]** horizontal tile size (optional, default `256`)
|
||||
* `options.tileHeight` **[number][12]** vertical tile size (optional, default `256`)
|
||||
* `options.xres` **[number][12]** horizontal resolution in pixels/mm (optional, default `1.0`)
|
||||
* `options.yres` **[number][12]** vertical resolution in pixels/mm (optional, default `1.0`)
|
||||
* `options.resolutionUnit` **[string][2]** resolution unit options: inch, cm (optional, default `'inch'`)
|
||||
* `options.bitdepth` **[number][12]** reduce bitdepth to 1, 2 or 4 bit (optional, default `8`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.quality] | <code>number</code> | <code>80</code> | quality, integer 1-100 |
|
||||
| [options.force] | <code>boolean</code> | <code>true</code> | force TIFF output, otherwise attempt to use input format |
|
||||
| [options.compression] | <code>string</code> | <code>"'jpeg'"</code> | compression options: none, jpeg, deflate, packbits, ccittfax4, lzw, webp, zstd, jp2k |
|
||||
| [options.predictor] | <code>string</code> | <code>"'horizontal'"</code> | compression predictor options: none, horizontal, float |
|
||||
| [options.pyramid] | <code>boolean</code> | <code>false</code> | write an image pyramid |
|
||||
| [options.tile] | <code>boolean</code> | <code>false</code> | write a tiled tiff |
|
||||
| [options.tileWidth] | <code>number</code> | <code>256</code> | horizontal tile size |
|
||||
| [options.tileHeight] | <code>number</code> | <code>256</code> | vertical tile size |
|
||||
| [options.xres] | <code>number</code> | <code>1.0</code> | horizontal resolution in pixels/mm |
|
||||
| [options.yres] | <code>number</code> | <code>1.0</code> | vertical resolution in pixels/mm |
|
||||
| [options.resolutionUnit] | <code>string</code> | <code>"'inch'"</code> | resolution unit options: inch, cm |
|
||||
| [options.bitdepth] | <code>number</code> | <code>8</code> | reduce bitdepth to 1, 2 or 4 bit |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Convert SVG input to LZW-compressed, 1 bit per pixel TIFF output
|
||||
sharp('input.svg')
|
||||
.tiff({
|
||||
@@ -450,12 +409,8 @@ sharp('input.svg')
|
||||
.then(info => { ... });
|
||||
```
|
||||
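A minimal sketch of the `compression`, `pyramid` and `tile` options described above; the filenames are assumptions:

```js
// Write a tiled, pyramidal TIFF with 256x256 tiles and deflate compression
await sharp('input.jpg')
  .tiff({
    compression: 'deflate',
    pyramid: true,
    tile: true,
    tileWidth: 256,
    tileHeight: 256
  })
  .toFile('pyramid.tiff');
```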
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## avif
|
||||
|
||||
Use these AVIF options for the output image.
|
||||
|
||||
Whilst it is possible to create AVIF images smaller than 16x16 pixels,
|
||||
@@ -463,92 +418,119 @@ most web browsers do not display these properly.
|
||||
|
||||
AVIF image sequences are not supported.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `50`)
|
||||
* `options.lossless` **[boolean][10]** use lossless compression (optional, default `false`)
|
||||
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
|
||||
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
**Since**: 0.27.0
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.quality] | <code>number</code> | <code>50</code> | quality, integer 1-100 |
|
||||
| [options.lossless] | <code>boolean</code> | <code>false</code> | use lossless compression |
|
||||
| [options.effort] | <code>number</code> | <code>4</code> | CPU effort, between 0 (fastest) and 9 (slowest) |
|
||||
| [options.chromaSubsampling] | <code>string</code> | <code>"'4:4:4'"</code> | set to '4:2:0' to use chroma subsampling |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input)
|
||||
.avif({ effort: 2 })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input)
|
||||
.avif({ lossless: true })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.27.0
|
||||
|
||||
## heif
|
||||
|
||||
Use these HEIF options for the output image.
|
||||
|
||||
Support for patent-encumbered HEIC images using `hevc` compression requires the use of a
|
||||
globally-installed libvips compiled with support for libheif, libde265 and x265.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.quality` **[number][12]** quality, integer 1-100 (optional, default `50`)
|
||||
* `options.compression` **[string][2]** compression format: av1, hevc (optional, default `'av1'`)
|
||||
* `options.lossless` **[boolean][10]** use lossless compression (optional, default `false`)
|
||||
* `options.effort` **[number][12]** CPU effort, between 0 (fastest) and 9 (slowest) (optional, default `4`)
|
||||
* `options.chromaSubsampling` **[string][2]** set to '4:2:0' to use chroma subsampling (optional, default `'4:4:4'`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
**Since**: 0.23.0
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.quality] | <code>number</code> | <code>50</code> | quality, integer 1-100 |
|
||||
| [options.compression] | <code>string</code> | <code>"'av1'"</code> | compression format: av1, hevc |
|
||||
| [options.lossless] | <code>boolean</code> | <code>false</code> | use lossless compression |
|
||||
| [options.effort] | <code>number</code> | <code>4</code> | CPU effort, between 0 (fastest) and 9 (slowest) |
|
||||
| [options.chromaSubsampling] | <code>string</code> | <code>"'4:4:4'"</code> | set to '4:2:0' to use chroma subsampling |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
const data = await sharp(input)
|
||||
.heif({ compression: 'hevc' })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
Returns **Sharp** 
|
||||
## jxl

Use these JPEG-XL (JXL) options for the output image.

This feature is experimental; please do not use it in production systems.

Requires libvips compiled with support for libjxl.
The prebuilt binaries do not include this - see
[installing a custom libvips](https://sharp.pixelplumbing.com/install#custom-libvips).

Image metadata (EXIF, XMP) is unsupported.


**Throws**:

- <code>Error</code> Invalid options

**Since**: 0.31.3

| Param | Type | Default | Description |
| --- | --- | --- | --- |
| [options] | <code>Object</code> | | output options |
| [options.distance] | <code>number</code> | <code>1.0</code> | maximum encoding error, between 0 (highest quality) and 15 (lowest quality) |
| [options.quality] | <code>number</code> | | calculate `distance` based on JPEG-like quality, between 1 and 100, overrides distance if specified |
| [options.decodingTier] | <code>number</code> | <code>0</code> | target decode speed tier, between 0 (highest quality) and 4 (lowest quality) |
| [options.lossless] | <code>boolean</code> | <code>false</code> | use lossless compression |
| [options.effort] | <code>number</code> | <code>7</code> | CPU effort, between 3 (fastest) and 9 (slowest) |
|
||||
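A minimal sketch based on the options above, assuming libvips has been compiled with libjxl and `input` is any supported image source:

```js
// Convert any input to lossless JPEG-XL output
const data = await sharp(input)
  .jxl({ lossless: true })
  .toBuffer();
```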
**Meta**
|
||||
|
||||
* **since**: 0.23.0
|
||||
|
||||
## raw
|
||||
|
||||
Force output to be raw, uncompressed pixel data.
|
||||
Pixel ordering is left-to-right, top-to-bottom, without padding.
|
||||
Channel ordering will be RGB or RGBA for non-greyscale colourspaces.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]?** output options
|
||||
**Throws**:
|
||||
|
||||
* `options.depth` **[string][2]** bit depth, one of: char, uchar (default), short, ushort, int, uint, float, complex, double, dpcomplex (optional, default `'uchar'`)
|
||||
- <code>Error</code> Invalid options
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | output options |
|
||||
| [options.depth] | <code>string</code> | <code>"'uchar'"</code> | bit depth, one of: char, uchar (default), short, ushort, int, uint, float, complex, double, dpcomplex |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Extract raw, unsigned 8-bit RGB pixel data from JPEG input
|
||||
const { data, info } = await sharp('input.jpg')
|
||||
.raw()
|
||||
.toBuffer({ resolveWithObject: true });
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Extract alpha channel as raw, unsigned 16-bit pixel data from PNG input
|
||||
const data = await sharp('input.png')
|
||||
.ensureAlpha()
|
||||
@@ -558,10 +540,8 @@ const data = await sharp('input.png')
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid options
|
||||
|
||||
## tile
|
||||
|
||||
Use tile-based deep zoom (image pyramid) output.
|
||||
|
||||
Set the format and options for tile images via the `toFormat`, `jpeg`, `png` or `webp` functions.
|
||||
@@ -569,26 +549,34 @@ Use a `.zip` or `.szi` file extension with `toFile` to write to a compressed arc
|
||||
|
||||
The container will be set to `zip` when the output is a Buffer or Stream, otherwise it will default to `fs`.
|
||||
|
||||
### Parameters
|
||||
Requires libvips compiled with support for libgsf.
|
||||
The prebuilt binaries do not include this - see
|
||||
[installing a custom libvips](https://sharp.pixelplumbing.com/install#custom-libvips).
|
||||
|
||||
* `options` **[Object][6]?** 
|
||||
|
||||
* `options.size` **[number][12]** tile size in pixels, a value between 1 and 8192. (optional, default `256`)
|
||||
* `options.overlap` **[number][12]** tile overlap in pixels, a value between 0 and 8192. (optional, default `0`)
|
||||
* `options.angle` **[number][12]** tile angle of rotation, must be a multiple of 90. (optional, default `0`)
|
||||
* `options.background` **([string][2] | [Object][6])** background colour, parsed by the [color][15] module, defaults to white without transparency. (optional, default `{r:255,g:255,b:255,alpha:1}`)
|
||||
* `options.depth` **[string][2]?** how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout.
|
||||
* `options.skipBlanks` **[number][12]** threshold to skip tile generation, a value 0 - 255 for 8-bit images or 0 - 65535 for 16-bit images (optional, default `-1`)
|
||||
* `options.container` **[string][2]** tile container, with value `fs` (filesystem) or `zip` (compressed file). (optional, default `'fs'`)
|
||||
* `options.layout` **[string][2]** filesystem layout, possible values are `dz`, `iiif`, `iiif3`, `zoomify` or `google`. (optional, default `'dz'`)
|
||||
* `options.centre` **[boolean][10]** centre image in tile. (optional, default `false`)
|
||||
* `options.center` **[boolean][10]** alternative spelling of centre. (optional, default `false`)
|
||||
* `options.id` **[string][2]** when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json` (optional, default `'https://example.com/iiif'`)
|
||||
* `options.basename` **[string][2]?** the name of the directory within the zip file when container is `zip`.
|
||||
**Throws**:
|
||||
|
||||
### Examples
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
```javascript
|
||||
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.size] | <code>number</code> | <code>256</code> | tile size in pixels, a value between 1 and 8192. |
|
||||
| [options.overlap] | <code>number</code> | <code>0</code> | tile overlap in pixels, a value between 0 and 8192. |
|
||||
| [options.angle] | <code>number</code> | <code>0</code> | tile angle of rotation, must be a multiple of 90. |
|
||||
| [options.background] | <code>string</code> \| <code>Object</code> | <code>"{r: 255, g: 255, b: 255, alpha: 1}"</code> | background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to white without transparency. |
|
||||
| [options.depth] | <code>string</code> | | how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout. |
|
||||
| [options.skipBlanks] | <code>number</code> | <code>-1</code> | threshold to skip tile generation, a value 0 - 255 for 8-bit images or 0 - 65535 for 16-bit images |
|
||||
| [options.container] | <code>string</code> | <code>"'fs'"</code> | tile container, with value `fs` (filesystem) or `zip` (compressed file). |
|
||||
| [options.layout] | <code>string</code> | <code>"'dz'"</code> | filesystem layout, possible values are `dz`, `iiif`, `iiif3`, `zoomify` or `google`. |
|
||||
| [options.centre] | <code>boolean</code> | <code>false</code> | centre image in tile. |
|
||||
| [options.center] | <code>boolean</code> | <code>false</code> | alternative spelling of centre. |
|
||||
| [options.id] | <code>string</code> | <code>"'https://example.com/iiif'"</code> | when `layout` is `iiif`/`iiif3`, sets the `@id`/`id` attribute of `info.json` |
|
||||
| [options.basename] | <code>string</code> | | the name of the directory within the zip file when container is `zip`. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp('input.tiff')
|
||||
.png()
|
||||
.tile({
|
||||
@@ -599,41 +587,38 @@ sharp('input.tiff')
|
||||
// output_files contains 512x512 tiles grouped by zoom level
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const zipFileWithTiles = await sharp(input)
|
||||
.tile({ basename: "tiles" })
|
||||
.toBuffer();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const iiififier = sharp().tile({ layout: "iiif" });
|
||||
readableStream
|
||||
.pipe(iiififier)
|
||||
.pipe(writeableStream);
|
||||
```
|
||||
|
||||
* Throws **[Error][4]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## timeout
|
||||
|
||||
Set a timeout for processing, in seconds.
|
||||
Use a value of zero to continue processing indefinitely, the default behaviour.
|
||||
|
||||
The clock starts when libvips opens an input image for processing.
|
||||
Time spent waiting for a libuv thread to become available is not included.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][6]** 
|
||||
**Since**: 0.29.2
|
||||
|
||||
* `options.seconds` **[number][12]** Number of seconds after which processing will be stopped
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| options | <code>Object</code> | |
|
||||
| options.seconds | <code>number</code> | Number of seconds after which processing will be stopped |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Ensure processing takes no longer than 3 seconds
|
||||
try {
|
||||
const data = await sharp(input)
|
||||
@@ -643,40 +628,4 @@ try {
|
||||
} catch (err) {
|
||||
if (err.message.includes('timeout')) { ... }
|
||||
}
|
||||
```
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
**Meta**
|
||||
|
||||
* **since**: 0.29.2
|
||||
|
||||
[1]: #withmetadata

[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function

[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error

[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise

[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

[7]: #toformat

[8]: #jpeg

[9]: #png

[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean

[11]: https://nodejs.org/api/buffer.html

[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number

[13]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array

[14]: https://sharp.pixelplumbing.com/install#custom-libvips

[15]: https://www.npmjs.org/package/color
```
|
||||
@@ -1,63 +1,62 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## resize
|
||||
|
||||
Resize image to `width`, `height` or `width x height`.
|
||||
|
||||
When both a `width` and `height` are provided, the possible methods by which the image should **fit** these are:
|
||||
- `cover`: (default) Preserving aspect ratio, attempt to ensure the image covers both provided dimensions by cropping/clipping to fit.
|
||||
- `contain`: Preserving aspect ratio, contain within both provided dimensions using "letterboxing" where necessary.
|
||||
- `fill`: Ignore the aspect ratio of the input and stretch to both provided dimensions.
|
||||
- `inside`: Preserving aspect ratio, resize the image to be as large as possible while ensuring its dimensions are less than or equal to both those specified.
|
||||
- `outside`: Preserving aspect ratio, resize the image to be as small as possible while ensuring its dimensions are greater than or equal to both those specified.
|
||||
|
||||
* `cover`: (default) Preserving aspect ratio, ensure the image covers both provided dimensions by cropping/clipping to fit.
|
||||
* `contain`: Preserving aspect ratio, contain within both provided dimensions using "letterboxing" where necessary.
|
||||
* `fill`: Ignore the aspect ratio of the input and stretch to both provided dimensions.
|
||||
* `inside`: Preserving aspect ratio, resize the image to be as large as possible while ensuring its dimensions are less than or equal to both those specified.
|
||||
* `outside`: Preserving aspect ratio, resize the image to be as small as possible while ensuring its dimensions are greater than or equal to both those specified.
|
||||
Some of these values are based on the [object-fit](https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit) CSS property.
|
||||
|
||||
Some of these values are based on the [object-fit][1] CSS property.
|
||||
<img alt="Examples of various values for the fit property when resizing" width="100%" style="aspect-ratio: 998/243" src="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/api-resize-fit.png">
|
||||
|
||||
When using a `fit` of `cover` or `contain`, the default **position** is `centre`. Other options are:
|
||||
When using a **fit** of `cover` or `contain`, the default **position** is `centre`. Other options are:
|
||||
- `sharp.position`: `top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`.
|
||||
- `sharp.gravity`: `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` or `centre`.
|
||||
- `sharp.strategy`: `cover` only, dynamically crop using either the `entropy` or `attention` strategy.
|
||||
|
||||
* `sharp.position`: `top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`.
|
||||
* `sharp.gravity`: `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` or `centre`.
|
||||
* `sharp.strategy`: `cover` only, dynamically crop using either the `entropy` or `attention` strategy.
|
||||
|
||||
Some of these values are based on the [object-position][2] CSS property.
|
||||
Some of these values are based on the [object-position](https://developer.mozilla.org/en-US/docs/Web/CSS/object-position) CSS property.
|
||||
|
||||
The experimental strategy-based approach resizes so one dimension is at its target length
|
||||
then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.
|
||||
|
||||
* `entropy`: focus on the region with the highest [Shannon entropy][3].
|
||||
* `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
|
||||
- `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
|
||||
- `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
|
||||
|
||||
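A minimal sketch of the strategy-based approach described above, assuming `input` is any supported image source:

```js
// Crop to the most salient 200x200 region using the attention strategy
const thumbnail = await sharp(input)
  .resize(200, 200, { fit: sharp.fit.cover, position: sharp.strategy.attention })
  .toBuffer();
```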
Possible interpolation kernels are:
|
||||
|
||||
* `nearest`: Use [nearest neighbour interpolation][4].
|
||||
* `cubic`: Use a [Catmull-Rom spline][5].
|
||||
* `mitchell`: Use a [Mitchell-Netravali spline][6].
|
||||
* `lanczos2`: Use a [Lanczos kernel][7] with `a=2`.
|
||||
* `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
|
||||
- `nearest`: Use [nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation).
|
||||
- `cubic`: Use a [Catmull-Rom spline](https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline).
|
||||
- `mitchell`: Use a [Mitchell-Netravali spline](https://www.cs.utexas.edu/~fussell/courses/cs384g-fall2013/lectures/mitchell/Mitchell.pdf).
|
||||
- `lanczos2`: Use a [Lanczos kernel](https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel) with `a=2`.
|
||||
- `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
|
||||
|
||||
Only one resize can occur per pipeline.
|
||||
Previous calls to `resize` in the same pipeline will be ignored.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `width` **[number][8]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
|
||||
* `height` **[number][8]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
|
||||
* `options` **[Object][9]?** 
|
||||
**Throws**:
|
||||
|
||||
* `options.width` **[String][10]?** alternative means of specifying `width`. If both are present this takes priority.
* `options.height` **[String][10]?** alternative means of specifying `height`. If both are present this takes priority.
|
||||
* `options.fit` **[String][10]** how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`. (optional, default `'cover'`)
|
||||
* `options.position` **[String][10]** position, gravity or strategy to use when `fit` is `cover` or `contain`. (optional, default `'centre'`)
|
||||
* `options.background` **([String][10] | [Object][9])** background colour when `fit` is `contain`, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
|
||||
* `options.kernel` **[String][10]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
|
||||
* `options.withoutEnlargement` **[Boolean][12]** do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
|
||||
* `options.withoutReduction` **[Boolean][12]** do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option. (optional, default `false`)
|
||||
* `options.fastShrinkOnLoad` **[Boolean][12]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [width] | <code>number</code> | | How many pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height. |
|
||||
| [height] | <code>number</code> | | How many pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width. |
|
||||
| [options] | <code>Object</code> | | |
|
||||
| [options.width] | <code>number</code> | | An alternative means of specifying `width`. If both are present this takes priority. |
|
||||
| [options.height] | <code>number</code> | | An alternative means of specifying `height`. If both are present this takes priority. |
|
||||
| [options.fit] | <code>String</code> | <code>'cover'</code> | How the image should be resized/cropped to fit the target dimension(s), one of `cover`, `contain`, `fill`, `inside` or `outside`. |
|
||||
| [options.position] | <code>String</code> | <code>'centre'</code> | A position, gravity or strategy to use when `fit` is `cover` or `contain`. |
|
||||
| [options.background] | <code>String</code> \| <code>Object</code> | <code>{r: 0, g: 0, b: 0, alpha: 1}</code> | background colour when `fit` is `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency. |
|
||||
| [options.kernel] | <code>String</code> | <code>'lanczos3'</code> | The kernel to use for image reduction. Use the `fastShrinkOnLoad` option to control kernel vs shrink-on-load. |
|
||||
| [options.withoutEnlargement] | <code>Boolean</code> | <code>false</code> | Do not scale up if the width *or* height are already less than the target dimensions, equivalent to GraphicsMagick's `>` geometry option. This may result in output dimensions smaller than the target dimensions. |
|
||||
| [options.withoutReduction] | <code>Boolean</code> | <code>false</code> | Do not scale down if the width *or* height are already greater than the target dimensions, equivalent to GraphicsMagick's `<` geometry option. This may still result in a crop to reach the target dimensions. |
|
||||
| [options.fastShrinkOnLoad] | <code>Boolean</code> | <code>true</code> | Take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern or round-down of an auto-scaled dimension. |
|
||||
|
||||
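A minimal sketch of `withoutEnlargement` combined with `fit: 'inside'`, assuming `input` is any supported image source:

```js
// Fit within 200x200 but never scale up an image that is already smaller
const data = await sharp(input)
  .resize(200, 200, { fit: 'inside', withoutEnlargement: true })
  .toBuffer();
```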
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.resize({ width: 100 })
|
||||
.toBuffer()
|
||||
@@ -65,8 +64,8 @@ sharp(input)
|
||||
// 100 pixels wide, auto-scaled height
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.resize({ height: 100 })
|
||||
.toBuffer()
|
||||
@@ -74,8 +73,8 @@ sharp(input)
|
||||
// 100 pixels high, auto-scaled width
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.resize(200, 300, {
|
||||
kernel: sharp.kernel.nearest,
|
||||
@@ -90,8 +89,8 @@ sharp(input)
|
||||
// contained within the north-east corner of a semi-transparent white canvas
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const transformer = sharp()
|
||||
.resize({
|
||||
width: 200,
|
||||
@@ -105,8 +104,8 @@ readableStream
|
||||
.pipe(transformer)
|
||||
.pipe(writableStream);
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.resize(200, 200, {
|
||||
fit: sharp.fit.inside,
|
||||
@@ -120,8 +119,8 @@ sharp(input)
|
||||
// and no larger than the input image
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.resize(200, 200, {
|
||||
fit: sharp.fit.outside,
|
||||
@@ -135,8 +134,8 @@ sharp(input)
|
||||
// and no smaller than the input image
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const scaleByHalf = await sharp(input)
|
||||
.metadata()
|
||||
.then(({ width }) => sharp(input)
|
||||
@@ -145,28 +144,30 @@ const scaleByHalf = await sharp(input)
|
||||
);
|
||||
```
|
||||
|
||||
* Throws **[Error][13]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## extend
|
||||
|
||||
Extends/pads the edges of the image with the provided background colour.
|
||||
Extend / pad / extrude one or more edges of the image with either
|
||||
the provided background colour or pixels derived from the image.
|
||||
This operation will always occur after resizing and extraction, if any.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `extend` **([number][8] | [Object][9])** single pixel count to add to all edges or an Object with per-edge counts
|
||||
**Throws**:
|
||||
|
||||
* `extend.top` **[number][8]** (optional, default `0`)
|
||||
* `extend.left` **[number][8]** (optional, default `0`)
|
||||
* `extend.bottom` **[number][8]** (optional, default `0`)
|
||||
* `extend.right` **[number][8]** (optional, default `0`)
|
||||
* `extend.background` **([String][10] | [Object][9])** background colour, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| extend | <code>number</code> \| <code>Object</code> | | single pixel count to add to all edges or an Object with per-edge counts |
|
||||
| [extend.top] | <code>number</code> | <code>0</code> | |
|
||||
| [extend.left] | <code>number</code> | <code>0</code> | |
|
||||
| [extend.bottom] | <code>number</code> | <code>0</code> | |
|
||||
| [extend.right] | <code>number</code> | <code>0</code> | |
|
||||
| [extend.extendWith] | <code>String</code> | <code>'background'</code> | populate new pixels using this method, one of: background, copy, repeat, mirror. |
|
||||
| [extend.background] | <code>String</code> \| <code>Object</code> | <code>{r: 0, g: 0, b: 0, alpha: 1}</code> | background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Resize to 140 pixels wide, then add 10 transparent pixels
|
||||
// to the top, left and right edges and 20 to the bottom edge
|
||||
sharp(input)
|
||||
@@ -180,8 +181,8 @@ sharp(input)
|
||||
})
|
||||
...
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Add a row of 10 red pixels to the bottom
|
||||
sharp(input)
|
||||
.extend({
|
||||
@@ -190,39 +191,49 @@ sharp(input)
|
||||
})
|
||||
...
|
||||
```
|
||||
**Example**
|
||||
```js
|
||||
// Extrude image by 8 pixels to the right, mirroring existing right hand edge
|
||||
sharp(input)
|
||||
.extend({
|
||||
right: 8,
|
||||
extendWith: 'mirror'
|
||||
})
|
||||
...
|
||||
```
|
||||
|
||||
* Throws **[Error][13]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## extract
|
||||
|
||||
Extract/crop a region of the image.
|
||||
|
||||
* Use `extract` before `resize` for pre-resize extraction.
|
||||
* Use `extract` after `resize` for post-resize extraction.
|
||||
* Use `extract` before and after for both.
|
||||
- Use `extract` before `resize` for pre-resize extraction.
|
||||
- Use `extract` after `resize` for post-resize extraction.
|
||||
- Use `extract` before and after for both.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **[Object][9]** describes the region to extract using integral pixel values
|
||||
**Throws**:
|
||||
|
||||
* `options.left` **[number][8]** zero-indexed offset from left edge
|
||||
* `options.top` **[number][8]** zero-indexed offset from top edge
|
||||
* `options.width` **[number][8]** width of region to extract
|
||||
* `options.height` **[number][8]** height of region to extract
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| options | <code>Object</code> | describes the region to extract using integral pixel values |
|
||||
| options.left | <code>number</code> | zero-indexed offset from left edge |
|
||||
| options.top | <code>number</code> | zero-indexed offset from top edge |
|
||||
| options.width | <code>number</code> | width of region to extract |
|
||||
| options.height | <code>number</code> | height of region to extract |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.extract({ left: left, top: top, width: width, height: height })
|
||||
.toFile(output, function(err) {
|
||||
// Extract a region of the input image, saving in the same format.
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp(input)
|
||||
.extract({ left: leftOffsetPre, top: topOffsetPre, width: widthPre, height: heightPre })
|
||||
.resize(width, height)
|
||||
@@ -232,12 +243,8 @@ sharp(input)
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][13]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
## trim
|
||||
|
||||
Trim pixels from all edges that contain values similar to the given background colour, which defaults to that of the top-left pixel.
|
||||
|
||||
Images with an alpha channel will use the combined bounding box of alpha and non-alpha channels.
|
||||
@@ -247,16 +254,20 @@ If the result of this operation would trim an image to nothing then no change is
|
||||
The `info` response Object, obtained from callback of `.toFile()` or `.toBuffer()`,
|
||||
will contain `trimOffsetLeft` and `trimOffsetTop` properties.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `trim` **([string][10] | [number][8] | [Object][9])** the specific background colour to trim, the threshold for doing so or an Object with both.
|
||||
**Throws**:
|
||||
|
||||
* `trim.background` **([string][10] | [Object][9])** background colour, parsed by the [color][11] module, defaults to that of the top-left pixel. (optional, default `'top-left pixel'`)
|
||||
* `trim.threshold` **[number][8]** the allowed difference from the above colour, a positive number. (optional, default `10`)
|
||||
- <code>Error</code> Invalid parameters
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| trim | <code>string</code> \| <code>number</code> \| <code>Object</code> | | the specific background colour to trim, the threshold for doing so or an Object with both. |
|
||||
| [trim.background] | <code>string</code> \| <code>Object</code> | <code>"'top-left pixel'"</code> | background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to that of the top-left pixel. |
|
||||
| [trim.threshold] | <code>number</code> | <code>10</code> | the allowed difference from the above colour, a positive number. |
|
||||
|
||||
**Example**
|
||||
```js
|
||||
// Trim pixels with a colour similar to that of the top-left pixel.
|
||||
sharp(input)
|
||||
.trim()
|
||||
@@ -264,8 +275,8 @@ sharp(input)
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Trim pixels with the exact same colour as that of the top-left pixel.
|
||||
sharp(input)
|
||||
.trim(0)
|
||||
@@ -273,8 +284,8 @@ sharp(input)
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Trim only pixels with a similar colour to red.
|
||||
sharp(input)
|
||||
.trim("#FF0000")
|
||||
@@ -282,8 +293,8 @@ sharp(input)
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
// Trim all "yellow-ish" pixels, being more lenient with the higher threshold.
|
||||
sharp(input)
|
||||
.trim({
|
||||
@@ -293,34 +304,4 @@ sharp(input)
|
||||
.toFile(output, function(err, info) {
|
||||
...
|
||||
});
|
||||
```
|
||||
|
||||
* Throws **[Error][13]** Invalid parameters
|
||||
|
||||
Returns **Sharp** 
|
||||
|
||||
[1]: https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit

[2]: https://developer.mozilla.org/en-US/docs/Web/CSS/object-position

[3]: https://en.wikipedia.org/wiki/Entropy_%28information_theory%29

[4]: http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation

[5]: https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline

[6]: https://www.cs.utexas.edu/~fussell/courses/cs384g-fall2013/lectures/mitchell/Mitchell.pdf

[7]: https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel

[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number

[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

[11]: https://www.npmjs.org/package/color

[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean

[13]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
```
|
||||
@@ -1,101 +1,96 @@
|
||||
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
|
||||
|
||||
## format
|
||||
|
||||
An Object containing nested boolean values representing the available input and output formats/methods.
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
console.log(sharp.format);
|
||||
```
|
||||
|
||||
Returns **[Object][1]** 
|
||||
|
||||
## interpolators
|
||||
|
||||
An Object containing the available interpolators and their proper values
|
||||
|
||||
Type: [string][2]
|
||||
|
||||
### nearest
|
||||
|
||||
[Nearest neighbour interpolation][3]. Suitable for image enlargement only.
|
||||
|
||||
### bilinear
|
||||
|
||||
[Bilinear interpolation][4]. Faster than bicubic but with less smooth results.
|
||||
|
||||
### bicubic
|
||||
|
||||
[Bicubic interpolation][5] (the default).
|
||||
|
||||
### locallyBoundedBicubic
|
||||
|
||||
[LBB interpolation][6]. Prevents some "[acutance][7]" but typically reduces performance by a factor of 2.
|
||||
|
||||
### nohalo
|
||||
|
||||
[Nohalo interpolation][8]. Prevents acutance but typically reduces performance by a factor of 3.
|
||||
|
||||
### vertexSplitQuadraticBasisSpline
|
||||
|
||||
[VSQBS interpolation][9]. Prevents "staircasing" when enlarging.
|
||||
|
||||
## versions
|
||||
An Object containing the version numbers of sharp, libvips and its dependencies.
|
||||
|
||||
An Object containing the version numbers of libvips and its dependencies.
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
console.log(sharp.versions);
|
||||
```
|
||||
|
||||
## vendor
|
||||
|
||||
## interpolators
|
||||
An Object containing the available interpolators and their proper values
|
||||
|
||||
|
||||
**Read only**: true
|
||||
**Properties**
|
||||
|
||||
| Name | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| nearest | <code>string</code> | <code>"nearest"</code> | [Nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation). Suitable for image enlargement only. |
|
||||
| bilinear | <code>string</code> | <code>"bilinear"</code> | [Bilinear interpolation](http://en.wikipedia.org/wiki/Bilinear_interpolation). Faster than bicubic but with less smooth results. |
|
||||
| bicubic | <code>string</code> | <code>"bicubic"</code> | [Bicubic interpolation](http://en.wikipedia.org/wiki/Bicubic_interpolation) (the default). |
|
||||
| locallyBoundedBicubic | <code>string</code> | <code>"lbb"</code> | [LBB interpolation](https://github.com/libvips/libvips/blob/master/libvips/resample/lbb.cpp#L100). Prevents some "[acutance](http://en.wikipedia.org/wiki/Acutance)" but typically reduces performance by a factor of 2. |
|
||||
| nohalo | <code>string</code> | <code>"nohalo"</code> | [Nohalo interpolation](http://eprints.soton.ac.uk/268086/). Prevents acutance but typically reduces performance by a factor of 3. |
|
||||
| vertexSplitQuadraticBasisSpline | <code>string</code> | <code>"vsqbs"</code> | [VSQBS interpolation](https://github.com/libvips/libvips/blob/master/libvips/resample/vsqbs.cpp#L48). Prevents "staircasing" when enlarging. |
|
||||
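These values are typically passed to an operation that accepts an `interpolator` option, such as `affine`; a minimal sketch, assuming `input` is any supported image source:

```js
// Shear the image using the nohalo interpolator
const sheared = await sharp(input)
  .affine([[1, 0.3], [0.1, 0.7]], { interpolator: sharp.interpolators.nohalo })
  .toBuffer();
```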
|
||||
|
||||
|
||||
## format
|
||||
An Object containing nested boolean values representing the available input and output formats/methods.
|
||||
|
||||
|
||||
**Example**
|
||||
```js
|
||||
console.log(sharp.format);
|
||||
```
|
||||
|
||||
|
||||
## vendor
|
||||
An Object containing the platform and architecture
|
||||
of the current and installed vendored binaries.
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
console.log(sharp.vendor);
|
||||
```
|
||||
|
||||
## cache
|
||||
|
||||
Gets or, when options are provided, sets the limits of *libvips'* operation cache.
|
||||
## queue
|
||||
An EventEmitter that emits a `change` event when a task is either:
|
||||
- queued, waiting for _libuv_ to provide a worker thread
|
||||
- complete
|
||||
|
||||
|
||||
**Example**
|
||||
```js
|
||||
sharp.queue.on('change', function(queueLength) {
|
||||
console.log('Queue contains ' + queueLength + ' task(s)');
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
## cache
|
||||
Gets or, when options are provided, sets the limits of _libvips'_ operation cache.
|
||||
Existing entries in the cache will be trimmed after any change in limits.
|
||||
This method always returns cache statistics,
|
||||
useful for determining how much working memory is required for a particular task.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `options` **([Object][1] | [boolean][10])** Object with the following attributes, or boolean where true uses default cache settings and false removes all caching (optional, default `true`)
|
||||
|
||||
* `options.memory` **[number][11]** is the maximum memory in MB to use for this cache (optional, default `50`)
|
||||
* `options.files` **[number][11]** is the maximum number of files to hold open (optional, default `20`)
|
||||
* `options.items` **[number][11]** is the maximum number of operations to cache (optional, default `100`)
|
||||
| Param | Type | Default | Description |
|
||||
| --- | --- | --- | --- |
|
||||
| [options] | <code>Object</code> \| <code>boolean</code> | <code>true</code> | Object with the following attributes, or boolean where true uses default cache settings and false removes all caching |
|
||||
| [options.memory] | <code>number</code> | <code>50</code> | is the maximum memory in MB to use for this cache |
|
||||
| [options.files] | <code>number</code> | <code>20</code> | is the maximum number of files to hold open |
|
||||
| [options.items] | <code>number</code> | <code>100</code> | is the maximum number of operations to cache |
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const stats = sharp.cache();
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
sharp.cache( { items: 200 } );
|
||||
sharp.cache( { files: 0 } );
|
||||
sharp.cache(false);
|
||||
```
|
||||
|
||||
Returns **[Object][1]** 
|
||||
|
||||
## concurrency
|
||||
|
||||
Gets or, when a concurrency is provided, sets
|
||||
the maximum number of threads *libvips* should use to process *each image*.
|
||||
the maximum number of threads _libvips_ should use to process _each image_.
|
||||
These are from a thread pool managed by glib,
|
||||
which helps avoid the overhead of creating new threads.
|
||||
|
||||
@@ -115,102 +110,59 @@ The maximum number of images that sharp can process in parallel
|
||||
is controlled by libuv's `UV_THREADPOOL_SIZE` environment variable,
|
||||
which defaults to 4.
|
||||
|
||||
[https://nodejs.org/api/cli.html#uv\_threadpool\_sizesize][12]
|
||||
https://nodejs.org/api/cli.html#uv_threadpool_sizesize
|
||||
|
||||
For example, by default, a machine with 8 CPU cores will process
|
||||
4 images in parallel and use up to 8 threads per image,
|
||||
so there will be up to 32 concurrent threads.
|
||||
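A minimal sketch of raising the libuv thread pool size; the value must be set before the pool is first used, so the in-process assignment below is an assumption that only works at the very top of the entry script:

```js
// Either launch with: UV_THREADPOOL_SIZE=16 node app.js
// or set it before any thread pool work is scheduled:
process.env.UV_THREADPOOL_SIZE = '16';
const sharp = require('sharp');
```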
|
||||
### Parameters
|
||||
|
||||
* `concurrency` **[number][11]?** 
|
||||
**Returns**: <code>number</code> - concurrency
|
||||
|
||||
### Examples
|
||||
| Param | Type |
|
||||
| --- | --- |
|
||||
| [concurrency] | <code>number</code> |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const threads = sharp.concurrency(); // 4
|
||||
sharp.concurrency(2); // 2
|
||||
sharp.concurrency(0); // 4
|
||||
```
|
||||
|
||||
Returns **[number][11]** concurrency
|
||||
|
||||
## queue
|
||||
|
||||
An EventEmitter that emits a `change` event when a task is either:
|
||||
|
||||
* queued, waiting for *libuv* to provide a worker thread
|
||||
* complete
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
sharp.queue.on('change', function(queueLength) {
|
||||
console.log('Queue contains ' + queueLength + ' task(s)');
|
||||
});
|
||||
```
|
||||
|
||||
## counters
|
||||
|
||||
Provides access to internal task counters.
|
||||
- queue is the number of tasks this module has queued waiting for _libuv_ to provide a worker thread from its pool.
|
||||
- process is the number of resize tasks currently being processed.
|
||||
|
||||
* queue is the number of tasks this module has queued waiting for *libuv* to provide a worker thread from its pool.
|
||||
* process is the number of resize tasks currently being processed.
|
||||
|
||||
### Examples
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const counters = sharp.counters(); // { queue: 2, process: 4 }
|
||||
```
|
||||
|
||||
Returns **[Object][1]** 
|
||||
|
||||
## simd
|
||||
|
||||
Get and set use of SIMD vector unit instructions.
|
||||
Requires libvips to have been compiled with liborc support.
|
||||
|
||||
Improves the performance of `resize`, `blur` and `sharpen` operations
|
||||
by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM NEON.
|
||||
|
||||
### Parameters
|
||||
|
||||
* `simd` **[boolean][10]** (optional, default `true`)
|
||||
|
||||
### Examples
|
||||
| Param | Type | Default |
|
||||
| --- | --- | --- |
|
||||
| [simd] | <code>boolean</code> | <code>true</code> |
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const simd = sharp.simd();
|
||||
// simd is `true` if the runtime use of liborc is currently enabled
|
||||
```
|
||||
|
||||
```javascript
|
||||
**Example**
|
||||
```js
|
||||
const simd = sharp.simd(false);
|
||||
// prevent libvips from using liborc at runtime
|
||||
```
|
||||
|
||||
Returns **[boolean][10]** 
|
||||
|
||||
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String

[3]: http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation

[4]: http://en.wikipedia.org/wiki/Bilinear_interpolation

[5]: http://en.wikipedia.org/wiki/Bicubic_interpolation

[6]: https://github.com/libvips/libvips/blob/master/libvips/resample/lbb.cpp#L100

[7]: http://en.wikipedia.org/wiki/Acutance

[8]: http://eprints.soton.ac.uk/268086/

[9]: https://github.com/libvips/libvips/blob/master/libvips/resample/vsqbs.cpp#L48

[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean

[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number

[12]: https://nodejs.org/api/cli.html#uv_threadpool_sizesize
```
|
||||
@@ -1,7 +1,11 @@
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const jsdoc2md = require('jsdoc-to-markdown');
|
||||
|
||||
[
|
||||
'constructor',
|
||||
@@ -14,13 +18,21 @@ const path = require('path');
|
||||
'output',
|
||||
'utility'
|
||||
].forEach(async (m) => {
|
||||
const documentation = await import('documentation');
|
||||
|
||||
const input = path.join('lib', `${m}.js`);
|
||||
const output = path.join('docs', `api-${m}.md`);
|
||||
|
||||
const ast = await documentation.build(input, { shallow: true });
|
||||
const markdown = await documentation.formats.md(ast, { markdownToc: false });
|
||||
const ast = await jsdoc2md.getTemplateData({ files: input });
|
||||
const markdown = await jsdoc2md.render({
|
||||
data: ast,
|
||||
'global-index-format': 'none',
|
||||
'module-index-format': 'none'
|
||||
});
|
||||
|
||||
await fs.writeFile(output, markdown);
|
||||
const cleanMarkdown = markdown
|
||||
.replace(/(## [A-Za-z]+)[^\n]*/g, '$1') // simplify headings to match those of documentationjs, ensures existing URLs work
|
||||
.replace(/<a name="[A-Za-z+]+"><\/a>/g, '') // remove anchors, let docute add these (at markdown to HTML render time)
|
||||
.replace(/\*\*Kind\*\*: global[^\n]+/g, '') // remove all "global" Kind labels (requires JSDoc refactoring)
|
||||
.trim();
|
||||
|
||||
await fs.writeFile(output, cleanMarkdown);
|
||||
});
|
||||
|
||||
@@ -1,8 +1,113 @@
# Changelog

## v0.32 - *flow*

Requires libvips v8.14.2

### v0.32.0 - 24th March 2023

* Default to using sequential rather than random access read where possible.

* Replace GIF output `optimise` / `optimize` option with `reuse`.

* Add `progressive` option to GIF output for interlacing.

* Add `wrap` option to text image creation.

* Add `formatMagick` property to metadata of images loaded via *magick.

* Prefer integer (un)premultiply for faster resizing of RGBA images.

* Add `ignoreIcc` input option to ignore embedded ICC profile.

* Allow use of GPS (IFD3) EXIF metadata.
  [#2767](https://github.com/lovell/sharp/issues/2767)

* TypeScript definitions are now maintained and published directly, deprecating the `@types/sharp` package.
  [#3369](https://github.com/lovell/sharp/issues/3369)

* Prebuilt binaries: ensure macOS 10.13+ support, as documented.
  [#3438](https://github.com/lovell/sharp/issues/3438)

* Prebuilt binaries: prevent use of glib slice allocator, improves QEMU support.
  [#3448](https://github.com/lovell/sharp/issues/3448)

* Add focus point coordinates to output when using attention based crop.
  [#3470](https://github.com/lovell/sharp/pull/3470)
  [@ejoebstl](https://github.com/ejoebstl)

* Expose sharp version as `sharp.versions.sharp`.
  [#3471](https://github.com/lovell/sharp/issues/3471)

* Respect `fastShrinkOnLoad` resize option for WebP input.
  [#3516](https://github.com/lovell/sharp/issues/3516)

* Reduce sharpen `sigma` maximum from 10000 to 10.
  [#3521](https://github.com/lovell/sharp/issues/3521)

* Add support for `ArrayBuffer` input.
  [#3548](https://github.com/lovell/sharp/pull/3548)
  [@kapouer](https://github.com/kapouer)

* Add support to `extend` operation for `extendWith` to allow copy/mirror/repeat.
  [#3556](https://github.com/lovell/sharp/pull/3556)
  [@janaz](https://github.com/janaz)

* Ensure all async JS callbacks are wrapped to help avoid possible race condition.
  [#3569](https://github.com/lovell/sharp/issues/3569)

* Prebuilt binaries: support for tile-based output temporarily removed due to licensing issue.
  [#3581](https://github.com/lovell/sharp/issues/3581)

* Add support to `normalise` for `lower` and `upper` percentiles.
  [#3583](https://github.com/lovell/sharp/pull/3583)
  [@LachlanNewman](https://github.com/LachlanNewman)

## v0.31 - *eagle*

Requires libvips v8.13.2
Requires libvips v8.13.3

### v0.31.3 - 21st December 2022

* Add experimental support for JPEG-XL images. Requires libvips compiled with libjxl.
  [#2731](https://github.com/lovell/sharp/issues/2731)

* Add runtime detection of V8 memory cage, ensures compatibility with Electron 21 onwards.
  [#3384](https://github.com/lovell/sharp/issues/3384)

* Expose `interFrameMaxError` and `interPaletteMaxError` GIF optimisation properties.
  [#3401](https://github.com/lovell/sharp/issues/3401)

* Allow installation on Linux with glibc patch versions e.g. Fedora 38.
  [#3423](https://github.com/lovell/sharp/issues/3423)

* Expand range of existing `sharpen` parameters to match libvips.
  [#3427](https://github.com/lovell/sharp/issues/3427)

* Prevent possible race condition awaiting metadata of Stream-based input.
  [#3451](https://github.com/lovell/sharp/issues/3451)

* Improve `extractChannel` support for 16-bit output colourspaces.
  [#3453](https://github.com/lovell/sharp/issues/3453)

* Ignore `sequentialRead` option when calculating image statistics.
  [#3462](https://github.com/lovell/sharp/issues/3462)

* Small performance improvement for operations that introduce a non-opaque background.
  [#3465](https://github.com/lovell/sharp/issues/3465)

* Ensure integral output of `linear` operation.
  [#3468](https://github.com/lovell/sharp/issues/3468)

### v0.31.2 - 4th November 2022

* Upgrade to libvips v8.13.3 for upstream bug fixes.

* Ensure manual flip, rotate, resize operation ordering (regression in 0.31.1)
  [#3391](https://github.com/lovell/sharp/issues/3391)

* Ensure auto-rotation works without resize (regression in 0.31.1)
  [#3422](https://github.com/lovell/sharp/issues/3422)

### v0.31.1 - 29th September 2022

@@ -263,3 +263,12 @@ GitHub: https://github.com/antonmarsden

Name: Marcos Casagrande
GitHub: https://github.com/marcosc90

Name: Emanuel Jöbstl
GitHub: https://github.com/ejoebstl

Name: Tomasz Janowski
GitHub: https://github.com/janaz

Name: Lachlan Newman
GitHub: https://github.com/LachlanNewman

BIN docs/image/api-resize-fit.png (new file), After: 16 KiB
BIN docs/image/sharp-logo-32.png (new file), After: 652 B
BIN docs/image/sharp-logo-600.png (new file), After: 2.8 KiB
BIN (binary image), Before: 661 B
@@ -5,13 +5,15 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="Resize large images in common formats to smaller, web-friendly JPEG, PNG, WebP, GIF and AVIF images of varying dimensions">
<meta property="og:title" content="sharp - High performance Node.js image processing">
<meta property="og:image" content="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/sharp-logo-600.png">
<meta http-equiv="Content-Security-Policy" content="default-src 'self'; object-src 'none'; style-src 'unsafe-inline';
img-src 'unsafe-inline' data: https://cdn.jsdelivr.net/gh/lovell/ https://www.google-analytics.com;
connect-src 'self' https://www.google-analytics.com;
script-src 'self' 'unsafe-inline' 'unsafe-eval'
https://www.google-analytics.com/analytics.js;">
<link rel="icon" type="image/svg+xml" href="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/sharp-logo.svg">
<link rel="icon" type="image/png" sizes="32x32" href="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/sharp-logo.png">
<link rel="icon" type="image/png" sizes="32x32" href="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/sharp-logo-32.png">
<link rel="author" href="/humans.txt" type="text/plain">
<link rel="dns-prefetch" href="https://www.google-analytics.com">
<script type="application/ld+json">
@@ -29,7 +31,7 @@
"@type": "Person",
"name": "Lovell Fuller"
},
"copyrightYear": [2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022],
"copyrightYear": 2013,
"license": "https://www.apache.org/licenses/LICENSE-2.0"
}
</script>
@@ -77,9 +79,7 @@
.map(function (sidebarLink) {
return sidebarLink.title;
})[0];
return title
? md.replace(/<!-- Generated by documentation.js. Update this documentation by updating the source code. -->/, '# ' + title)
: md;
return title ? `# ${title}\n${md}` : md;
});
}
};

@@ -28,7 +28,7 @@ is downloaded via HTTPS, verified via Subresource Integrity
and decompressed into `node_modules/sharp/vendor` during `npm install`.

This provides support for the
JPEG, PNG, WebP, AVIF, TIFF, GIF and SVG (input) image formats.
JPEG, PNG, WebP, AVIF (limited to 8-bit depth), TIFF, GIF and SVG (input) image formats.

The following platforms have prebuilt libvips but not sharp:

@@ -115,7 +115,8 @@ and that it can be located using `pkg-config --modversion vips-cpp`.
For help compiling libvips and its dependencies, please see
[building libvips from source](https://www.libvips.org/install.html#building-libvips-from-source).

The use of a globally-installed libvips is unsupported on Windows.
The use of a globally-installed libvips is unsupported on Windows
and on macOS when running Node.js under Rosetta.

## Building from source

@@ -147,7 +148,7 @@ or the `npm_config_sharp_local_prebuilds` environment variable.
URL example:
if `sharp_binary_host` is set to `https://hostname/path`
and the sharp version is `1.2.3` then the resultant URL will be
`https://hostname/path/sharp-v1.2.3-napi-v5-platform-arch.tar.gz`.
`https://hostname/path/v1.2.3/sharp-v1.2.3-napi-v5-platform-arch.tar.gz`.

Filename example:
if `sharp_local_prebuilds` is set to `/path`
@@ -178,6 +179,16 @@ and the libvips version is `4.5.6` then the resultant filename will be

See the Chinese mirror below for a further example.

If these binaries are modified, new integrity hashes can be provided
at install time via `npm_package_config_integrity_platform_arch`
environment variables, for example set
`npm_package_config_integrity_linux_x64` to `sha512-abc...`.

The integrity hash of a file can be generated via:
```sh
sha512sum libvips-x.y.z-platform-arch.tar.br | cut -f1 -d' ' | xxd -r -p | base64 -w 0
```

## Chinese mirror

A mirror site based in China, provided by Alibaba, contains binaries for both sharp and libvips.
@@ -227,16 +238,6 @@ the use of an alternative memory allocator such as
Those using musl-based Linux (e.g. Alpine) and non-Linux systems are
unaffected.

## Heroku

Add the
[jemalloc buildpack](https://github.com/gaffneyc/heroku-buildpack-jemalloc)
to reduce the effects of memory fragmentation.

Set
[NODE_MODULES_CACHE](https://devcenter.heroku.com/articles/nodejs-support#cache-behavior)
to `false` when using the `yarn` package manager.

## AWS Lambda

The `node_modules` directory of the
@@ -255,6 +256,9 @@ SHARP_IGNORE_GLOBAL_LIBVIPS=1 npm install --arch=x64 --platform=linux --libc=gli
To get the best performance select the largest memory available.
A 1536 MB function provides ~12x more CPU time than a 128 MB function.

When integrating with AWS API Gateway, ensure it is configured with the relevant
[binary media types](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-payload-encodings.html).

## Bundlers

### webpack
@@ -301,6 +305,17 @@ custom:
- npm install --arch=x64 --platform=linux sharp
```

## TypeScript

TypeScript definitions are published as part of
the `sharp` package from v0.32.0.

Previously these were available via the `@types/sharp` package,
which is now deprecated.

When using TypeScript, please ensure `devDependencies` includes
the `@types/node` package.

## Fonts

When creating text images or rendering SVG images that contain text elements,
@@ -343,9 +358,12 @@ Module did not self-register

### Canvas and Windows

The prebuilt binaries provided by `canvas` for Windows depend on the unmaintained GTK 2, last updated in 2011.
The prebuilt binaries provided by `canvas` for Windows
from v2.7.0 onwards depend on the Visual C++ Runtime (MSVCRT).
These conflict with the binaries provided by sharp,
which depend on the more modern Universal C Runtime (UCRT).

These conflict with the modern, up-to-date binaries provided by sharp.
See [Automattic/node-canvas#2155](https://github.com/Automattic/node-canvas/issues/2155).

If both modules are used in the same Windows process, the following error will occur:
```

@@ -2,48 +2,104 @@

A test to benchmark the performance of this module relative to alternatives.

## The contenders

* [jimp](https://www.npmjs.com/package/jimp) v0.16.1 - Image processing in pure JavaScript. Provides bicubic interpolation.
* [mapnik](https://www.npmjs.org/package/mapnik) v4.5.9 - Whilst primarily a map renderer, Mapnik contains bitmap image utilities.
* [imagemagick](https://www.npmjs.com/package/imagemagick) v0.1.3 - Supports filesystem only and "*has been unmaintained for a long time*".
* [gm](https://www.npmjs.com/package/gm) v1.23.1 - Fully featured wrapper around GraphicsMagick's `gm` command line utility.
* [@squoosh/lib](https://www.npmjs.com/package/@squoosh/lib) v0.4.0 - Image libraries transpiled to WebAssembly, includes GPLv3 code.
* [@squoosh/cli](https://www.npmjs.com/package/@squoosh/cli) v0.7.2 - Command line wrapper around `@squoosh/lib`, avoids GPLv3 by spawning process.
* sharp v0.31.0 / libvips v8.13.1 - Caching within libvips disabled to ensure a fair comparison.

## The task

Decompress a 2725x2225 JPEG image,
resize to 720x588 using Lanczos 3 resampling (where available),
then compress to JPEG at a "quality" setting of 80.

## Test environment

* AWS EC2 eu-west-1 [c6a.xlarge](https://aws.amazon.com/ec2/instance-types/c6a/) (4x AMD EPYC 7R13)
* Ubuntu 22.04 (ami-051f7c00cb18501ee)
* Node.js 16.17.0

## Results

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| jimp | buffer | buffer | 0.96 | 1.0 |
| squoosh-cli | file | file | 1.10 | 1.1 |
| squoosh-lib | buffer | buffer | 1.87 | 1.9 |
| mapnik | buffer | buffer | 3.48 | 3.6 |
| gm | buffer | buffer | 8.53 | 8.9 |
| gm | file | file | 8.60 | 9.0 |
| imagemagick | file | file | 9.30 | 9.7 |
| sharp | stream | stream | 32.86 | 34.2 |
| sharp | file | file | 34.82 | 36.3 |
| sharp | buffer | buffer | 35.41 | 36.9 |

Greater libvips performance can be expected with caching enabled (default)
and using 8+ core machines, especially those with larger L1/L2 CPU caches.

The I/O limits of the relevant (de)compression library will generally determine maximum throughput.

## Contenders

* [jimp](https://www.npmjs.com/package/jimp) v0.22.7 - Image processing in pure JavaScript.
* [imagemagick](https://www.npmjs.com/package/imagemagick) v0.1.3 - Supports filesystem only and "*has been unmaintained for a long time*".
* [gm](https://www.npmjs.com/package/gm) v1.25.0 - Fully featured wrapper around GraphicsMagick's `gm` command line utility.
* [@squoosh/lib](https://www.npmjs.com/package/@squoosh/lib) v0.4.0 - Image libraries transpiled to WebAssembly, includes GPLv3 code, but "*Project no longer maintained*".
* [@squoosh/cli](https://www.npmjs.com/package/@squoosh/cli) v0.7.3 - Command line wrapper around `@squoosh/lib`, avoids GPLv3 by spawning process, but "*Project no longer maintained*".
* sharp v0.32.0 / libvips v8.14.2 - Caching within libvips disabled to ensure a fair comparison.

## Environment

### AMD64

* AWS EC2 us-east-2 [c6a.xlarge](https://aws.amazon.com/ec2/instance-types/c6a/) (4x AMD EPYC 7R13)
* Ubuntu 22.04 20230303 (ami-0122295b0eb922138)
* Node.js 16.19.1

### ARM64

* AWS EC2 us-east-2 [c7g.xlarge](https://aws.amazon.com/ec2/instance-types/c7g/) (4x ARM Graviton3)
* Ubuntu 22.04 20230303 (ami-0af198159897e7a29)
* Node.js 16.19.1

## Task: JPEG

Decompress a 2725x2225 JPEG image,
resize to 720x588 using Lanczos 3 resampling (where available),
then compress to JPEG at a "quality" setting of 80.

Note: jimp does not support Lanczos 3, bicubic resampling used instead.

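For reference, a minimal sketch of how the sharp leg of this JPEG task could be expressed; the file name is a placeholder and the actual benchmark harness in the repository may differ.

```js
const sharp = require('sharp');

// Decode, Lanczos 3 resize to 720x588, re-encode as JPEG at quality 80
sharp('2725x2225.jpg')
  .resize(720, 588, { kernel: 'lanczos3' })
  .jpeg({ quality: 80 })
  .toBuffer()
  .then((data) => console.log(`JPEG output: ${data.length} bytes`));
```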
#### Results: JPEG (AMD64)

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| jimp | buffer | buffer | 0.84 | 1.0 |
| squoosh-cli | file | file | 1.07 | 1.3 |
| squoosh-lib | buffer | buffer | 1.82 | 2.2 |
| gm | buffer | buffer | 8.41 | 10.0 |
| gm | file | file | 8.45 | 10.0 |
| imagemagick | file | file | 8.77 | 10.4 |
| sharp | stream | stream | 36.36 | 43.3 |
| sharp | file | file | 38.67 | 46.0 |
| sharp | buffer | buffer | 39.44 | 47.0 |

#### Results: JPEG (ARM64)

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| jimp | buffer | buffer | 1.02 | 1.0 |
| squoosh-cli | file | file | 1.11 | 1.1 |
| squoosh-lib | buffer | buffer | 2.08 | 2.0 |
| gm | buffer | buffer | 8.80 | 8.6 |
| gm | file | file | 10.05 | 9.9 |
| imagemagick | file | file | 10.28 | 10.1 |
| sharp | stream | stream | 26.87 | 26.3 |
| sharp | file | file | 27.88 | 27.3 |
| sharp | buffer | buffer | 28.40 | 27.8 |

## Task: PNG

Decompress a 2048x1536 RGBA PNG image,
premultiply the alpha channel,
resize to 720x540 using Lanczos 3 resampling (where available),
unpremultiply then compress as PNG with a "default" zlib compression level of 6
and without adaptive filtering.

Note: jimp does not support premultiply/unpremultiply.

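A comparable sketch of the sharp leg of this PNG task, under the same caveats (placeholder file name; premultiply and unpremultiply happen automatically when resizing an image with an alpha channel).

```js
const sharp = require('sharp');

sharp('2048x1536-rgba.png')
  .resize(720, 540, { kernel: 'lanczos3' })
  .png({ compressionLevel: 6, adaptiveFiltering: false })
  .toBuffer()
  .then((data) => console.log(`PNG output: ${data.length} bytes`));
```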
### Results: PNG (AMD64)

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| squoosh-cli | file | file | 0.40 | 1.0 |
| squoosh-lib | buffer | buffer | 0.47 | 1.2 |
| gm | file | file | 6.47 | 16.2 |
| jimp | buffer | buffer | 6.60 | 16.5 |
| imagemagick | file | file | 7.08 | 17.7 |
| sharp | file | file | 17.80 | 44.5 |
| sharp | buffer | buffer | 18.02 | 45.0 |

### Results: PNG (ARM64)

| Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: |
| squoosh-cli | file | file | 0.40 | 1.0 |
| squoosh-lib | buffer | buffer | 0.48 | 1.2 |
| gm | file | file | 7.20 | 18.0 |
| jimp | buffer | buffer | 7.62 | 19.1 |
| imagemagick | file | file | 7.96 | 19.9 |
| sharp | file | file | 12.97 | 32.4 |
| sharp | buffer | buffer | 13.12 | 45.0 |

## Running the benchmark test

Requires Docker.

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const fs = require('fs');
@@ -37,7 +40,7 @@ for (const match of matches) {
].forEach((section) => {
const contents = fs.readFileSync(path.join(__dirname, '..', `api-${section}.md`), 'utf8');
const matches = contents.matchAll(
/\n## (?<title>[A-Za-z]+)\n\n(?<firstparagraph>.+?)\n\n(?<parameters>### Parameters.+?Returns)?/gs
/## (?<title>[A-Za-z]+)\n(?<firstparagraph>.+?)\n\n.+?(?<parameters>\| Param .+?\n\n)?\*\*Example/gs
);
for (const match of matches) {
const { title, firstparagraph, parameters } = match.groups;

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const stopWords = require('./stop-words');
@@ -13,8 +16,9 @@ const extractDescription = (str) =>
.trim();

const extractParameters = (str) =>
[...str.matchAll(/options\.(?<name>[^.`]+)/gs)]
[...str.matchAll(/options\.(?<name>[^.`\] ]+)/gs)]
.map((match) => match.groups.name)
.map((name) => name.replace(/([A-Z])/g, ' $1').toLowerCase())
.join(' ');

const extractKeywords = (str) =>

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

module.exports = [
@@ -12,9 +15,11 @@ module.exports = [
'and',
'any',
'are',
'available',
'based',
'been',
'before',
'best',
'both',
'call',
'callback',
@@ -59,14 +64,20 @@ module.exports = [
'must',
'non',
'not',
'now',
'occur',
'occurs',
'one',
'options',
'other',
'out',
'over',
'part',
'perform',
'performs',
'please',
'pre',
'previously',
'produce',
'provide',
'provided',
@@ -74,10 +85,12 @@ module.exports = [
'requires',
'requiresharp',
'returned',
'run',
'same',
'see',
'set',
'sets',
'sharp',
'should',
'since',
'site',
@@ -109,6 +122,7 @@ module.exports = [
'using',
'value',
'values',
'via',
'were',
'when',
'which',
@@ -116,5 +130,6 @@ module.exports = [
'will',
'with',
'without',
'you'
'you',
'your'
];

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const libvips = require('../lib/libvips');

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const fs = require('fs');

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const fs = require('fs');
@@ -128,9 +131,6 @@ try {
if (arch === 'ia32' && !platformAndArch.startsWith('win32')) {
throw new Error(`Intel Architecture 32-bit systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
if (platformAndArch === 'darwin-arm64') {
throw new Error("Please run 'brew install vips' to install libvips on Apple M1 (ARM64) systems");
}
if (platformAndArch === 'freebsd-x64' || platformAndArch === 'openbsd-x64' || platformAndArch === 'sunos-x64') {
throw new Error(`BSD/SunOS systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
@@ -138,7 +138,8 @@ try {
const libcFamily = detectLibc.familySync();
const libcVersion = detectLibc.versionSync();
if (libcFamily === detectLibc.GLIBC && libcVersion && minimumGlibcVersionByArch[arch]) {
if (semverLessThan(`${libcVersion}.0`, `${minimumGlibcVersionByArch[arch]}.0`)) {
const libcVersionWithoutPatch = libcVersion.split('.').slice(0, 2).join('.');
if (semverLessThan(`${libcVersionWithoutPatch}.0`, `${minimumGlibcVersionByArch[arch]}.0`)) {
handleError(new Error(`Use with glibc ${libcVersion} requires manual installation of libvips >= ${minimumLibvipsVersion}`));
}
}
@@ -152,7 +153,6 @@ try {
if (!semverSatisfies(process.versions.node, supportedNodeVersion)) {
handleError(new Error(`Expected Node.js version ${supportedNodeVersion} but found ${process.versions.node}`));
}

// Download to per-process temporary file
const tarFilename = ['libvips', minimumLibvipsVersionLabelled, platformAndArch].join('-') + '.tar.br';
const tarPathCache = path.join(libvips.cachePath(), tarFilename);
@@ -167,7 +167,7 @@ try {
} else {
const url = distBaseUrl + tarFilename;
libvips.log(`Downloading ${url}`);
simpleGet({ url: url, agent: agent() }, function (err, response) {
simpleGet({ url: url, agent: agent(libvips.log) }, function (err, response) {
if (err) {
fail(err);
} else if (response.statusCode === 404) {

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const url = require('url');
@@ -18,7 +21,7 @@ function env (key) {
return process.env[key];
}

module.exports = function () {
module.exports = function (log) {
try {
const proxy = new url.URL(proxies.map(env).find(is.string));
const tunnel = proxy.protocol === 'https:'
@@ -27,6 +30,7 @@ module.exports = function () {
const proxyAuth = proxy.username && proxy.password
? `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`
: null;
log(`Via proxy ${proxy.protocol}://${proxy.hostname}:${proxy.port} ${proxyAuth ? 'with' : 'no'} credentials`);
return tunnel({
proxy: {
port: Number(proxy.port),

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const is = require('./is');
@@ -98,7 +101,7 @@ function extractChannel (channel) {
} else {
throw is.invalidParameterError('channel', 'integer or one of: red, green, blue, alpha', channel);
}
return this.toColourspace('b-w');
return this;
}

/**

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const color = require('color');

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const is = require('./is');

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const util = require('util');
@@ -113,25 +116,25 @@ const debuglog = util.debuglog('sharp');
* }
* }).toFile('text_rgba.png');
*
* @param {(Buffer|Uint8Array|Uint8ClampedArray|Int8Array|Uint16Array|Int16Array|Uint32Array|Int32Array|Float32Array|Float64Array|string)} [input] - if present, can be
* a Buffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
* @param {(Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray|Int8Array|Uint16Array|Int16Array|Uint32Array|Int32Array|Float32Array|Float64Array|string)} [input] - if present, can be
* a Buffer / ArrayBuffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
* a TypedArray containing raw pixel image data, or
* a String containing the filesystem path to an JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
* JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {string} [options.failOn='warning'] - level of sensitivity to invalid images, one of (in order of sensitivity): 'none' (least), 'truncated', 'error' or 'warning' (most), highers level imply lower levels.
* @param {string} [options.failOn='warning'] - when to abort processing of invalid pixel data, one of (in order of sensitivity): 'none' (least), 'truncated', 'error' or 'warning' (most), highers level imply lower levels, invalid metadata will always abort.
* @param {number|boolean} [options.limitInputPixels=268402689] - Do not process input images where the number of pixels
* (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
* An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF).
* @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF).
* @param {boolean} [options.sequentialRead=false] - Set this to `true` to use sequential rather than random access where possible.
* This can reduce memory usage and might improve performance on some systems.
* @param {boolean} [options.sequentialRead=true] - Set this to `false` to use random access rather than sequential read. Some operations will do this automatically.
* @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.
* @param {number} [options.pages=1] - number of pages to extract for multi-page input (GIF, WebP, AVIF, TIFF, PDF), use -1 for all pages.
* @param {number} [options.page=0] - page number to start extracting from for multi-page input (GIF, WebP, AVIF, TIFF, PDF), zero based.
* @param {number} [options.ignoreIcc=false] - should the embedded ICC profile, if any, be ignored.
* @param {number} [options.pages=1] - Number of pages to extract for multi-page input (GIF, WebP, TIFF), use -1 for all pages.
* @param {number} [options.page=0] - Page number to start extracting from for multi-page input (GIF, WebP, TIFF), zero based.
* @param {number} [options.subifd=-1] - subIFD (Sub Image File Directory) to extract for OME-TIFF, defaults to main image.
* @param {number} [options.level=0] - level to extract from a multi-level input (OpenSlide), zero based.
* @param {boolean} [options.animated=false] - Set to `true` to read all frames/pages of an animated image (equivalent of setting `pages` to `-1`).
* @param {boolean} [options.animated=false] - Set to `true` to read all frames/pages of an animated image (GIF, WebP, TIFF), equivalent of setting `pages` to `-1`.
* @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
* @param {number} [options.raw.width] - integral number of pixels wide.
* @param {number} [options.raw.height] - integral number of pixels high.
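The `sequentialRead` default flips to `true` here; a brief, hypothetical sketch of opting back into random access for a workload that benefits from it (file names are placeholders).

```js
const sharp = require('sharp');

// Random access read (the previous default behaviour)
sharp('large-input.tiff', { sequentialRead: false })
  .extract({ left: 100, top: 100, width: 400, height: 300 })
  .toFile('region.png')
  .catch(console.error);
```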
@@ -158,6 +161,7 @@ const debuglog = util.debuglog('sharp');
* @param {number} [options.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
* @param {boolean} [options.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
* @param {number} [options.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
* @param {string} [options.text.wrap='word'] - word wrapping style when width is provided, one of: 'word', 'char', 'charWord' (prefer char, fallback to word) or 'none'.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
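A hedged illustration of the new `text.wrap` input option documented above; the caption text and output name are assumptions, and rendering depends on fonts available to fontconfig on the host.

```js
const sharp = require('sharp');

sharp({
  text: {
    text: 'A long caption that should wrap onto several lines rather than overflow',
    width: 200, // wrap once the rendered text is wider than 200 pixels
    dpi: 150,
    wrap: 'word'
  }
})
  .png()
  .toFile('caption.png')
  .catch(console.error);
```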
@@ -196,6 +200,7 @@ const Sharp = function (input, options) {
extendLeft: 0,
extendRight: 0,
extendBackground: [0, 0, 0, 255],
extendWith: 'background',
withoutEnlargement: false,
withoutReduction: false,
affineMatrix: [],
@@ -230,6 +235,8 @@ const Sharp = function (input, options) {
gammaOut: 0,
greyscale: false,
normalise: false,
normaliseLower: 1,
normaliseUpper: 99,
claheWidth: 0,
claheHeight: 0,
claheMaxSlope: 3,
@@ -289,7 +296,10 @@ const Sharp = function (input, options) {
gifBitdepth: 8,
gifEffort: 7,
gifDither: 1,
gifReoptimise: false,
gifInterFrameMaxError: 0,
gifInterPaletteMaxError: 3,
gifReuse: true,
gifProgressive: false,
tiffQuality: 80,
tiffCompression: 'jpeg',
tiffPredictor: 'horizontal',
@@ -306,6 +316,10 @@ const Sharp = function (input, options) {
heifCompression: 'av1',
heifEffort: 4,
heifChromaSubsampling: '4:4:4',
jxlDistance: 1,
jxlDecodingTier: 0,
jxlEffort: 7,
jxlLossless: false,
rawDepth: 'uchar',
tileSize: 256,
tileOverlap: 0,

1614 lib/index.d.ts (vendored, new file)
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const Sharp = require('./constructor');

47 lib/input.js
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const color = require('color');
@@ -21,9 +24,9 @@ const align = {
* @private
*/
function _inputOptionsFromObject (obj) {
const { raw, density, limitInputPixels, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd } = obj;
return [raw, density, limitInputPixels, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd].some(is.defined)
? { raw, density, limitInputPixels, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd }
const { raw, density, limitInputPixels, ignoreIcc, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd } = obj;
return [raw, density, limitInputPixels, ignoreIcc, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd].some(is.defined)
? { raw, density, limitInputPixels, ignoreIcc, unlimited, sequentialRead, failOn, failOnError, animated, page, pages, subifd }
: undefined;
}

@@ -35,8 +38,9 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
const inputDescriptor = {
failOn: 'warning',
limitInputPixels: Math.pow(0x3FFF, 2),
ignoreIcc: false,
unlimited: false,
sequentialRead: false
sequentialRead: true
};
if (is.string(input)) {
// filesystem
@@ -47,6 +51,11 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw Error('Input Buffer is empty');
}
inputDescriptor.buffer = input;
} else if (is.arrayBuffer(input)) {
if (input.byteLength === 0) {
throw Error('Input bit Array is empty');
}
inputDescriptor.buffer = Buffer.from(input, 0, input.byteLength);
} else if (is.typedArray(input)) {
if (input.length === 0) {
throw Error('Input Bit Array is empty')
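A minimal sketch of the new `ArrayBuffer` input path added above (the data is copied into a `Buffer` via `Buffer.from`); obtaining the `ArrayBuffer` from a local file here is purely illustrative.

```js
const { readFile } = require('fs/promises');
const sharp = require('sharp');

(async () => {
  const buffer = await readFile('input.jpg');
  // Slice out the ArrayBuffer region that backs this Buffer
  const arrayBuffer = buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);

  await sharp(arrayBuffer) // accepted as input from v0.32.0
    .resize(320)
    .toFile('thumbnail.webp');
})();
```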
@@ -92,6 +101,14 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw is.invalidParameterError('density', 'number between 1 and 100000', inputOptions.density);
}
}
// Ignore embeddded ICC profile
if (is.defined(inputOptions.ignoreIcc)) {
if (is.bool(inputOptions.ignoreIcc)) {
inputDescriptor.ignoreIcc = inputOptions.ignoreIcc;
} else {
throw is.invalidParameterError('ignoreIcc', 'boolean', inputOptions.ignoreIcc);
}
}
// limitInputPixels
if (is.defined(inputOptions.limitInputPixels)) {
if (is.bool(inputOptions.limitInputPixels)) {
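A short, assumed usage of the `ignoreIcc` input option validated above: skip any ICC profile embedded in the input (file names are placeholders).

```js
const sharp = require('sharp');

sharp('with-embedded-profile.jpg', { ignoreIcc: true })
  .resize(800)
  .toFile('output.jpg')
  .catch(console.error);
```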
@@ -327,6 +344,13 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw is.invalidParameterError('text.spacing', 'number', inputOptions.text.spacing);
}
}
if (is.defined(inputOptions.text.wrap)) {
if (is.string(inputOptions.text.wrap) && is.inArray(inputOptions.text.wrap, ['word', 'char', 'wordChar', 'none'])) {
inputDescriptor.textWrap = inputOptions.text.wrap;
} else {
throw is.invalidParameterError('text.wrap', 'one of: word, char, wordChar, none', inputOptions.text.wrap);
}
}
delete inputDescriptor.buffer;
} else {
throw new Error('Expected a valid string to create an image with text.');
@@ -387,8 +411,9 @@ function _isStreamInput () {
/**
* Fast access to (uncached) image metadata without decoding any compressed pixel data.
*
* This is taken from the header of the input image.
* It does not include operations, such as resize, to be applied to the output image.
* This is read from the header of the input image.
* It does not take into consideration any operations to be applied to the output image,
* such as resize or rotate.
*
* Dimensions in the response will respect the `page` and `pages` properties of the
* {@link /api-constructor#parameters|constructor parameters}.
@@ -423,6 +448,7 @@ function _isStreamInput () {
* - `iptc`: Buffer containing raw IPTC data, if present
* - `xmp`: Buffer containing raw XMP data, if present
* - `tifftagPhotoshop`: Buffer containing raw TIFFTAG_PHOTOSHOP data, if present
* - `formatMagick`: String containing format for images loaded via *magick
*
* @example
* const metadata = await sharp(input).metadata();
@@ -469,7 +495,7 @@ function metadata (callback) {
} else {
if (this._isStreamInput()) {
return new Promise((resolve, reject) => {
this.on('finish', () => {
const finished = () => {
this._flattenBufferIn();
sharp.metadata(this.options, (err, metadata) => {
if (err) {
@@ -478,7 +504,12 @@ function metadata (callback) {
resolve(metadata);
}
});
});
};
if (this.writableFinished) {
finished();
} else {
this.once('finish', finished);
}
});
} else {
return new Promise((resolve, reject) => {

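The `writableFinished` check above covers the case where the input Stream has already finished before `metadata()` is awaited; a sketch of that scenario with an assumed file name.

```js
const fs = require('fs');
const sharp = require('sharp');

(async () => {
  const pipeline = sharp();
  fs.createReadStream('input.jpg').pipe(pipeline);
  // Safe even if the pipe has already finished by the time this runs
  const { format, width, height } = await pipeline.metadata();
  console.log(format, width, height);
})();
```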
12 lib/is.js
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

/**
@@ -71,6 +74,14 @@ const typedArray = function (val) {
return false;
};

/**
* Is this value an ArrayBuffer object?
* @private
*/
const arrayBuffer = function (val) {
return val instanceof ArrayBuffer;
};

/**
* Is this value a non-empty string?
* @private
@@ -134,6 +145,7 @@ module.exports = {
bool: bool,
buffer: buffer,
typedArray: typedArray,
arrayBuffer: arrayBuffer,
string: string,
number: number,
integer: integer,

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const fs = require('fs');
@@ -115,6 +118,7 @@ const useGlobalLibvips = function () {
}
/* istanbul ignore next */
if (isRosetta()) {
log('Detected Rosetta, skipping search for globally-installed libvips');
return false;
}
const globalVipsVersion = globalLibvipsVersion();

146 lib/operation.js
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const color = require('color');
@@ -205,9 +208,11 @@ function affine (matrix, options) {

/**
* Sharpen the image.
*
* When used without parameters, performs a fast, mild sharpen of the output image.
*
* When a `sigma` is provided, performs a slower, more accurate sharpen of the L channel in the LAB colour space.
* Separate control over the level of sharpening in "flat" and "jagged" areas is available.
* Fine-grained control over the level of sharpening in "flat" (m1) and "jagged" (m2) areas is available.
*
* See {@link https://www.libvips.org/API/current/libvips-convolution.html#vips-sharpen|libvips sharpen} operation.
*
@@ -229,13 +234,13 @@ function affine (matrix, options) {
* })
* .toBuffer();
*
* @param {Object|number} [options] - if present, is an Object with attributes or (deprecated) a number for `options.sigma`.
* @param {number} [options.sigma] - the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.
* @param {number} [options.m1=1.0] - the level of sharpening to apply to "flat" areas.
* @param {number} [options.m2=2.0] - the level of sharpening to apply to "jagged" areas.
* @param {number} [options.x1=2.0] - threshold between "flat" and "jagged"
* @param {number} [options.y2=10.0] - maximum amount of brightening.
* @param {number} [options.y3=20.0] - maximum amount of darkening.
* @param {Object|number} [options] - if present, is an Object with attributes
* @param {number} [options.sigma] - the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`, between 0.000001 and 10
* @param {number} [options.m1=1.0] - the level of sharpening to apply to "flat" areas, between 0 and 1000000
* @param {number} [options.m2=2.0] - the level of sharpening to apply to "jagged" areas, between 0 and 1000000
* @param {number} [options.x1=2.0] - threshold between "flat" and "jagged", between 0 and 1000000
* @param {number} [options.y2=10.0] - maximum amount of brightening, between 0 and 1000000
* @param {number} [options.y3=20.0] - maximum amount of darkening, between 0 and 1000000
* @param {number} [flat] - (deprecated) see `options.m1`.
* @param {number} [jagged] - (deprecated) see `options.m2`.
* @returns {Sharp}
@@ -268,44 +273,44 @@ function sharpen (options, flat, jagged) {
}
}
} else if (is.plainObject(options)) {
if (is.number(options.sigma) && is.inRange(options.sigma, 0.01, 10000)) {
if (is.number(options.sigma) && is.inRange(options.sigma, 0.000001, 10)) {
this.options.sharpenSigma = options.sigma;
} else {
throw is.invalidParameterError('options.sigma', 'number between 0.01 and 10000', options.sigma);
throw is.invalidParameterError('options.sigma', 'number between 0.000001 and 10', options.sigma);
}
if (is.defined(options.m1)) {
if (is.number(options.m1) && is.inRange(options.m1, 0, 10000)) {
if (is.number(options.m1) && is.inRange(options.m1, 0, 1000000)) {
this.options.sharpenM1 = options.m1;
} else {
throw is.invalidParameterError('options.m1', 'number between 0 and 10000', options.m1);
throw is.invalidParameterError('options.m1', 'number between 0 and 1000000', options.m1);
}
}
if (is.defined(options.m2)) {
if (is.number(options.m2) && is.inRange(options.m2, 0, 10000)) {
if (is.number(options.m2) && is.inRange(options.m2, 0, 1000000)) {
this.options.sharpenM2 = options.m2;
} else {
throw is.invalidParameterError('options.m2', 'number between 0 and 10000', options.m2);
throw is.invalidParameterError('options.m2', 'number between 0 and 1000000', options.m2);
}
}
if (is.defined(options.x1)) {
if (is.number(options.x1) && is.inRange(options.x1, 0, 10000)) {
if (is.number(options.x1) && is.inRange(options.x1, 0, 1000000)) {
this.options.sharpenX1 = options.x1;
} else {
throw is.invalidParameterError('options.x1', 'number between 0 and 10000', options.x1);
throw is.invalidParameterError('options.x1', 'number between 0 and 1000000', options.x1);
}
}
if (is.defined(options.y2)) {
if (is.number(options.y2) && is.inRange(options.y2, 0, 10000)) {
if (is.number(options.y2) && is.inRange(options.y2, 0, 1000000)) {
this.options.sharpenY2 = options.y2;
} else {
throw is.invalidParameterError('options.y2', 'number between 0 and 10000', options.y2);
throw is.invalidParameterError('options.y2', 'number between 0 and 1000000', options.y2);
}
}
if (is.defined(options.y3)) {
if (is.number(options.y3) && is.inRange(options.y3, 0, 10000)) {
if (is.number(options.y3) && is.inRange(options.y3, 0, 1000000)) {
this.options.sharpenY3 = options.y3;
} else {
throw is.invalidParameterError('options.y3', 'number between 0 and 10000', options.y3);
throw is.invalidParameterError('options.y3', 'number between 0 and 1000000', options.y3);
}
}
} else {
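A small sketch using the updated `sharpen` ranges (sigma now 0.000001 to 10, the other parameters up to 1000000); the values below are arbitrary examples rather than recommendations.

```js
const sharp = require('sharp');

sharp('input.jpg')
  .sharpen({ sigma: 2, m1: 0, m2: 3, x1: 3, y2: 15, y3: 15 })
  .toFile('sharpened.jpg')
  .catch(console.error);
```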
@@ -464,16 +469,50 @@ function negate (options) {
}

/**
* Enhance output image contrast by stretching its luminance to cover the full dynamic range.
* Enhance output image contrast by stretching its luminance to cover a full dynamic range.
*
* Uses a histogram-based approach, taking a default range of 1% to 99% to reduce sensitivity to noise at the extremes.
*
* Luminance values below the `lower` percentile will be underexposed by clipping to zero.
* Luminance values above the `upper` percentile will be overexposed by clipping to the max pixel value.
*
* @example
* const output = await sharp(input).normalise().toBuffer();
* const output = await sharp(input)
* .normalise()
* .toBuffer();
*
* @param {Boolean} [normalise=true]
* @example
* const output = await sharp(input)
* .normalise({ lower: 0, upper: 100 })
* .toBuffer();
*
* @param {Object} [options]
* @param {number} [options.lower=1] - Percentile below which luminance values will be underexposed.
* @param {number} [options.upper=99] - Percentile above which luminance values will be overexposed.
* @returns {Sharp}
*/
function normalise (normalise) {
this.options.normalise = is.bool(normalise) ? normalise : true;
function normalise (options) {
if (is.plainObject(options)) {
if (is.defined(options.lower)) {
if (is.number(options.lower) && is.inRange(options.lower, 0, 99)) {
this.options.normaliseLower = options.lower;
} else {
throw is.invalidParameterError('lower', 'number between 0 and 99', options.lower);
}
}
if (is.defined(options.upper)) {
if (is.number(options.upper) && is.inRange(options.upper, 1, 100)) {
this.options.normaliseUpper = options.upper;
} else {
throw is.invalidParameterError('upper', 'number between 1 and 100', options.upper);
}
}
}
if (this.options.normaliseLower >= this.options.normaliseUpper) {
throw is.invalidParameterError('range', 'lower to be less than upper',
`${this.options.normaliseLower} >= ${this.options.normaliseUpper}`);
}
this.options.normalise = true;
return this;
}

@@ -481,13 +520,17 @@ function normalise (normalise) {
* Alternative spelling of normalise.
*
* @example
* const output = await sharp(input).normalize().toBuffer();
* const output = await sharp(input)
* .normalize()
* .toBuffer();
*
* @param {Boolean} [normalize=true]
* @param {Object} [options]
* @param {number} [options.lower=1] - Percentile below which luminance values will be underexposed.
* @param {number} [options.upper=99] - Percentile above which luminance values will be overexposed.
* @returns {Sharp}
*/
function normalize (normalize) {
return this.normalise(normalize);
function normalize (options) {
return this.normalise(options);
}

/**
@@ -507,35 +550,34 @@ function normalize (normalize) {
* .toBuffer();
*
* @param {Object} options
* @param {number} options.width - integer width of the region in pixels.
* @param {number} options.height - integer height of the region in pixels.
* @param {number} [options.maxSlope=3] - maximum value for the slope of the
* cumulative histogram. A value of 0 disables contrast limiting. Valid values
* are integers in the range 0-100 (inclusive)
* @param {number} options.width - Integral width of the search window, in pixels.
* @param {number} options.height - Integral height of the search window, in pixels.
* @param {number} [options.maxSlope=3] - Integral level of brightening, between 0 and 100, where 0 disables contrast limiting.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function clahe (options) {
if (!is.plainObject(options)) {
if (is.plainObject(options)) {
if (is.integer(options.width) && options.width > 0) {
this.options.claheWidth = options.width;
} else {
throw is.invalidParameterError('width', 'integer greater than zero', options.width);
}
if (is.integer(options.height) && options.height > 0) {
this.options.claheHeight = options.height;
} else {
throw is.invalidParameterError('height', 'integer greater than zero', options.height);
}
if (is.defined(options.maxSlope)) {
if (is.integer(options.maxSlope) && is.inRange(options.maxSlope, 0, 100)) {
this.options.claheMaxSlope = options.maxSlope;
} else {
throw is.invalidParameterError('maxSlope', 'integer between 0 and 100', options.maxSlope);
}
}
} else {
throw is.invalidParameterError('options', 'plain object', options);
}
if (!('width' in options) || !is.integer(options.width) || options.width <= 0) {
throw is.invalidParameterError('width', 'integer above zero', options.width);
} else {
this.options.claheWidth = options.width;
}
if (!('height' in options) || !is.integer(options.height) || options.height <= 0) {
throw is.invalidParameterError('height', 'integer above zero', options.height);
} else {
this.options.claheHeight = options.height;
}
if (!is.defined(options.maxSlope)) {
this.options.claheMaxSlope = 3;
} else if (!is.integer(options.maxSlope) || options.maxSlope < 0 || options.maxSlope > 100) {
throw is.invalidParameterError('maxSlope', 'integer 0-100', options.maxSlope);
} else {
this.options.claheMaxSlope = options.maxSlope;
}
return this;
}

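A brief, assumed example of the rewritten `clahe` validation above: an integral search window plus the default contrast-limiting slope (file names are placeholders).

```js
const sharp = require('sharp');

sharp('input.jpg')
  .clahe({ width: 3, height: 3, maxSlope: 3 })
  .toFile('clahe.jpg')
  .catch(console.error);
```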
124 lib/output.js
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const path = require('path');
@@ -22,10 +25,13 @@ const formats = new Map([
['jp2', 'jp2'],
['jpx', 'jp2'],
['j2k', 'jp2'],
['j2c', 'jp2']
['j2c', 'jp2'],
['jxl', 'jxl']
]);

const errJp2Save = new Error('JP2 output requires libvips with support for OpenJPEG');
const jp2Regex = /\.jp[2x]|j2[kc]$/i;

const errJp2Save = () => new Error('JP2 output requires libvips with support for OpenJPEG');

const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math.log2(colours)));

@@ -58,6 +64,7 @@ const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math
* `info` contains the output image `format`, `size` (bytes), `width`, `height`,
* `channels` and `premultiplied` (indicating if premultiplication was used).
* When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
* When using the attention crop strategy also contains `attentionX` and `attentionY`, the focal point of the cropped region.
* May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
* @returns {Promise<Object>} - when no callback is provided
* @throws {Error} Invalid parameters
@@ -68,6 +75,8 @@ function toFile (fileOut, callback) {
err = new Error('Missing output file path');
} else if (is.string(this.options.input.file) && path.resolve(this.options.input.file) === path.resolve(fileOut)) {
err = new Error('Cannot use same file for input and output');
} else if (jp2Regex.test(fileOut) && !this.constructor.format.jp2k.output.file) {
err = errJp2Save();
}
if (err) {
if (is.fn(callback)) {
@@ -187,7 +196,7 @@ function toBuffer (options, callback) {
*
* @param {Object} [options]
* @param {number} [options.orientation] value between 1 and 8, used to update the EXIF `Orientation` tag.
* @param {string} [options.icc] filesystem path to output ICC profile, defaults to sRGB.
* @param {string} [options.icc='srgb'] Filesystem path to output ICC profile, relative to `process.cwd()`, defaults to built-in sRGB.
* @param {Object<Object>} [options.exif={}] Object keyed by IFD0, IFD1 etc. of key/value string pairs to write as EXIF data.
* @param {number} [options.density] Number of pixels per inch (DPI).
* @returns {Sharp}
@@ -547,13 +556,21 @@ function webp (options) {
* .gif({ dither: 0 })
* .toBuffer();
*
* @example
* // Lossy file size reduction of animated GIF
* await sharp('in.gif', { animated: true })
* .gif({ interFrameMaxError: 8 })
* .toFile('optim.gif');
*
* @param {Object} [options] - output options
* @param {boolean} [options.reoptimise=false] - always generate new palettes (slow), re-use existing by default
* @param {boolean} [options.reoptimize=false] - alternative spelling of `options.reoptimise`
* @param {boolean} [options.reuse=true] - re-use existing palette, otherwise generate new (slow)
* @param {boolean} [options.progressive=false] - use progressive (interlace) scan
* @param {number} [options.colours=256] - maximum number of palette entries, including transparency, between 2 and 256
* @param {number} [options.colors=256] - alternative spelling of `options.colours`
* @param {number} [options.effort=7] - CPU effort, between 1 (fastest) and 10 (slowest)
* @param {number} [options.dither=1.0] - level of Floyd-Steinberg error diffusion, between 0 (least) and 1 (most)
* @param {number} [options.interFrameMaxError=0] - maximum inter-frame error for transparency, between 0 (lossless) and 32
* @param {number} [options.interPaletteMaxError=3] - maximum inter-palette error for palette reuse, between 0 and 256
* @param {number} [options.loop=0] - number of animation iterations, use 0 for infinite animation
* @param {number|number[]} [options.delay] - delay(s) between animation frames (in milliseconds)
* @param {boolean} [options.force=true] - force GIF output, otherwise attempt to use input format
@@ -562,10 +579,11 @@ function webp (options) {
*/
function gif (options) {
if (is.object(options)) {
if (is.defined(options.reoptimise)) {
this._setBooleanOption('gifReoptimise', options.reoptimise);
} else if (is.defined(options.reoptimize)) {
this._setBooleanOption('gifReoptimise', options.reoptimize);
if (is.defined(options.reuse)) {
this._setBooleanOption('gifReuse', options.reuse);
}
if (is.defined(options.progressive)) {
this._setBooleanOption('gifProgressive', options.progressive);
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
@@ -589,6 +607,20 @@ function gif (options) {
throw is.invalidParameterError('dither', 'number between 0.0 and 1.0', options.dither);
}
}
if (is.defined(options.interFrameMaxError)) {
if (is.number(options.interFrameMaxError) && is.inRange(options.interFrameMaxError, 0, 32)) {
this.options.gifInterFrameMaxError = options.interFrameMaxError;
} else {
throw is.invalidParameterError('interFrameMaxError', 'number between 0.0 and 32.0', options.interFrameMaxError);
}
}
if (is.defined(options.interPaletteMaxError)) {
if (is.number(options.interPaletteMaxError) && is.inRange(options.interPaletteMaxError, 0, 256)) {
this.options.gifInterPaletteMaxError = options.interPaletteMaxError;
} else {
throw is.invalidParameterError('interPaletteMaxError', 'number between 0.0 and 256.0', options.interPaletteMaxError);
}
}
}
trySetAnimationOptions(options, this.options);
return this._updateFormatOut('gif', options);
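Since `reoptimise`/`reoptimize` is replaced by `reuse` (with the opposite sense), a hedged migration sketch: force fresh palettes per frame and enable interlaced output.

```js
const sharp = require('sharp');

// Previously .gif({ reoptimise: true }); now expressed as reuse: false
sharp('in.gif', { animated: true })
  .gif({ reuse: false, progressive: true })
  .toFile('out.gif')
  .catch(console.error);
```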
@@ -630,7 +662,7 @@ function gif (options) {
|
||||
/* istanbul ignore next */
|
||||
function jp2 (options) {
|
||||
if (!this.constructor.format.jp2k.output.buffer) {
|
||||
throw errJp2Save;
|
||||
throw errJp2Save();
|
||||
}
|
||||
if (is.object(options)) {
|
||||
if (is.defined(options.quality)) {
|
||||
@@ -663,7 +695,7 @@ function jp2 (options) {
|
||||
}
|
||||
if (is.defined(options.chromaSubsampling)) {
|
||||
if (is.string(options.chromaSubsampling) && is.inArray(options.chromaSubsampling, ['4:2:0', '4:4:4'])) {
|
||||
this.options.heifChromaSubsampling = options.chromaSubsampling;
|
||||
this.options.jp2ChromaSubsampling = options.chromaSubsampling;
|
||||
} else {
|
||||
throw is.invalidParameterError('chromaSubsampling', 'one of: 4:2:0, 4:4:4', options.chromaSubsampling);
|
||||
}
|
||||
@@ -912,6 +944,71 @@ function heif (options) {
|
||||
return this._updateFormatOut('heif', options);
|
||||
}
|
||||
|
||||
/**
* Use these JPEG-XL (JXL) options for output image.
*
* This feature is experimental, please do not use in production systems.
*
* Requires libvips compiled with support for libjxl.
* The prebuilt binaries do not include this - see
* {@link https://sharp.pixelplumbing.com/install#custom-libvips installing a custom libvips}.
*
* Image metadata (EXIF, XMP) is unsupported.
*
* @since 0.31.3
*
* @param {Object} [options] - output options
* @param {number} [options.distance=1.0] - maximum encoding error, between 0 (highest quality) and 15 (lowest quality)
* @param {number} [options.quality] - calculate `distance` based on JPEG-like quality, between 1 and 100, overrides distance if specified
* @param {number} [options.decodingTier=0] - target decode speed tier, between 0 (highest quality) and 4 (lowest quality)
* @param {boolean} [options.lossless=false] - use lossless compression
* @param {number} [options.effort=7] - CPU effort, between 3 (fastest) and 9 (slowest)
* @returns {Sharp}
* @throws {Error} Invalid options
*/
|
||||
function jxl (options) {
|
||||
if (is.object(options)) {
|
||||
if (is.defined(options.quality)) {
|
||||
if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
|
||||
// https://github.com/libjxl/libjxl/blob/0aeea7f180bafd6893c1db8072dcb67d2aa5b03d/tools/cjxl_main.cc#L640-L644
|
||||
this.options.jxlDistance = options.quality >= 30
|
||||
? 0.1 + (100 - options.quality) * 0.09
|
||||
: 53 / 3000 * options.quality * options.quality - 23 / 20 * options.quality + 25;
|
||||
} else {
|
||||
throw is.invalidParameterError('quality', 'integer between 1 and 100', options.quality);
|
||||
}
|
||||
} else if (is.defined(options.distance)) {
|
||||
if (is.number(options.distance) && is.inRange(options.distance, 0, 15)) {
|
||||
this.options.jxlDistance = options.distance;
|
||||
} else {
|
||||
throw is.invalidParameterError('distance', 'number between 0.0 and 15.0', options.distance);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.decodingTier)) {
|
||||
if (is.integer(options.decodingTier) && is.inRange(options.decodingTier, 0, 4)) {
|
||||
this.options.jxlDecodingTier = options.decodingTier;
|
||||
} else {
|
||||
throw is.invalidParameterError('decodingTier', 'integer between 0 and 4', options.decodingTier);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.lossless)) {
|
||||
if (is.bool(options.lossless)) {
|
||||
this.options.jxlLossless = options.lossless;
|
||||
} else {
|
||||
throw is.invalidParameterError('lossless', 'boolean', options.lossless);
|
||||
}
|
||||
}
|
||||
if (is.defined(options.effort)) {
|
||||
if (is.integer(options.effort) && is.inRange(options.effort, 3, 9)) {
|
||||
this.options.jxlEffort = options.effort;
|
||||
} else {
|
||||
throw is.invalidParameterError('effort', 'integer between 3 and 9', options.effort);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this._updateFormatOut('jxl', options);
|
||||
}
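
The `quality` shortcut above maps onto `distance`: for `quality >= 30`, `distance = 0.1 + (100 - quality) * 0.09`, so `quality: 75` gives `distance = 2.35`. A minimal sketch, assuming a libvips build that includes libjxl:

sharp('input.png')
  .jxl({ quality: 75, effort: 7 })   // equivalent to { distance: 2.35, effort: 7 }
  .toBuffer()
  .then((data) => console.log(`JXL bytes: ${data.length}`));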
|
||||
|
||||
/**
|
||||
* Force output to be raw, uncompressed pixel data.
|
||||
* Pixel ordering is left-to-right, top-to-bottom, without padding.
|
||||
@@ -959,6 +1056,10 @@ function raw (options) {
|
||||
*
|
||||
* The container will be set to `zip` when the output is a Buffer or Stream, otherwise it will default to `fs`.
|
||||
*
|
||||
* Requires libvips compiled with support for libgsf.
|
||||
* The prebuilt binaries do not include this - see
|
||||
* {@link https://sharp.pixelplumbing.com/install#custom-libvips installing a custom libvips}.
|
||||
*
|
||||
* @example
|
||||
* sharp('input.tiff')
|
||||
* .png()
|
||||
@@ -1282,6 +1383,7 @@ module.exports = function (Sharp) {
|
||||
tiff,
|
||||
avif,
|
||||
heif,
|
||||
jxl,
|
||||
gif,
|
||||
raw,
|
||||
tile,
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
'use strict';
|
||||
|
||||
const detectLibc = require('detect-libc');
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
'use strict';
|
||||
|
||||
const is = require('./is');
|
||||
@@ -36,6 +39,18 @@ const position = {
|
||||
'left top': 8
|
||||
};
|
||||
|
||||
/**
|
||||
* How to extend the image.
|
||||
* @member
|
||||
* @private
|
||||
*/
|
||||
const extendWith = {
|
||||
background: 'background',
|
||||
copy: 'copy',
|
||||
repeat: 'repeat',
|
||||
mirror: 'mirror'
|
||||
};
|
||||
|
||||
/**
|
||||
* Strategies for automagic cover behaviour.
|
||||
* @member
|
||||
@@ -103,7 +118,7 @@ function isResizeExpected (options) {
|
||||
* Resize image to `width`, `height` or `width x height`.
*
* When both a `width` and `height` are provided, the possible methods by which the image should **fit** these are:
* - `cover`: (default) Preserving aspect ratio, ensure the image covers both provided dimensions by cropping/clipping to fit.
* - `cover`: (default) Preserving aspect ratio, attempt to ensure the image covers both provided dimensions by cropping/clipping to fit.
* - `contain`: Preserving aspect ratio, contain within both provided dimensions using "letterboxing" where necessary.
* - `fill`: Ignore the aspect ratio of the input and stretch to both provided dimensions.
* - `inside`: Preserving aspect ratio, resize the image to be as large as possible while ensuring its dimensions are less than or equal to both those specified.
@@ -111,7 +126,9 @@ function isResizeExpected (options) {
*
* Some of these values are based on the [object-fit](https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit) CSS property.
*
* When using a `fit` of `cover` or `contain`, the default **position** is `centre`. Other options are:
* <img alt="Examples of various values for the fit property when resizing" width="100%" style="aspect-ratio: 998/243" src="https://cdn.jsdelivr.net/gh/lovell/sharp@main/docs/image/api-resize-fit.png">
*
* When using a **fit** of `cover` or `contain`, the default **position** is `centre`. Other options are:
* - `sharp.position`: `top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`.
* - `sharp.gravity`: `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` or `centre`.
* - `sharp.strategy`: `cover` only, dynamically crop using either the `entropy` or `attention` strategy.
|
||||
@@ -214,32 +231,32 @@ function isResizeExpected (options) {
|
||||
* .toBuffer()
|
||||
* );
|
||||
*
|
||||
* @param {number} [width] - pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
|
||||
* @param {number} [height] - pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
|
||||
* @param {number} [width] - How many pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
|
||||
* @param {number} [height] - How many pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
|
||||
* @param {Object} [options]
|
||||
* @param {String} [options.width] - alternative means of specifying `width`. If both are present this take priority.
|
||||
* @param {String} [options.height] - alternative means of specifying `height`. If both are present this take priority.
|
||||
* @param {String} [options.fit='cover'] - how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`.
|
||||
* @param {String} [options.position='centre'] - position, gravity or strategy to use when `fit` is `cover` or `contain`.
|
||||
* @param {number} [options.width] - An alternative means of specifying `width`. If both are present this takes priority.
|
||||
* @param {number} [options.height] - An alternative means of specifying `height`. If both are present this takes priority.
|
||||
* @param {String} [options.fit='cover'] - How the image should be resized/cropped to fit the target dimension(s), one of `cover`, `contain`, `fill`, `inside` or `outside`.
|
||||
* @param {String} [options.position='centre'] - A position, gravity or strategy to use when `fit` is `cover` or `contain`.
|
||||
* @param {String|Object} [options.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour when `fit` is `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
|
||||
* @param {String} [options.kernel='lanczos3'] - the kernel to use for image reduction.
|
||||
* @param {Boolean} [options.withoutEnlargement=false] - do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option.
|
||||
* @param {Boolean} [options.withoutReduction=false] - do not reduce if the width *or* height are already greater than the specified dimensions, equivalent to GraphicsMagick's `<` geometry option.
|
||||
* @param {Boolean} [options.fastShrinkOnLoad=true] - take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images.
|
||||
* @param {String} [options.kernel='lanczos3'] - The kernel to use for image reduction. Use the `fastShrinkOnLoad` option to control kernel vs shrink-on-load.
|
||||
* @param {Boolean} [options.withoutEnlargement=false] - Do not scale up if the width *or* height are already less than the target dimensions, equivalent to GraphicsMagick's `>` geometry option. This may result in output dimensions smaller than the target dimensions.
|
||||
* @param {Boolean} [options.withoutReduction=false] - Do not scale down if the width *or* height are already greater than the target dimensions, equivalent to GraphicsMagick's `<` geometry option. This may still result in a crop to reach the target dimensions.
|
||||
* @param {Boolean} [options.fastShrinkOnLoad=true] - Take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern or round-down of an auto-scaled dimension.
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid parameters
|
||||
*/
|
||||
function resize (width, height, options) {
|
||||
function resize (widthOrOptions, height, options) {
|
||||
if (isResizeExpected(this.options)) {
|
||||
this.options.debuglog('ignoring previous resize options');
|
||||
}
|
||||
if (is.defined(width)) {
|
||||
if (is.object(width) && !is.defined(options)) {
|
||||
options = width;
|
||||
} else if (is.integer(width) && width > 0) {
|
||||
this.options.width = width;
|
||||
if (is.defined(widthOrOptions)) {
|
||||
if (is.object(widthOrOptions) && !is.defined(options)) {
|
||||
options = widthOrOptions;
|
||||
} else if (is.integer(widthOrOptions) && widthOrOptions > 0) {
|
||||
this.options.width = widthOrOptions;
|
||||
} else {
|
||||
throw is.invalidParameterError('width', 'positive integer', width);
|
||||
throw is.invalidParameterError('width', 'positive integer', widthOrOptions);
|
||||
}
|
||||
} else {
|
||||
this.options.width = -1;
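
A short sketch of the resize options documented above; per the `widthOrOptions` change, an options object may also be passed as the only argument. File names are illustrative:

sharp('input.jpg')
  .resize(300, 200, { fit: 'cover', position: sharp.strategy.attention })
  .toFile('cover.jpg');

// Object-only form, auto-scaling the height to match the width:
sharp('input.jpg')
  .resize({ width: 640, withoutEnlargement: true, fastShrinkOnLoad: false })
  .toFile('resized.jpg');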
|
||||
@@ -320,7 +337,8 @@ function resize (width, height, options) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Extends/pads the edges of the image with the provided background colour.
|
||||
* Extend / pad / extrude one or more edges of the image with either
|
||||
* the provided background colour or pixels derived from the image.
|
||||
* This operation will always occur after resizing and extraction, if any.
|
||||
*
|
||||
* @example
|
||||
@@ -346,11 +364,21 @@ function resize (width, height, options) {
|
||||
* })
|
||||
* ...
|
||||
*
|
||||
* @example
* // Extrude image by 8 pixels to the right, mirroring existing right hand edge
* sharp(input)
*   .extend({
*     right: 8,
*     extendWith: 'mirror'
*   })
* ...
|
||||
*
|
||||
* @param {(number|Object)} extend - single pixel count to add to all edges or an Object with per-edge counts
|
||||
* @param {number} [extend.top=0]
|
||||
* @param {number} [extend.left=0]
|
||||
* @param {number} [extend.bottom=0]
|
||||
* @param {number} [extend.right=0]
|
||||
* @param {String} [extend.extendWith='background'] - populate new pixels using this method, one of: background, copy, repeat, mirror.
|
||||
* @param {String|Object} [extend.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
|
||||
* @returns {Sharp}
|
||||
* @throws {Error} Invalid parameters
|
||||
@@ -391,6 +419,13 @@ function extend (extend) {
|
||||
}
|
||||
}
|
||||
this._setBackgroundColourOption('extendBackground', extend.background);
|
||||
if (is.defined(extend.extendWith)) {
|
||||
if (is.string(extendWith[extend.extendWith])) {
|
||||
this.options.extendWith = extendWith[extend.extendWith];
|
||||
} else {
|
||||
throw is.invalidParameterError('extendWith', 'one of: background, copy, repeat, mirror', extend.extendWith);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw is.invalidParameterError('extend', 'integer or object', extend);
|
||||
}
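
A sketch of the new `extendWith` option validated above; when set to anything other than `background` the background colour is not used:

sharp('input.png')
  .extend({ top: 16, bottom: 16, left: 16, right: 16, extendWith: 'mirror' })
  .toFile('padded.png');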
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
'use strict';
|
||||
|
||||
const platformAndArch = require('./platform')();
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
@@ -43,7 +46,7 @@ const interpolators = {
|
||||
};
|
||||
|
||||
/**
|
||||
* An Object containing the version numbers of libvips and its dependencies.
|
||||
* An Object containing the version numbers of sharp, libvips and its dependencies.
|
||||
* @member
|
||||
* @example
|
||||
* console.log(sharp.versions);
|
||||
@@ -54,6 +57,7 @@ let versions = {
|
||||
try {
|
||||
versions = require(`../vendor/${versions.vips}/${platformAndArch}/versions.json`);
|
||||
} catch (_err) { /* ignore */ }
|
||||
versions.sharp = require('../package.json').version;
|
||||
|
||||
/**
|
||||
* An Object containing the platform and architecture
|
||||
|
||||
57
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "sharp",
|
||||
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, GIF, AVIF and TIFF images",
|
||||
"version": "0.31.1",
|
||||
"version": "0.32.0",
|
||||
"author": "Lovell Fuller <npm@lovell.info>",
|
||||
"homepage": "https://github.com/lovell/sharp",
|
||||
"contributors": [
|
||||
@@ -85,21 +85,24 @@
|
||||
"Brodan <christopher.hranj@gmail.com",
|
||||
"Ankur Parihar <ankur.github@gmail.com>",
|
||||
"Brahim Ait elhaj <brahima@gmail.com>",
|
||||
"Mart Jansink <m.jansink@gmail.com>"
|
||||
"Mart Jansink <m.jansink@gmail.com>",
|
||||
"Lachlan Newman <lachnewman007@gmail.com>"
|
||||
],
|
||||
"scripts": {
|
||||
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node install/can-compile && node-gyp rebuild && node install/dll-copy)",
|
||||
"clean": "rm -rf node_modules/ build/ vendor/ .nyc_output/ coverage/ test/fixtures/output.*",
|
||||
"test": "npm run test-lint && npm run test-unit && npm run test-licensing",
|
||||
"test": "npm run test-lint && npm run test-unit && npm run test-licensing && npm run test-types",
|
||||
"test-lint": "semistandard && cpplint",
|
||||
"test-unit": "nyc --reporter=lcov --reporter=text --check-coverage --branches=100 mocha --slow=1000 --timeout=20000 ./test/unit/*.js",
|
||||
"test-unit": "nyc --reporter=lcov --reporter=text --check-coverage --branches=100 mocha",
|
||||
"test-licensing": "license-checker --production --summary --onlyAllow=\"Apache-2.0;BSD;ISC;MIT\"",
|
||||
"test-leak": "./test/leak/leak.sh",
|
||||
"docs-build": "documentation lint lib && node docs/build && node docs/search-index/build",
|
||||
"test-types": "tsd",
|
||||
"docs-build": "node docs/build && node docs/search-index/build",
|
||||
"docs-serve": "cd docs && npx serve",
|
||||
"docs-publish": "cd docs && npx firebase-tools deploy --project pixelplumbing --only hosting:pixelplumbing-sharp"
|
||||
},
|
||||
"main": "lib/index.js",
|
||||
"types": "lib/index.d.ts",
|
||||
"files": [
|
||||
"binding.gyp",
|
||||
"install/**",
|
||||
@@ -131,43 +134,44 @@
|
||||
"dependencies": {
|
||||
"color": "^4.2.3",
|
||||
"detect-libc": "^2.0.1",
|
||||
"node-addon-api": "^5.0.0",
|
||||
"node-addon-api": "^6.0.0",
|
||||
"prebuild-install": "^7.1.1",
|
||||
"semver": "^7.3.7",
|
||||
"semver": "^7.3.8",
|
||||
"simple-get": "^4.0.1",
|
||||
"tar-fs": "^2.1.1",
|
||||
"tunnel-agent": "^0.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "*",
|
||||
"async": "^3.2.4",
|
||||
"cc": "^3.0.1",
|
||||
"documentation": "^14.0.0",
|
||||
"exif-reader": "^1.0.3",
|
||||
"exif-reader": "^1.2.0",
|
||||
"extract-zip": "^2.0.1",
|
||||
"icc": "^2.0.0",
|
||||
"jsdoc-to-markdown": "^8.0.0",
|
||||
"license-checker": "^25.0.1",
|
||||
"mocha": "^10.0.0",
|
||||
"mock-fs": "^5.1.4",
|
||||
"mocha": "^10.2.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"nyc": "^15.1.0",
|
||||
"prebuild": "^11.0.4",
|
||||
"rimraf": "^3.0.2",
|
||||
"semistandard": "^16.0.1"
|
||||
"semistandard": "^16.0.1",
|
||||
"tsd": "^0.28.0"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"config": {
|
||||
"libvips": "8.13.2",
|
||||
"libvips": "8.14.2",
|
||||
"integrity": {
|
||||
"darwin-arm64v8": "sha512-4tsE/HMQDT9srV/ovSJlr7IxKnhvH9qpArCAf5Xpb/uNcAiT7BcZ+HYwX2lbf3UY8REB1TR4ThEL/lmPnzMUHw==",
|
||||
"darwin-x64": "sha512-D4ZSvlgLpf+KzKB2OD+K8NWl0JKzzIbvWwIjjwBycIHTMkaiams3Kp/AQ/bKudqof02Ks6LtP0X4XWvCaoRoUA==",
|
||||
"linux-arm64v8": "sha512-9ZvUM2NBluhoeUz9X7/zJ48xJ5d7KzI1cO6lsiv4HKo5fOYw/vEY28XodFJzhyfu9NuKxh3Hs9FtoQGNvvAFkw==",
|
||||
"linux-armv6": "sha512-vu0R8DF0k7KseU62fzrJadHNk5oeJriFLVn3KxCKEfV+Wkj7rX4lQhiPmOuD7/wRcUY+GGdoZ52vysDwMQhfzA==",
|
||||
"linux-armv7": "sha512-UdfhJTjGFgrwc3Kaos5G1ZAK2+t/16Prtnl6FAT+m7cG5EXzYAqzgvk4qtakAH7UTnVe8MUgOfbTLt0YiRpfsg==",
|
||||
"linux-x64": "sha512-sv92VpPyN+3oBv0vi4wDjx51demGdtyhEjd+vDfC3h8S/RSuIUE9Pt/+dBFuf+iv9tRdIq9hH9vzAvsLVy6NYg==",
|
||||
"linuxmusl-arm64v8": "sha512-TjhK/wHAS/m55l46T8PZ0qvlK+PKYFZGTQfh+c9aG8/z1v/VtG7TQOLNmPWfg0SFDTkXV7YqnJCqvgYLmJPZUg==",
|
||||
"linuxmusl-x64": "sha512-/su96pn/H9+lDdnlM1xB2whWEoeEDJICFp/RNRJb0+bJPJhnL/IDVIhF4VnVNBq/9AlldBWii3hqMq5rY2eEAA==",
|
||||
"win32-arm64v8": "sha512-UnSmwCcx3F5u4UOXyrdwTdYsuMK/RtQYc+1y+QxqIkBHiSL7dOlTIH/vKOSQvSaDQTPqxVLFt3wkMN1U7LZwyg==",
|
||||
"win32-ia32": "sha512-KH/H6vpx5lJ6NEzLQmwxU/QnDg8p1Jxd+WKaPiyWmXq/HpwyKrZhi3WDoyKD4fLwnlfhAXEfVLZbUbhX21pDpQ==",
|
||||
"win32-x64": "sha512-Xim5F21pqx7MuVQViaQNhSz24zWIiKHC9bm4KCdi7q/ytbvdMhm6bzWDI/mvFGNjI62NRB2SBkTTaqwJvM/pUg=="
|
||||
"darwin-arm64v8": "sha512-eUuxg6H0tXgX4z2lsaGtZ4cbPAm7yoFgkvPDd4csxoiVt+QUB25pEJwiXw7oB53VlBFIp3O8lbydSFS5zH8MQQ==",
|
||||
"darwin-x64": "sha512-cMT4v76IgzSR0VoXqLk/yftRyzMEZ+SBVMLzXCgqP/lmnYisrpmHHNqrWnoZbUUBXbPXLn6KMultYOJHe/c9ZQ==",
|
||||
"linux-arm64v8": "sha512-OcDJ/ly80pxwaKnw0W91sSvZczPtWsjmzrY/+6NMiQZT84LkmeaRuwErbHhorKDxnl7iZuNn9Uj5V25Xmj+LDQ==",
|
||||
"linux-armv6": "sha512-hk2ohSOYTJEtVQxEQFyQ+tuayKpYqx6NiXa7AE+8MF+yscxt+g+mLJ7TjDqtmb4ttFGH4IVfsEfU2YXIqWqkpg==",
|
||||
"linux-armv7": "sha512-/5Ci2Cd+yLZmTaEt9lVJ89elxX3RMJpps0ESjj43X40yrwka51QfXeg1QV38uNzZpCDIZkrbXZK0lyKldjpLuA==",
|
||||
"linux-x64": "sha512-wjCKmWfBb0uz1UB7rPDLvO0s+VWuoAY/Vv/YGCRFEQUkdSLQUgHExrOMMbOM3FleuYfQqznDYCXXphkl7X44+w==",
|
||||
"linuxmusl-arm64v8": "sha512-QtD2n90yi+rLE65C0gksFUU5uMUFPICI/pS3A0bgthpIcoCejAOYs3ZjVWpZbHQuV/lWahIUYO78MB9CzY860A==",
|
||||
"linuxmusl-x64": "sha512-TokQ/ETCJAsPYuxIMOPYDp25rlcwtpmIMtRUR9PB75TmZEJe7abRfCEInIPYeD8F/HxxnJSLiEdlbn1z1Jfzng==",
|
||||
"win32-arm64v8": "sha512-IIuj4EAgLqEVAoOuYH79C61a7TcJXlU/RBwk+5JsGWc2mr4J/Ar5J01e6XBvU4Lu3eqcU+3GPaACZEa1511buA==",
|
||||
"win32-ia32": "sha512-CsZi7lrReX3B6tmYgOGJ0IiAfcN5APDC6l+3gdosxfTfwpLLO+jXaSmyNwIGeMqrdgckG/gwwc+IrUZmkmjJ/A==",
|
||||
"win32-x64": "sha512-J7znmNKUK4ZKo6SnSnEtzT1xRAwvkGXxIx9/QihAadu1TFdS06yNhcENmwC4973+KZBlAdVpWbZ8sLrEoWkdCA=="
|
||||
},
|
||||
"runtime": "napi",
|
||||
"target": 7
|
||||
@@ -193,5 +197,8 @@
|
||||
"filter": [
|
||||
"build/include"
|
||||
]
|
||||
},
|
||||
"tsd": {
|
||||
"directory": "test/types/"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#include <cstdlib>
|
||||
#include <string>
|
||||
@@ -76,6 +65,16 @@ namespace sharp {
|
||||
}
|
||||
return vector;
|
||||
}
|
||||
Napi::Buffer<char> NewOrCopyBuffer(Napi::Env env, char* data, size_t len) {
|
||||
try {
|
||||
return Napi::Buffer<char>::New(env, data, len, FreeCallback);
|
||||
} catch (Napi::Error const &err) {
|
||||
static_cast<void>(err);
|
||||
}
|
||||
Napi::Buffer<char> buf = Napi::Buffer<char>::Copy(env, data, len);
|
||||
FreeCallback(nullptr, data);
|
||||
return buf;
|
||||
}
|
||||
|
||||
// Create an InputDescriptor instance from a Napi::Object describing an input image
|
||||
InputDescriptor* CreateInputDescriptor(Napi::Object input) {
|
||||
@@ -93,6 +92,10 @@ namespace sharp {
|
||||
if (HasAttr(input, "density")) {
|
||||
descriptor->density = AttrAsDouble(input, "density");
|
||||
}
|
||||
// Should we ignore any embedded ICC profile
|
||||
if (HasAttr(input, "ignoreIcc")) {
|
||||
descriptor->ignoreIcc = AttrAsBool(input, "ignoreIcc");
|
||||
}
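
At the JavaScript level this is expected to surface as a constructor input option; a hedged sketch, assuming the option keeps the `ignoreIcc` name used here:

sharp('tagged-with-profile.jpg', { ignoreIcc: true })
  .toFile('no-icc-conversion.jpg');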
|
||||
// Raw pixel input
|
||||
if (HasAttr(input, "rawChannels")) {
|
||||
descriptor->rawDepth = AttrAsEnum<VipsBandFormat>(input, "rawDepth", VIPS_TYPE_BAND_FORMAT);
|
||||
@@ -159,6 +162,9 @@ namespace sharp {
|
||||
if (HasAttr(input, "textSpacing")) {
|
||||
descriptor->textSpacing = AttrAsUint32(input, "textSpacing");
|
||||
}
|
||||
if (HasAttr(input, "textWrap")) {
|
||||
descriptor->textWrap = AttrAsEnum<VipsTextWrap>(input, "textWrap", VIPS_TYPE_TEXT_WRAP);
|
||||
}
|
||||
}
|
||||
// Limit input images to a given number of pixels, where pixels = width * height
|
||||
descriptor->limitInputPixels = static_cast<uint64_t>(AttrAsInt64(input, "limitInputPixels"));
|
||||
@@ -207,6 +213,9 @@ namespace sharp {
|
||||
bool IsAvif(std::string const &str) {
|
||||
return EndsWith(str, ".avif") || EndsWith(str, ".AVIF");
|
||||
}
|
||||
bool IsJxl(std::string const &str) {
|
||||
return EndsWith(str, ".jxl") || EndsWith(str, ".JXL");
|
||||
}
|
||||
bool IsDz(std::string const &str) {
|
||||
return EndsWith(str, ".dzi") || EndsWith(str, ".DZI");
|
||||
}
|
||||
@@ -217,6 +226,13 @@ namespace sharp {
|
||||
return EndsWith(str, ".v") || EndsWith(str, ".V") || EndsWith(str, ".vips") || EndsWith(str, ".VIPS");
|
||||
}
|
||||
|
||||
/*
|
||||
Trim space from end of string.
|
||||
*/
|
||||
std::string TrimEnd(std::string const &str) {
|
||||
return str.substr(0, str.find_last_not_of(" \n\r\f") + 1);
|
||||
}
|
||||
|
||||
/*
|
||||
Provide a string identifier for the given image type.
|
||||
*/
|
||||
@@ -237,6 +253,7 @@ namespace sharp {
|
||||
case ImageType::PPM: id = "ppm"; break;
|
||||
case ImageType::FITS: id = "fits"; break;
|
||||
case ImageType::EXR: id = "exr"; break;
|
||||
case ImageType::JXL: id = "jxl"; break;
|
||||
case ImageType::VIPS: id = "vips"; break;
|
||||
case ImageType::RAW: id = "raw"; break;
|
||||
case ImageType::UNKNOWN: id = "unknown"; break;
|
||||
@@ -281,6 +298,8 @@ namespace sharp {
|
||||
{ "VipsForeignLoadPpmFile", ImageType::PPM },
|
||||
{ "VipsForeignLoadFitsFile", ImageType::FITS },
|
||||
{ "VipsForeignLoadOpenexr", ImageType::EXR },
|
||||
{ "VipsForeignLoadJxlFile", ImageType::JXL },
|
||||
{ "VipsForeignLoadJxlBuffer", ImageType::JXL },
|
||||
{ "VipsForeignLoadVips", ImageType::VIPS },
|
||||
{ "VipsForeignLoadVipsFile", ImageType::VIPS },
|
||||
{ "VipsForeignLoadRaw", ImageType::RAW }
|
||||
@@ -440,6 +459,7 @@ namespace sharp {
|
||||
->set("justify", descriptor->textJustify)
|
||||
->set("rgba", descriptor->textRgba)
|
||||
->set("spacing", descriptor->textSpacing)
|
||||
->set("wrap", descriptor->textWrap)
|
||||
->set("autofit_dpi", &descriptor->textAutofitDpi);
|
||||
if (descriptor->textWidth > 0) {
|
||||
textOptions->set("width", descriptor->textWidth);
|
||||
@@ -598,6 +618,15 @@ namespace sharp {
|
||||
return copy;
|
||||
}
|
||||
|
||||
/*
|
||||
Remove GIF palette from image.
|
||||
*/
|
||||
VImage RemoveGifPalette(VImage image) {
|
||||
VImage copy = image.copy();
|
||||
copy.remove("gif-palette");
|
||||
return copy;
|
||||
}
|
||||
|
||||
/*
|
||||
Does this image have a non-default density?
|
||||
*/
|
||||
@@ -913,7 +942,7 @@ namespace sharp {
|
||||
// Add non-transparent alpha channel, if required
|
||||
if (colour[3] < 255.0 && !HasAlpha(image)) {
|
||||
image = image.bandjoin(
|
||||
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
|
||||
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier).cast(image.format()));
|
||||
}
|
||||
return std::make_tuple(image, alphaColour);
|
||||
}
|
||||
|
||||
38
src/common.h
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#ifndef SRC_COMMON_H_
|
||||
#define SRC_COMMON_H_
|
||||
@@ -25,9 +14,9 @@
|
||||
// Verify platform and compiler compatibility
|
||||
|
||||
#if (VIPS_MAJOR_VERSION < 8) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 13) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 13 && VIPS_MICRO_VERSION < 2)
|
||||
#error "libvips version 8.13.2+ is required - please see https://sharp.pixelplumbing.com/install"
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 14) || \
|
||||
(VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION == 14 && VIPS_MICRO_VERSION < 2)
|
||||
#error "libvips version 8.14.2+ is required - please see https://sharp.pixelplumbing.com/install"
|
||||
#endif
|
||||
|
||||
#if ((!defined(__clang__)) && defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6)))
|
||||
@@ -55,6 +44,7 @@ namespace sharp {
|
||||
size_t bufferLength;
|
||||
bool isBuffer;
|
||||
double density;
|
||||
bool ignoreIcc;
|
||||
VipsBandFormat rawDepth;
|
||||
int rawChannels;
|
||||
int rawWidth;
|
||||
@@ -81,6 +71,7 @@ namespace sharp {
|
||||
int textDpi;
|
||||
bool textRgba;
|
||||
int textSpacing;
|
||||
VipsTextWrap textWrap;
|
||||
int textAutofitDpi;
|
||||
|
||||
InputDescriptor():
|
||||
@@ -92,6 +83,7 @@ namespace sharp {
|
||||
bufferLength(0),
|
||||
isBuffer(FALSE),
|
||||
density(72.0),
|
||||
ignoreIcc(FALSE),
|
||||
rawDepth(VIPS_FORMAT_UCHAR),
|
||||
rawChannels(0),
|
||||
rawWidth(0),
|
||||
@@ -114,6 +106,7 @@ namespace sharp {
|
||||
textDpi(72),
|
||||
textRgba(FALSE),
|
||||
textSpacing(0),
|
||||
textWrap(VIPS_TEXT_WRAP_WORD),
|
||||
textAutofitDpi(0) {}
|
||||
};
|
||||
|
||||
@@ -133,6 +126,7 @@ namespace sharp {
|
||||
return static_cast<T>(
|
||||
vips_enum_from_nick(nullptr, type, AttrAsStr(obj, attr).data()));
|
||||
}
|
||||
Napi::Buffer<char> NewOrCopyBuffer(Napi::Env env, char* data, size_t len);
|
||||
|
||||
// Create an InputDescriptor instance from a Napi::Object describing an input image
|
||||
InputDescriptor* CreateInputDescriptor(Napi::Object input);
|
||||
@@ -152,6 +146,7 @@ namespace sharp {
|
||||
PPM,
|
||||
FITS,
|
||||
EXR,
|
||||
JXL,
|
||||
VIPS,
|
||||
RAW,
|
||||
UNKNOWN,
|
||||
@@ -182,10 +177,16 @@ namespace sharp {
|
||||
bool IsHeic(std::string const &str);
|
||||
bool IsHeif(std::string const &str);
|
||||
bool IsAvif(std::string const &str);
|
||||
bool IsJxl(std::string const &str);
|
||||
bool IsDz(std::string const &str);
|
||||
bool IsDzZip(std::string const &str);
|
||||
bool IsV(std::string const &str);
|
||||
|
||||
/*
|
||||
Trim space from end of string.
|
||||
*/
|
||||
std::string TrimEnd(std::string const &str);
|
||||
|
||||
/*
|
||||
Provide a string identifier for the given image type.
|
||||
*/
|
||||
@@ -252,6 +253,11 @@ namespace sharp {
|
||||
*/
|
||||
VImage RemoveAnimationProperties(VImage image);
|
||||
|
||||
/*
|
||||
Remove GIF palette from image.
|
||||
*/
|
||||
VImage RemoveGifPalette(VImage image);
|
||||
|
||||
/*
|
||||
Does this image have a non-default density?
|
||||
*/
|
||||
|
||||
@@ -3679,6 +3679,13 @@ VipsBlob *VImage::webpsave_buffer( VOption *options ) const
|
||||
return( buffer );
|
||||
}
|
||||
|
||||
void VImage::webpsave_mime( VOption *options ) const
|
||||
{
|
||||
call( "webpsave_mime",
|
||||
(options ? options : VImage::option())->
|
||||
set( "in", *this ) );
|
||||
}
|
||||
|
||||
void VImage::webpsave_target( VTarget target, VOption *options ) const
|
||||
{
|
||||
call( "webpsave_target",
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#include <numeric>
|
||||
#include <vector>
|
||||
@@ -80,6 +69,9 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
if (image.get_typeof(VIPS_META_RESOLUTION_UNIT) == VIPS_TYPE_REF_STRING) {
|
||||
baton->resolutionUnit = image.get_string(VIPS_META_RESOLUTION_UNIT);
|
||||
}
|
||||
if (image.get_typeof("magick-format") == VIPS_TYPE_REF_STRING) {
|
||||
baton->formatMagick = image.get_string("magick-format");
|
||||
}
|
||||
if (image.get_typeof("openslide.level-count") == VIPS_TYPE_REF_STRING) {
|
||||
int const levels = std::stoi(image.get_string("openslide.level-count"));
|
||||
for (int l = 0; l < levels; l++) {
|
||||
@@ -153,7 +145,7 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
// Handle warnings
|
||||
std::string warning = sharp::VipsWarningPop();
|
||||
while (!warning.empty()) {
|
||||
debuglog.Call({ Napi::String::New(env, warning) });
|
||||
debuglog.MakeCallback(Receiver().Value(), { Napi::String::New(env, warning) });
|
||||
warning = sharp::VipsWarningPop();
|
||||
}
|
||||
|
||||
@@ -204,6 +196,9 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
if (!baton->resolutionUnit.empty()) {
|
||||
info.Set("resolutionUnit", baton->resolutionUnit == "in" ? "inch" : baton->resolutionUnit);
|
||||
}
|
||||
if (!baton->formatMagick.empty()) {
|
||||
info.Set("formatMagick", baton->formatMagick);
|
||||
}
|
||||
if (!baton->levels.empty()) {
|
||||
int i = 0;
|
||||
Napi::Array levels = Napi::Array::New(env, static_cast<size_t>(baton->levels.size()));
|
||||
@@ -235,24 +230,24 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
info.Set("orientation", baton->orientation);
|
||||
}
|
||||
if (baton->exifLength > 0) {
|
||||
info.Set("exif", Napi::Buffer<char>::New(env, baton->exif, baton->exifLength, sharp::FreeCallback));
|
||||
info.Set("exif", sharp::NewOrCopyBuffer(env, baton->exif, baton->exifLength));
|
||||
}
|
||||
if (baton->iccLength > 0) {
|
||||
info.Set("icc", Napi::Buffer<char>::New(env, baton->icc, baton->iccLength, sharp::FreeCallback));
|
||||
info.Set("icc", sharp::NewOrCopyBuffer(env, baton->icc, baton->iccLength));
|
||||
}
|
||||
if (baton->iptcLength > 0) {
|
||||
info.Set("iptc", Napi::Buffer<char>::New(env, baton->iptc, baton->iptcLength, sharp::FreeCallback));
|
||||
info.Set("iptc", sharp::NewOrCopyBuffer(env, baton->iptc, baton->iptcLength));
|
||||
}
|
||||
if (baton->xmpLength > 0) {
|
||||
info.Set("xmp", Napi::Buffer<char>::New(env, baton->xmp, baton->xmpLength, sharp::FreeCallback));
|
||||
info.Set("xmp", sharp::NewOrCopyBuffer(env, baton->xmp, baton->xmpLength));
|
||||
}
|
||||
if (baton->tifftagPhotoshopLength > 0) {
|
||||
info.Set("tifftagPhotoshop",
|
||||
Napi::Buffer<char>::New(env, baton->tifftagPhotoshop, baton->tifftagPhotoshopLength, sharp::FreeCallback));
|
||||
sharp::NewOrCopyBuffer(env, baton->tifftagPhotoshop, baton->tifftagPhotoshopLength));
|
||||
}
|
||||
Callback().MakeCallback(Receiver().Value(), { env.Null(), info });
|
||||
} else {
|
||||
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, baton->err).Value() });
|
||||
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, sharp::TrimEnd(baton->err)).Value() });
|
||||
}
|
||||
|
||||
delete baton->input;
|
||||
@@ -270,7 +265,7 @@ class MetadataWorker : public Napi::AsyncWorker {
|
||||
Napi::Value metadata(const Napi::CallbackInfo& info) {
|
||||
// V8 objects are converted to non-V8 types held in the baton struct
|
||||
MetadataBaton *baton = new MetadataBaton;
|
||||
Napi::Object options = info[0].As<Napi::Object>();
|
||||
Napi::Object options = info[size_t(0)].As<Napi::Object>();
|
||||
|
||||
// Input
|
||||
baton->input = sharp::CreateInputDescriptor(options.Get("input").As<Napi::Object>());
|
||||
@@ -279,7 +274,7 @@ Napi::Value metadata(const Napi::CallbackInfo& info) {
|
||||
Napi::Function debuglog = options.Get("debuglog").As<Napi::Function>();
|
||||
|
||||
// Join queue for worker thread
|
||||
Napi::Function callback = info[1].As<Napi::Function>();
|
||||
Napi::Function callback = info[size_t(1)].As<Napi::Function>();
|
||||
MetadataWorker *worker = new MetadataWorker(callback, baton, debuglog);
|
||||
worker->Receiver().Set("options", options);
|
||||
worker->Queue();
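
Reading the new field from JavaScript; a sketch that assumes `formatMagick` is exposed on the resolved metadata object exactly as set above, with the logged values being illustrative:

sharp('input.bmp')
  .metadata()
  .then(({ format, formatMagick }) => {
    console.log(format, formatMagick);   // e.g. 'magick' 'BMP'
  });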
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#ifndef SRC_METADATA_H_
|
||||
#define SRC_METADATA_H_
|
||||
@@ -41,6 +30,7 @@ struct MetadataBaton {
|
||||
int pagePrimary;
|
||||
std::string compression;
|
||||
std::string resolutionUnit;
|
||||
std::string formatMagick;
|
||||
std::vector<std::pair<int, int>> levels;
|
||||
int subifds;
|
||||
std::vector<double> background;
|
||||
|
||||
@@ -1,23 +1,11 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#include <algorithm>
|
||||
#include <functional>
|
||||
#include <memory>
|
||||
#include <tuple>
|
||||
#include <vector>
|
||||
|
||||
#include <vips/vips8>
|
||||
|
||||
#include "common.h"
|
||||
@@ -57,7 +45,7 @@ namespace sharp {
|
||||
/*
|
||||
* Stretch luminance to cover full dynamic range.
|
||||
*/
|
||||
VImage Normalise(VImage image) {
|
||||
VImage Normalise(VImage image, int const lower, int const upper) {
|
||||
// Get original colourspace
|
||||
VipsInterpretation typeBeforeNormalize = image.interpretation();
|
||||
if (typeBeforeNormalize == VIPS_INTERPRETATION_RGB) {
|
||||
@@ -67,9 +55,11 @@ namespace sharp {
|
||||
VImage lab = image.colourspace(VIPS_INTERPRETATION_LAB);
|
||||
// Extract luminance
|
||||
VImage luminance = lab[0];
|
||||
|
||||
// Find luminance range
|
||||
int const min = luminance.percent(1);
|
||||
int const max = luminance.percent(99);
|
||||
int const min = lower == 0 ? luminance.min() : luminance.percent(lower);
|
||||
int const max = upper == 100 ? luminance.max() : luminance.percent(upper);
|
||||
|
||||
if (std::abs(max - min) > 1) {
|
||||
// Extract chroma
|
||||
VImage chroma = lab.extract_band(1, VImage::option()->set("n", 2));
|
||||
@@ -345,9 +335,9 @@ namespace sharp {
|
||||
if (HasAlpha(image) && a.size() != bands && (a.size() == 1 || a.size() == bands - 1 || bands - 1 == 1)) {
|
||||
// Separate alpha channel
|
||||
VImage alpha = image[bands - 1];
|
||||
return RemoveAlpha(image).linear(a, b).bandjoin(alpha);
|
||||
return RemoveAlpha(image).linear(a, b, VImage::option()->set("uchar", TRUE)).bandjoin(alpha);
|
||||
} else {
|
||||
return image.linear(a, b);
|
||||
return image.linear(a, b, VImage::option()->set("uchar", TRUE));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -395,11 +385,11 @@ namespace sharp {
|
||||
* Split into frames, embed each frame, reassemble, and update pageHeight.
|
||||
*/
|
||||
VImage EmbedMultiPage(VImage image, int left, int top, int width, int height,
|
||||
std::vector<double> background, int nPages, int *pageHeight) {
|
||||
VipsExtend extendWith, std::vector<double> background, int nPages, int *pageHeight) {
|
||||
if (top == 0 && height == *pageHeight) {
|
||||
// Fast path; no need to adjust the height of the multi-page image
|
||||
return image.embed(left, 0, width, image.height(), VImage::option()
|
||||
->set("extend", VIPS_EXTEND_BACKGROUND)
|
||||
->set("extend", extendWith)
|
||||
->set("background", background));
|
||||
} else if (left == 0 && width == image.width()) {
|
||||
// Fast path; no need to adjust the width of the multi-page image
|
||||
@@ -411,7 +401,7 @@ namespace sharp {
|
||||
|
||||
// Do the embed on the wide image
|
||||
image = image.embed(0, top, image.width(), height, VImage::option()
|
||||
->set("extend", VIPS_EXTEND_BACKGROUND)
|
||||
->set("extend", extendWith)
|
||||
->set("background", background));
|
||||
|
||||
// Split the wide image into frames
|
||||
@@ -441,7 +431,7 @@ namespace sharp {
|
||||
// Embed each frame in the target size
|
||||
for (int i = 0; i < nPages; i++) {
|
||||
pages[i] = pages[i].embed(left, top, width, height, VImage::option()
|
||||
->set("extend", VIPS_EXTEND_BACKGROUND)
|
||||
->set("extend", extendWith)
|
||||
->set("background", background));
|
||||
}
|
||||
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#ifndef SRC_OPERATIONS_H_
|
||||
#define SRC_OPERATIONS_H_
|
||||
@@ -33,7 +22,7 @@ namespace sharp {
|
||||
/*
|
||||
* Stretch luminance to cover full dynamic range.
|
||||
*/
|
||||
VImage Normalise(VImage image);
|
||||
VImage Normalise(VImage image, int const lower, int const upper);
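
The added `lower`/`upper` parameters are percentile bounds for the luminance stretch; at the JavaScript level this is expected to look roughly like the sketch below (the option names are an assumption based on this signature):

sharp('input.jpg')
  .normalise({ lower: 1, upper: 99 })   // stretch between the 1st and 99th luminance percentiles
  .toFile('normalised.jpg');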
|
||||
|
||||
/*
|
||||
* Contrast limiting adaptive histogram equalization (CLAHE)
|
||||
@@ -124,7 +113,7 @@ namespace sharp {
|
||||
* Split into frames, embed each frame, reassemble, and update pageHeight.
|
||||
*/
|
||||
VImage EmbedMultiPage(VImage image, int left, int top, int width, int height,
|
||||
std::vector<double> background, int nPages, int *pageHeight);
|
||||
VipsExtend extendWith, std::vector<double> background, int nPages, int *pageHeight);
|
||||
|
||||
} // namespace sharp
|
||||
|
||||
|
||||
269
src/pipeline.cc
@@ -1,16 +1,5 @@
|
||||
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// Copyright 2013 Lovell Fuller and others.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
@@ -81,35 +70,48 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
int pageHeight = sharp::GetPageHeight(image);
|
||||
|
||||
// Calculate angle of rotation
|
||||
VipsAngle rotation;
|
||||
bool flip = FALSE;
|
||||
bool flop = FALSE;
|
||||
VipsAngle rotation = VIPS_ANGLE_D0;
|
||||
VipsAngle autoRotation = VIPS_ANGLE_D0;
|
||||
bool autoFlip = FALSE;
|
||||
bool autoFlop = FALSE;
|
||||
|
||||
if (baton->useExifOrientation) {
|
||||
// Rotate and flip image according to Exif orientation
|
||||
std::tie(rotation, flip, flop) = CalculateExifRotationAndFlip(sharp::ExifOrientation(image));
|
||||
std::tie(autoRotation, autoFlip, autoFlop) = CalculateExifRotationAndFlip(sharp::ExifOrientation(image));
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
} else {
|
||||
rotation = CalculateAngleRotation(baton->angle);
|
||||
}
|
||||
|
||||
// Rotate pre-extract
|
||||
bool const shouldRotateBefore = baton->rotateBeforePreExtract &&
|
||||
(rotation != VIPS_ANGLE_D0 || flip || flop || baton->rotationAngle != 0.0);
|
||||
(rotation != VIPS_ANGLE_D0 || autoRotation != VIPS_ANGLE_D0 ||
|
||||
autoFlip || baton->flip || autoFlop || baton->flop ||
|
||||
baton->rotationAngle != 0.0);
|
||||
|
||||
if (shouldRotateBefore) {
|
||||
if (autoRotation != VIPS_ANGLE_D0) {
|
||||
image = image.rot(autoRotation);
|
||||
autoRotation = VIPS_ANGLE_D0;
|
||||
}
|
||||
if (autoFlip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
autoFlip = FALSE;
|
||||
} else if (baton->flip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
baton->flip = FALSE;
|
||||
}
|
||||
if (autoFlop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
autoFlop = FALSE;
|
||||
} else if (baton->flop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
baton->flop = FALSE;
|
||||
}
|
||||
if (rotation != VIPS_ANGLE_D0) {
|
||||
image = image.rot(rotation);
|
||||
rotation = VIPS_ANGLE_D0;
|
||||
}
|
||||
if (flip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
}
|
||||
if (flop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
}
|
||||
if (rotation != VIPS_ANGLE_D0 || flip || flop) {
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
}
|
||||
flop = FALSE;
|
||||
flip = FALSE;
|
||||
if (baton->rotationAngle != 0.0) {
|
||||
MultiPageUnsupported(nPages, "Rotate");
|
||||
std::vector<double> background;
|
||||
@@ -150,7 +152,9 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
int targetResizeHeight = baton->height;
|
||||
|
||||
// Swap input output width and height when rotating by 90 or 270 degrees
|
||||
bool swap = !baton->rotateBeforePreExtract && (rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270);
|
||||
bool swap = !baton->rotateBeforePreExtract &&
|
||||
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270 ||
|
||||
autoRotation == VIPS_ANGLE_D90 || autoRotation == VIPS_ANGLE_D270);
|
||||
|
||||
// Shrink to pageHeight, so we work for multi-page images
|
||||
std::tie(hshrink, vshrink) = sharp::ResolveShrink(
|
||||
@@ -191,7 +195,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
if (jpegShrinkOnLoad > 1 && static_cast<int>(shrink) == jpegShrinkOnLoad) {
|
||||
jpegShrinkOnLoad /= 2;
|
||||
}
|
||||
} else if (inputImageType == sharp::ImageType::WEBP && shrink > 1.0) {
|
||||
} else if (inputImageType == sharp::ImageType::WEBP && baton->fastShrinkOnLoad && shrink > 1.0) {
|
||||
// Avoid upscaling via webp
|
||||
scale = 1.0 / shrink;
|
||||
} else if (inputImageType == sharp::ImageType::SVG ||
|
||||
@@ -306,7 +310,8 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
if (
|
||||
sharp::HasProfile(image) &&
|
||||
image.interpretation() != VIPS_INTERPRETATION_LABS &&
|
||||
image.interpretation() != VIPS_INTERPRETATION_GREY16
|
||||
image.interpretation() != VIPS_INTERPRETATION_GREY16 &&
|
||||
!baton->input->ignoreIcc
|
||||
) {
|
||||
// Convert to sRGB/P3 using embedded profile
|
||||
try {
|
||||
@@ -315,7 +320,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
->set("depth", image.interpretation() == VIPS_INTERPRETATION_RGB16 ? 16 : 8)
|
||||
->set("intent", VIPS_INTENT_PERCEPTUAL));
|
||||
} catch(...) {
|
||||
// Ignore failure of embedded profile
|
||||
sharp::VipsWarningCallback(nullptr, G_LOG_LEVEL_WARNING, "Invalid embedded profile", nullptr);
|
||||
}
|
||||
} else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
|
||||
image = image.icc_transform(processingProfile, VImage::option()
|
||||
@@ -353,11 +358,12 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
image = sharp::EnsureAlpha(image, 1);
|
||||
}
|
||||
|
||||
VipsBandFormat premultiplyFormat = image.format();
|
||||
bool const shouldPremultiplyAlpha = sharp::HasAlpha(image) &&
|
||||
(shouldResize || shouldBlur || shouldConv || shouldSharpen);
|
||||
|
||||
if (shouldPremultiplyAlpha) {
|
||||
image = image.premultiply();
|
||||
image = image.premultiply().cast(premultiplyFormat);
|
||||
}
|
||||
|
||||
// Resize
|
||||
@@ -367,34 +373,25 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
->set("kernel", baton->kernel));
|
||||
}
|
||||
|
||||
// Auto-rotate post-extract
|
||||
if (autoRotation != VIPS_ANGLE_D0) {
|
||||
image = image.rot(autoRotation);
|
||||
}
|
||||
// Flip (mirror about Y axis)
|
||||
if (baton->flip || flip) {
|
||||
if (baton->flip || autoFlip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
}
|
||||
|
||||
// Flop (mirror about X axis)
|
||||
if (baton->flop || flop) {
|
||||
if (baton->flop || autoFlop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
}
|
||||
|
||||
// Rotate post-extract 90-angle
|
||||
if (!baton->rotateBeforePreExtract && rotation != VIPS_ANGLE_D0) {
|
||||
if (rotation != VIPS_ANGLE_D0) {
|
||||
image = image.rot(rotation);
|
||||
if (flip) {
|
||||
image = image.flip(VIPS_DIRECTION_VERTICAL);
|
||||
flip = FALSE;
|
||||
}
|
||||
if (flop) {
|
||||
image = image.flip(VIPS_DIRECTION_HORIZONTAL);
|
||||
flop = FALSE;
|
||||
}
|
||||
image = sharp::RemoveExifOrientation(image);
|
||||
}
|
||||
|
||||
// Join additional color channels to the image
|
||||
if (baton->joinChannelIn.size() > 0) {
|
||||
if (!baton->joinChannelIn.empty()) {
|
||||
VImage joinImage;
|
||||
sharp::ImageType joinImageType = sharp::ImageType::UNKNOWN;
|
||||
|
||||
@@ -404,6 +401,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
image = image.bandjoin(joinImage);
|
||||
}
|
||||
image = image.copy(VImage::option()->set("interpretation", baton->colourspace));
|
||||
image = sharp::RemoveGifPalette(image);
|
||||
}
|
||||
|
||||
inputWidth = image.width();
|
||||
@@ -433,7 +431,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
|
||||
image = nPages > 1
|
||||
? sharp::EmbedMultiPage(image,
|
||||
left, top, width, height, background, nPages, &targetPageHeight)
|
||||
left, top, width, height, VIPS_EXTEND_BACKGROUND, background, nPages, &targetPageHeight)
|
||||
: image.embed(left, top, width, height, VImage::option()
|
||||
->set("extend", VIPS_EXTEND_BACKGROUND)
|
||||
->set("background", background));
|
||||
@@ -450,6 +448,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
// Gravity-based crop
|
||||
int left;
|
||||
int top;
|
||||
|
||||
std::tie(left, top) = sharp::CalculateCrop(
|
||||
inputWidth, inputHeight, baton->width, baton->height, baton->position);
|
||||
int width = std::min(inputWidth, baton->width);
|
||||
@@ -460,16 +459,25 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
left, top, width, height, nPages, &targetPageHeight)
|
||||
: image.extract_area(left, top, width, height);
|
||||
} else {
|
||||
int attention_x;
|
||||
int attention_y;
|
||||
|
||||
// Attention-based or Entropy-based crop
|
||||
MultiPageUnsupported(nPages, "Resize strategy");
|
||||
image = image.tilecache(VImage::option()
|
||||
->set("access", VIPS_ACCESS_RANDOM)
|
||||
->set("threaded", TRUE));
|
||||
|
||||
image = image.smartcrop(baton->width, baton->height, VImage::option()
|
||||
->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION));
|
||||
->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION)
|
||||
->set("attention_x", &attention_x)
|
||||
->set("attention_y", &attention_y));
|
||||
baton->hasCropOffset = true;
|
||||
baton->cropOffsetLeft = static_cast<int>(image.xoffset());
|
||||
baton->cropOffsetTop = static_cast<int>(image.yoffset());
|
||||
baton->hasAttentionCenter = true;
|
||||
baton->attentionX = static_cast<int>(attention_x * jpegShrinkOnLoad / scale);
|
||||
baton->attentionY = static_cast<int>(attention_y * jpegShrinkOnLoad / scale);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -498,7 +506,7 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
}
|
||||
|
||||
// Affine transform
|
||||
if (baton->affineMatrix.size() > 0) {
|
||||
if (!baton->affineMatrix.empty()) {
|
||||
MultiPageUnsupported(nPages, "Affine");
|
||||
std::vector<double> background;
|
||||
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground, shouldPremultiplyAlpha);
|
||||
@@ -514,18 +522,29 @@ class PipelineWorker : public Napi::AsyncWorker {
|
||||
|
||||
// Extend edges
|
||||
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
|
||||
std::vector<double> background;
|
||||
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground, shouldPremultiplyAlpha);
|
||||
|
||||
// Embed
|
||||
baton->width = image.width() + baton->extendLeft + baton->extendRight;
|
||||
baton->height = (nPages > 1 ? targetPageHeight : image.height()) + baton->extendTop + baton->extendBottom;
|
||||
|
||||
image = nPages > 1
|
||||
? sharp::EmbedMultiPage(image,
|
||||
baton->extendLeft, baton->extendTop, baton->width, baton->height, background, nPages, &targetPageHeight)
|
||||
: image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
|
||||
VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background));
|
||||
if (baton->extendWith == VIPS_EXTEND_BACKGROUND) {
|
||||
std::vector<double> background;
|
||||
std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground, shouldPremultiplyAlpha);
|
||||
|
||||
image = nPages > 1
|
||||
? sharp::EmbedMultiPage(image,
|
||||
baton->extendLeft, baton->extendTop, baton->width, baton->height,
|
||||
baton->extendWith, background, nPages, &targetPageHeight)
|
||||
: image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
|
||||
VImage::option()->set("extend", baton->extendWith)->set("background", background));
} else {
std::vector<double> ignoredBackground(1);
image = nPages > 1
? sharp::EmbedMultiPage(image,
baton->extendLeft, baton->extendTop, baton->width, baton->height,
baton->extendWith, ignoredBackground, nPages, &targetPageHeight)
: image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
VImage::option()->set("extend", baton->extendWith));
}
}
// Median - must happen before blurring, due to the utility of blurring after thresholding
if (baton->medianSize > 0) {
@@ -567,13 +586,7 @@ class PipelineWorker : public Napi::AsyncWorker {

// Reverse premultiplication after all transformations
if (shouldPremultiplyAlpha) {
image = image.unpremultiply();
// Cast pixel values to integer
if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT);
} else {
image = image.cast(VIPS_FORMAT_UCHAR);
}
image = image.unpremultiply().cast(premultiplyFormat);
}
baton->premultiplied = shouldPremultiplyAlpha;

@@ -655,6 +668,7 @@ class PipelineWorker : public Napi::AsyncWorker {
ys.push_back(top);
}
image = VImage::composite(images, modes, VImage::option()->set("x", xs)->set("y", ys));
image = sharp::RemoveGifPalette(image);
}

// Gamma decoding (brighten)
@@ -669,7 +683,7 @@ class PipelineWorker : public Napi::AsyncWorker {

// Apply normalisation - stretch luminance to cover full dynamic range
if (baton->normalise) {
image = sharp::Normalise(image);
image = sharp::Normalise(image, baton->normaliseLower, baton->normaliseUpper);
}

// Apply contrast limiting adaptive histogram equalization (CLAHE)
@@ -684,6 +698,7 @@ class PipelineWorker : public Napi::AsyncWorker {
std::tie(booleanImage, booleanImageType) = sharp::OpenInput(baton->boolean);
booleanImage = sharp::EnsureColourspace(booleanImage, baton->colourspaceInput);
image = sharp::Boolean(image, booleanImage, baton->booleanOp);
image = sharp::RemoveGifPalette(image);
}

// Apply per-channel Bandbool bitwise operations after all other operations
@@ -696,24 +711,6 @@ class PipelineWorker : public Napi::AsyncWorker {
image = sharp::Tint(image, baton->tintA, baton->tintB);
}

// Extract an image channel (aka vips band)
if (baton->extractChannel > -1) {
if (baton->extractChannel >= image.bands()) {
if (baton->extractChannel == 3 && sharp::HasAlpha(image)) {
baton->extractChannel = image.bands() - 1;
} else {
(baton->err).append("Cannot extract channel from image. Too few channels in image.");
return Error();
}
}
VipsInterpretation const interpretation = sharp::Is16Bit(image.interpretation())
? VIPS_INTERPRETATION_GREY16
: VIPS_INTERPRETATION_B_W;
image = image
.extract_band(baton->extractChannel)
.copy(VImage::option()->set("interpretation", interpretation));
}

// Remove alpha channel, if any
if (baton->removeAlpha) {
image = sharp::RemoveAlpha(image);
@@ -739,6 +736,26 @@ class PipelineWorker : public Napi::AsyncWorker {
}
}

// Extract channel
if (baton->extractChannel > -1) {
if (baton->extractChannel >= image.bands()) {
if (baton->extractChannel == 3 && sharp::HasAlpha(image)) {
baton->extractChannel = image.bands() - 1;
} else {
(baton->err)
.append("Cannot extract channel ").append(std::to_string(baton->extractChannel))
.append(" from image with channels 0-").append(std::to_string(image.bands() - 1));
return Error();
}
}
VipsInterpretation colourspace = sharp::Is16Bit(image.interpretation())
? VIPS_INTERPRETATION_GREY16
: VIPS_INTERPRETATION_B_W;
image = image
.extract_band(baton->extractChannel)
.copy(VImage::option()->set("interpretation", colourspace));
}

// Apply output ICC profile
if (!baton->withMetadataIcc.empty()) {
image = image.icc_transform(
@@ -863,7 +880,10 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("strip", !baton->withMetadata)
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("reoptimise", baton->gifReoptimise)
->set("reuse", baton->gifReuse)
->set("interlace", baton->gifProgressive)
->set("interframe_maxerror", baton->gifInterFrameMaxError)
->set("interpalette_maxerror", baton->gifInterPaletteMaxError)
->set("dither", baton->gifDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
@@ -930,6 +950,21 @@ class PipelineWorker : public Napi::AsyncWorker {
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "dz";
} else if (baton->formatOut == "jxl" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::JXL)) {
// Write JXL to buffer
image = sharp::RemoveAnimationProperties(image);
VipsArea *area = reinterpret_cast<VipsArea*>(image.jxlsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("distance", baton->jxlDistance)
->set("tier", baton->jxlDecodingTier)
->set("effort", baton->jxlEffort)
->set("lossless", baton->jxlLossless)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "jxl";
} else if (baton->formatOut == "raw" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::RAW)) {
// Write raw, uncompressed image data to buffer
@@ -968,6 +1003,7 @@ class PipelineWorker : public Napi::AsyncWorker {
bool const isTiff = sharp::IsTiff(baton->fileOut);
bool const isJp2 = sharp::IsJp2(baton->fileOut);
bool const isHeif = sharp::IsHeif(baton->fileOut);
bool const isJxl = sharp::IsJxl(baton->fileOut);
bool const isDz = sharp::IsDz(baton->fileOut);
bool const isDzZip = sharp::IsDzZip(baton->fileOut);
bool const isV = sharp::IsV(baton->fileOut);
@@ -1043,7 +1079,8 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("strip", !baton->withMetadata)
->set("bitdepth", baton->gifBitdepth)
->set("effort", baton->gifEffort)
->set("reoptimise", baton->gifReoptimise)
->set("reuse", baton->gifReuse)
->set("interlace", baton->gifProgressive)
->set("dither", baton->gifDither));
baton->formatOut = "gif";
} else if (baton->formatOut == "tiff" || (mightMatchInput && isTiff) ||
@@ -1085,6 +1122,17 @@ class PipelineWorker : public Napi::AsyncWorker {
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
->set("lossless", baton->heifLossless));
baton->formatOut = "heif";
} else if (baton->formatOut == "jxl" || (mightMatchInput && isJxl) ||
(willMatchInput && inputImageType == sharp::ImageType::JXL)) {
// Write JXL to file
image = sharp::RemoveAnimationProperties(image);
image.jxlsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("distance", baton->jxlDistance)
->set("tier", baton->jxlDecodingTier)
->set("effort", baton->jxlEffort)
->set("lossless", baton->jxlLossless));
baton->formatOut = "jxl";
} else if (baton->formatOut == "dz" || isDz || isDzZip) {
// Write DZ to file
if (isDzZip) {
@@ -1128,7 +1176,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Handle warnings
std::string warning = sharp::VipsWarningPop();
while (!warning.empty()) {
debuglog.Call({ Napi::String::New(env, warning) });
debuglog.MakeCallback(Receiver().Value(), { Napi::String::New(env, warning) });
warning = sharp::VipsWarningPop();
}

@@ -1157,6 +1205,10 @@ class PipelineWorker : public Napi::AsyncWorker {
info.Set("cropOffsetLeft", static_cast<int32_t>(baton->cropOffsetLeft));
info.Set("cropOffsetTop", static_cast<int32_t>(baton->cropOffsetTop));
}
if (baton->hasAttentionCenter) {
info.Set("attentionX", static_cast<int32_t>(baton->attentionX));
info.Set("attentionY", static_cast<int32_t>(baton->attentionY));
}
if (baton->trimThreshold > 0.0) {
info.Set("trimOffsetLeft", static_cast<int32_t>(baton->trimOffsetLeft));
info.Set("trimOffsetTop", static_cast<int32_t>(baton->trimOffsetTop));
@@ -1170,8 +1222,8 @@ class PipelineWorker : public Napi::AsyncWorker {
// Add buffer size to info
info.Set("size", static_cast<uint32_t>(baton->bufferOutLength));
// Pass ownership of output data to Buffer instance
Napi::Buffer<char> data = Napi::Buffer<char>::New(env, static_cast<char*>(baton->bufferOut),
baton->bufferOutLength, sharp::FreeCallback);
Napi::Buffer<char> data = sharp::NewOrCopyBuffer(env, static_cast<char*>(baton->bufferOut),
baton->bufferOutLength);
Callback().MakeCallback(Receiver().Value(), { env.Null(), data, info });
} else {
// Add file size to info
@@ -1182,7 +1234,7 @@ class PipelineWorker : public Napi::AsyncWorker {
Callback().MakeCallback(Receiver().Value(), { env.Null(), info });
}
} else {
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, baton->err).Value() });
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, sharp::TrimEnd(baton->err)).Value() });
}

// Delete baton
@@ -1200,7 +1252,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Decrement processing task counter
g_atomic_int_dec_and_test(&sharp::counterProcess);
Napi::Number queueLength = Napi::Number::New(env, static_cast<double>(sharp::counterQueue));
queueListener.Call(Receiver().Value(), { queueLength });
queueListener.MakeCallback(Receiver().Value(), { queueLength });
}

private:
@@ -1346,7 +1398,7 @@ class PipelineWorker : public Napi::AsyncWorker {
Napi::Value pipeline(const Napi::CallbackInfo& info) {
// V8 objects are converted to non-V8 types held in the baton struct
PipelineBaton *baton = new PipelineBaton;
Napi::Object options = info[0].As<Napi::Object>();
Napi::Object options = info[size_t(0)].As<Napi::Object>();

// Input
baton->input = sharp::CreateInputDescriptor(options.Get("input").As<Napi::Object>());
@@ -1432,6 +1484,8 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->linearB = sharp::AttrAsVectorOfDouble(options, "linearB");
baton->greyscale = sharp::AttrAsBool(options, "greyscale");
baton->normalise = sharp::AttrAsBool(options, "normalise");
baton->normaliseLower = sharp::AttrAsUint32(options, "normaliseLower");
baton->normaliseUpper = sharp::AttrAsUint32(options, "normaliseUpper");
baton->tintA = sharp::AttrAsDouble(options, "tintA");
baton->tintB = sharp::AttrAsDouble(options, "tintB");
baton->claheWidth = sharp::AttrAsUint32(options, "claheWidth");
@@ -1449,6 +1503,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->extendLeft = sharp::AttrAsInt32(options, "extendLeft");
baton->extendRight = sharp::AttrAsInt32(options, "extendRight");
baton->extendBackground = sharp::AttrAsVectorOfDouble(options, "extendBackground");
baton->extendWith = sharp::AttrAsEnum<VipsExtend>(options, "extendWith", VIPS_TYPE_EXTEND);
baton->extractChannel = sharp::AttrAsInt32(options, "extractChannel");
baton->affineMatrix = sharp::AttrAsVectorOfDouble(options, "affineMatrix");
baton->affineBackground = sharp::AttrAsVectorOfDouble(options, "affineBackground");
@@ -1544,7 +1599,10 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->gifBitdepth = sharp::AttrAsUint32(options, "gifBitdepth");
baton->gifEffort = sharp::AttrAsUint32(options, "gifEffort");
baton->gifDither = sharp::AttrAsDouble(options, "gifDither");
baton->gifReoptimise = sharp::AttrAsBool(options, "gifReoptimise");
baton->gifInterFrameMaxError = sharp::AttrAsDouble(options, "gifInterFrameMaxError");
baton->gifInterPaletteMaxError = sharp::AttrAsDouble(options, "gifInterPaletteMaxError");
baton->gifReuse = sharp::AttrAsBool(options, "gifReuse");
baton->gifProgressive = sharp::AttrAsBool(options, "gifProgressive");
baton->tiffQuality = sharp::AttrAsUint32(options, "tiffQuality");
baton->tiffPyramid = sharp::AttrAsBool(options, "tiffPyramid");
baton->tiffBitdepth = sharp::AttrAsUint32(options, "tiffBitdepth");
@@ -1568,6 +1626,10 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
options, "heifCompression", VIPS_TYPE_FOREIGN_HEIF_COMPRESSION);
baton->heifEffort = sharp::AttrAsUint32(options, "heifEffort");
baton->heifChromaSubsampling = sharp::AttrAsStr(options, "heifChromaSubsampling");
baton->jxlDistance = sharp::AttrAsDouble(options, "jxlDistance");
baton->jxlDecodingTier = sharp::AttrAsUint32(options, "jxlDecodingTier");
baton->jxlEffort = sharp::AttrAsUint32(options, "jxlEffort");
baton->jxlLossless = sharp::AttrAsBool(options, "jxlLossless");
baton->rawDepth = sharp::AttrAsEnum<VipsBandFormat>(options, "rawDepth", VIPS_TYPE_BAND_FORMAT);
// Animated output properties
if (sharp::HasAttr(options, "loop")) {
@@ -1596,9 +1658,12 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->trimThreshold > 0.0 ||
baton->normalise ||
baton->position == 16 || baton->position == 17 ||
baton->angle % 360 != 0 ||
fmod(baton->rotationAngle, 360.0) != 0.0 ||
baton->useExifOrientation
baton->angle != 0 ||
baton->rotationAngle != 0.0 ||
baton->tileAngle != 0 ||
baton->useExifOrientation ||
baton->claheWidth != 0 ||
!baton->affineMatrix.empty()
) {
baton->input->access = VIPS_ACCESS_RANDOM;
}
@@ -1611,7 +1676,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
Napi::Function queueListener = options.Get("queueListener").As<Napi::Function>();

// Join queue for worker thread
Napi::Function callback = info[1].As<Napi::Function>();
Napi::Function callback = info[size_t(1)].As<Napi::Function>();
PipelineWorker *worker = new PipelineWorker(callback, baton, debuglog, queueListener);
worker->Receiver().Set("options", options);
worker->Queue();
@@ -1619,7 +1684,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
// Increment queued task counter
g_atomic_int_inc(&sharp::counterQueue);
Napi::Number queueLength = Napi::Number::New(info.Env(), static_cast<double>(sharp::counterQueue));
queueListener.Call(info.This(), { queueLength });
queueListener.MakeCallback(info.This(), { queueLength });

return info.Env().Undefined();
}
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#ifndef SRC_PIPELINE_H_
#define SRC_PIPELINE_H_

@@ -74,6 +63,9 @@ struct PipelineBaton {
bool hasCropOffset;
int cropOffsetLeft;
int cropOffsetTop;
bool hasAttentionCenter;
int attentionX;
int attentionY;
bool premultiplied;
bool tileCentre;
bool fastShrinkOnLoad;
@@ -107,6 +99,8 @@ struct PipelineBaton {
double gammaOut;
bool greyscale;
bool normalise;
int normaliseLower;
int normaliseUpper;
int claheWidth;
int claheHeight;
int claheMaxSlope;
@@ -122,6 +116,7 @@ struct PipelineBaton {
int extendLeft;
int extendRight;
std::vector<double> extendBackground;
VipsExtend extendWith;
bool withoutEnlargement;
bool withoutReduction;
std::vector<double> affineMatrix;
@@ -163,7 +158,10 @@ struct PipelineBaton {
int gifBitdepth;
int gifEffort;
double gifDither;
bool gifReoptimise;
double gifInterFrameMaxError;
double gifInterPaletteMaxError;
bool gifReuse;
bool gifProgressive;
int tiffQuality;
VipsForeignTiffCompression tiffCompression;
VipsForeignTiffPredictor tiffPredictor;
@@ -180,6 +178,10 @@ struct PipelineBaton {
int heifEffort;
std::string heifChromaSubsampling;
bool heifLossless;
double jxlDistance;
int jxlDecodingTier;
int jxlEffort;
bool jxlLossless;
VipsBandFormat rawDepth;
std::string err;
bool withMetadata;
@@ -229,6 +231,9 @@ struct PipelineBaton {
hasCropOffset(false),
cropOffsetLeft(0),
cropOffsetTop(0),
hasAttentionCenter(false),
attentionX(0),
attentionY(0),
premultiplied(false),
tintA(128.0),
tintB(128.0),
@@ -259,6 +264,8 @@ struct PipelineBaton {
gamma(0.0),
greyscale(false),
normalise(false),
normaliseLower(1),
normaliseUpper(99),
claheWidth(0),
claheHeight(0),
claheMaxSlope(3),
@@ -273,6 +280,7 @@ struct PipelineBaton {
extendLeft(0),
extendRight(0),
extendBackground{ 0.0, 0.0, 0.0, 255.0 },
extendWith(VIPS_EXTEND_BACKGROUND),
withoutEnlargement(false),
withoutReduction(false),
affineMatrix{ 1.0, 0.0, 0.0, 1.0 },
@@ -314,7 +322,10 @@ struct PipelineBaton {
gifBitdepth(8),
gifEffort(7),
gifDither(1.0),
gifReoptimise(false),
gifInterFrameMaxError(0.0),
gifInterPaletteMaxError(3.0),
gifReuse(true),
gifProgressive(false),
tiffQuality(80),
tiffCompression(VIPS_FOREIGN_TIFF_COMPRESSION_JPEG),
tiffPredictor(VIPS_FOREIGN_TIFF_PREDICTOR_HORIZONTAL),
@@ -331,6 +342,10 @@ struct PipelineBaton {
heifEffort(4),
heifChromaSubsampling("4:4:4"),
heifLossless(false),
jxlDistance(1.0),
jxlDecodingTier(0),
jxlEffort(7),
jxlLossless(false),
rawDepth(VIPS_FORMAT_UCHAR),
withMetadata(false),
withMetadataOrientation(-1),
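The pipeline and baton changes above introduce three new option groups: percentile bounds for normalisation (`normaliseLower`/`normaliseUpper`), an `extendWith` strategy for the extend operation, and `jxl*` settings for JPEG XL output. As a rough illustration only, and assuming the public JavaScript wrapper (which is not part of this excerpt) exposes these as `normalise({ lower, upper })` and `extend({ ..., extendWith })`, usage might look like:

```js
'use strict';
const sharp = require('sharp');

(async () => {
  // Stretch luminance between the 1st and 99th percentiles
  // (option names assumed to mirror normaliseLower/normaliseUpper above).
  await sharp('input.jpg')
    .normalise({ lower: 1, upper: 99 })
    .toFile('normalised.jpg');

  // Extend the canvas by mirroring edge pixels instead of filling with a
  // background colour, i.e. VIPS_EXTEND_MIRROR via the new extendWith option.
  await sharp('input.png')
    .extend({ top: 16, bottom: 16, left: 16, right: 16, extendWith: 'mirror' })
    .toFile('extended.png');
})();
```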
15
src/sharp.cc
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#include <napi.h>
#include <vips/vips8>
24
src/stats.cc
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#include <numeric>
#include <vector>
@@ -117,7 +106,7 @@ class StatsWorker : public Napi::AsyncWorker {
// Handle warnings
std::string warning = sharp::VipsWarningPop();
while (!warning.empty()) {
debuglog.Call({ Napi::String::New(env, warning) });
debuglog.MakeCallback(Receiver().Value(), { Napi::String::New(env, warning) });
warning = sharp::VipsWarningPop();
}

@@ -154,7 +143,7 @@ class StatsWorker : public Napi::AsyncWorker {
info.Set("dominant", dominant);
Callback().MakeCallback(Receiver().Value(), { env.Null(), info });
} else {
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, baton->err).Value() });
Callback().MakeCallback(Receiver().Value(), { Napi::Error::New(env, sharp::TrimEnd(baton->err)).Value() });
}

delete baton->input;
@@ -172,16 +161,17 @@ class StatsWorker : public Napi::AsyncWorker {
Napi::Value stats(const Napi::CallbackInfo& info) {
// V8 objects are converted to non-V8 types held in the baton struct
StatsBaton *baton = new StatsBaton;
Napi::Object options = info[0].As<Napi::Object>();
Napi::Object options = info[size_t(0)].As<Napi::Object>();

// Input
baton->input = sharp::CreateInputDescriptor(options.Get("input").As<Napi::Object>());
baton->input->access = VIPS_ACCESS_RANDOM;

// Function to notify of libvips warnings
Napi::Function debuglog = options.Get("debuglog").As<Napi::Function>();

// Join queue for worker thread
Napi::Function callback = info[1].As<Napi::Function>();
Napi::Function callback = info[size_t(1)].As<Napi::Function>();
StatsWorker *worker = new StatsWorker(callback, baton, debuglog);
worker->Receiver().Set("options", options);
worker->Queue();
15
src/stats.h
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#ifndef SRC_STATS_H_
#define SRC_STATS_H_
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#include <cmath>
#include <string>
@@ -30,16 +19,16 @@ Napi::Value cache(const Napi::CallbackInfo& info) {
Napi::Env env = info.Env();

// Set memory limit
if (info[0].IsNumber()) {
vips_cache_set_max_mem(info[0].As<Napi::Number>().Int32Value() * 1048576);
if (info[size_t(0)].IsNumber()) {
vips_cache_set_max_mem(info[size_t(0)].As<Napi::Number>().Int32Value() * 1048576);
}
// Set file limit
if (info[1].IsNumber()) {
vips_cache_set_max_files(info[1].As<Napi::Number>().Int32Value());
if (info[size_t(1)].IsNumber()) {
vips_cache_set_max_files(info[size_t(1)].As<Napi::Number>().Int32Value());
}
// Set items limit
if (info[2].IsNumber()) {
vips_cache_set_max(info[2].As<Napi::Number>().Int32Value());
if (info[size_t(2)].IsNumber()) {
vips_cache_set_max(info[size_t(2)].As<Napi::Number>().Int32Value());
}

// Get memory stats
@@ -69,8 +58,8 @@ Napi::Value cache(const Napi::CallbackInfo& info) {
*/
Napi::Value concurrency(const Napi::CallbackInfo& info) {
// Set concurrency
if (info[0].IsNumber()) {
vips_concurrency_set(info[0].As<Napi::Number>().Int32Value());
if (info[size_t(0)].IsNumber()) {
vips_concurrency_set(info[size_t(0)].As<Napi::Number>().Int32Value());
}
// Get concurrency
return Napi::Number::New(info.Env(), vips_concurrency_get());
@@ -91,8 +80,8 @@ Napi::Value counters(const Napi::CallbackInfo& info) {
*/
Napi::Value simd(const Napi::CallbackInfo& info) {
// Set state
if (info[0].IsBoolean()) {
vips_vector_set_enabled(info[0].As<Napi::Boolean>().Value());
if (info[size_t(0)].IsBoolean()) {
vips_vector_set_enabled(info[size_t(0)].As<Napi::Boolean>().Value());
}
// Get state
return Napi::Boolean::New(info.Env(), vips_vector_isenabled());
@@ -115,7 +104,7 @@ Napi::Value format(const Napi::CallbackInfo& info) {
Napi::Object format = Napi::Object::New(env);
for (std::string const f : {
"jpeg", "png", "webp", "tiff", "magick", "openslide", "dz",
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips", "jp2k"
"ppm", "fits", "gif", "svg", "heif", "pdf", "vips", "jp2k", "jxl"
}) {
// Input
const VipsObjectClass *oc = vips_class_find("VipsOperation", (f + "load").c_str());
@@ -185,10 +174,10 @@ Napi::Value _maxColourDistance(const Napi::CallbackInfo& info) {

// Open input files
VImage image1;
sharp::ImageType imageType1 = sharp::DetermineImageType(info[0].As<Napi::String>().Utf8Value().data());
sharp::ImageType imageType1 = sharp::DetermineImageType(info[size_t(0)].As<Napi::String>().Utf8Value().data());
if (imageType1 != sharp::ImageType::UNKNOWN) {
try {
image1 = VImage::new_from_file(info[0].As<Napi::String>().Utf8Value().c_str());
image1 = VImage::new_from_file(info[size_t(0)].As<Napi::String>().Utf8Value().c_str());
} catch (...) {
throw Napi::Error::New(env, "Input file 1 has corrupt header");
}
@@ -196,10 +185,10 @@ Napi::Value _maxColourDistance(const Napi::CallbackInfo& info) {
throw Napi::Error::New(env, "Input file 1 is of an unsupported image format");
}
VImage image2;
sharp::ImageType imageType2 = sharp::DetermineImageType(info[1].As<Napi::String>().Utf8Value().data());
sharp::ImageType imageType2 = sharp::DetermineImageType(info[size_t(1)].As<Napi::String>().Utf8Value().data());
if (imageType2 != sharp::ImageType::UNKNOWN) {
try {
image2 = VImage::new_from_file(info[1].As<Napi::String>().Utf8Value().c_str());
image2 = VImage::new_from_file(info[size_t(1)].As<Napi::String>().Utf8Value().c_str());
} catch (...) {
throw Napi::Error::New(env, "Input file 2 has corrupt header");
}
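With `jxlsave`/`jxlsave_buffer` wired into the pipeline and `"jxl"` appended to the format list above, JPEG XL becomes reportable via `sharp.format` and selectable as an output format. A hedged sketch only, assuming a `.jxl()` output method whose option names mirror the baton fields (`distance`, `effort`, `lossless`) and a libvips build that actually includes the JXL saver:

```js
'use strict';
const sharp = require('sharp');

(async () => {
  // Guard on runtime support: 'jxl' only appears in sharp.format when the
  // underlying libvips was built with the JPEG XL saver (assumption).
  if (sharp.format.jxl) {
    const data = await sharp('input.png')
      .jxl({ distance: 1.0, effort: 7, lossless: false })
      .toBuffer();
    console.log('JXL output size:', data.length, 'bytes');
  }
})();
```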
@@ -1,16 +1,5 @@
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

#ifndef SRC_UTILITIES_H_
#define SRC_UTILITIES_H_
24
test/beforeEach.js
Normal file
@@ -0,0 +1,24 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const sharp = require('../');

const usingCache = !process.env.G_DEBUG;
const usingSimd = !process.env.VIPS_NOVECTOR;
const concurrency = Number(process.env.VIPS_CONCURRENCY) || 0;

exports.mochaHooks = {
  beforeEach () {
    sharp.cache(usingCache);
    sharp.simd(usingSimd);
    sharp.concurrency(concurrency);
  },

  afterEach () {
    if (global.gc) {
      global.gc();
    }
  }
};
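The new `test/beforeEach.js` exports a Mocha root hook plugin that resets cache, SIMD and concurrency state around every test. Mocha picks up an exported `mochaHooks` object via its `--require` flag or the `require` key of a `.mocharc` file; the project's actual Mocha configuration is not shown in this excerpt, so the wiring below is only an assumed example:

```js
// .mocharc.js (assumed example, not taken from this diff)
module.exports = {
  // Register the root hooks exported by test/beforeEach.js
  require: 'test/beforeEach.js'
};
```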
@@ -9,7 +9,7 @@ RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
RUN apt-get install -y nodejs

# Install benchmark dependencies
RUN apt-get install -y imagemagick libmagick++-dev graphicsmagick libmapnik-dev
RUN apt-get install -y imagemagick libmagick++-dev graphicsmagick

# Install sharp
RUN mkdir /tmp/sharp
@@ -17,7 +17,7 @@ RUN cd /tmp && git clone --single-branch --branch $BRANCH https://github.com/lov
RUN cd /tmp/sharp && npm install --build-from-source

# Install benchmark test
RUN cd /tmp/sharp/test/bench && npm install
RUN cd /tmp/sharp/test/bench && npm install --omit optional

RUN cat /etc/os-release | grep VERSION=
RUN node -v
@@ -7,17 +7,19 @@
"scripts": {
"test": "node perf && node random && node parallel"
},
"devDependencies": {
"@squoosh/cli": "0.7.2",
"dependencies": {
"@squoosh/cli": "0.7.3",
"@squoosh/lib": "0.4.0",
"@tensorflow/tfjs-node": "3.20.0",
"async": "3.2.4",
"benchmark": "2.1.4",
"gm": "1.24.0",
"gm": "1.25.0",
"imagemagick": "0.1.3",
"jimp": "0.16.2",
"mapnik": "4.5.9",
"semver": "7.3.7"
"jimp": "0.22.7",
"semver": "7.3.8"
},
"optionalDependencies": {
"@tensorflow/tfjs-node": "4.2.0",
"mapnik": "4.5.9"
},
"license": "Apache-2.0",
"engines": {
@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

process.env.UV_THREADPOOL_SIZE = 64;

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const os = require('os');
@@ -7,15 +10,22 @@ const { exec } = require('child_process');
const async = require('async');
const Benchmark = require('benchmark');

const safeRequire = (name) => {
try {
return require(name);
} catch (err) {}
return null;
};

// Contenders
const sharp = require('../../');
const gm = require('gm');
const imagemagick = require('imagemagick');
const mapnik = require('mapnik');
const mapnik = safeRequire('mapnik');
const jimp = require('jimp');
const squoosh = require('@squoosh/lib');
process.env.TF_CPP_MIN_LOG_LEVEL = 1;
const tfjs = require('@tensorflow/tfjs-node');
const tfjs = safeRequire('@tensorflow/tfjs-node');

const fixtures = require('../fixtures');

@@ -25,6 +35,7 @@ const outputWebP = fixtures.path('output.webp');

const width = 720;
const height = 588;
const heightPng = 540;

// Disable libvips cache to ensure tests are as fair as they can be
sharp.cache(false);
@@ -138,7 +149,7 @@ async.series({
}
});
// mapnik
jpegSuite.add('mapnik-file-file', {
mapnik && jpegSuite.add('mapnik-file-file', {
defer: true,
fn: function (deferred) {
mapnik.Image.open(fixtures.inputJpg, function (err, img) {
@@ -253,7 +264,7 @@ async.series({
}
});
// tfjs
jpegSuite.add('tfjs-node-buffer-buffer', {
tfjs && jpegSuite.add('tfjs-node-buffer-buffer', {
defer: true,
fn: function (deferred) {
const decoded = tfjs.node.decodeJpeg(inputJpgBuffer);
@@ -528,10 +539,10 @@ async.series({
}
});
}
}).add('sharp-sequentialRead', {
}).add('sharp-random-access-read', {
defer: true,
fn: function (deferred) {
sharp(inputJpgBuffer, { sequentialRead: true })
sharp(inputJpgBuffer, { sequentialRead: false })
.resize(width, height)
.toBuffer(function (err) {
if (err) {
@@ -641,7 +652,7 @@ async.series({
throw err;
} else {
image
.resize(width, height)
.resize(width, heightPng)
.deflateLevel(6)
.filterType(0)
.getBuffer(jimp.MIME_PNG, function (err) {
@@ -662,7 +673,7 @@ async.series({
throw err;
} else {
image
.resize(width, height)
.resize(width, heightPng)
.deflateLevel(6)
.filterType(0)
.write(outputPng, function (err) {
@@ -676,21 +687,74 @@ async.series({
});
}
});
// squoosh-cli
pngSuite.add('squoosh-cli-file-file', {
defer: true,
fn: function (deferred) {
exec(`./node_modules/.bin/squoosh-cli \
--output-dir ${os.tmpdir()} \
--resize '{"enabled":true,"width":${width},"height":${heightPng},"method":"lanczos3","premultiply":true,"linearRGB":false}' \
--oxipng '{"level":1}' \
"${fixtures.inputPngAlphaPremultiplicationLarge}"`, function (err) {
if (err) {
throw err;
}
deferred.resolve();
});
}
});
// squoosh-lib (GPLv3)
pngSuite.add('squoosh-lib-buffer-buffer', {
defer: true,
fn: function (deferred) {
const pool = new squoosh.ImagePool();
const image = pool.ingestImage(inputPngBuffer);
image.decoded
.then(function () {
return image.preprocess({
resize: {
enabled: true,
width,
height: heightPng,
method: 'lanczos3',
premultiply: true,
linearRGB: false
}
});
})
.then(function () {
return image.encode({
oxipng: {
level: 1
}
});
})
.then(function () {
return pool.close();
})
.then(function () {
return image.encodedWith.oxipng;
})
.then(function () {
deferred.resolve();
});
}
});
// mapnik
pngSuite.add('mapnik-file-file', {
mapnik && pngSuite.add('mapnik-file-file', {
defer: true,
fn: function (deferred) {
mapnik.Image.open(fixtures.inputPngAlphaPremultiplicationLarge, function (err, img) {
if (err) throw err;
img.premultiply(function (err, img) {
if (err) throw err;
img.resize(width, height, {
img.resize(width, heightPng, {
scaling_method: mapnik.imageScaling.lanczos
}, function (err, img) {
if (err) throw err;
img.demultiply(function (err, img) {
if (err) throw err;
img.save(outputPng, 'png', function (err) {
img.save(outputPng, 'png32:f=no:z=6', function (err) {
if (err) throw err;
deferred.resolve();
});
@@ -706,13 +770,13 @@ async.series({
if (err) throw err;
img.premultiply(function (err, img) {
if (err) throw err;
img.resize(width, height, {
img.resize(width, heightPng, {
scaling_method: mapnik.imageScaling.lanczos
}, function (err, img) {
if (err) throw err;
img.demultiply(function (err, img) {
if (err) throw err;
img.encode('png', function (err) {
img.encode('png32:f=no:z=6', function (err) {
if (err) throw err;
deferred.resolve();
});
@@ -730,7 +794,7 @@ async.series({
srcPath: fixtures.inputPngAlphaPremultiplicationLarge,
dstPath: outputPng,
width: width,
height: height,
height: heightPng,
filter: 'Lanczos',
customArgs: [
'-define', 'PNG:compression-level=6',
@@ -751,7 +815,7 @@ async.series({
fn: function (deferred) {
gm(fixtures.inputPngAlphaPremultiplicationLarge)
.filter('Lanczos')
.resize(width, height)
.resize(width, heightPng)
.define('PNG:compression-level=6')
.define('PNG:compression-filter=0')
.write(outputPng, function (err) {
@@ -767,7 +831,7 @@ async.series({
fn: function (deferred) {
gm(fixtures.inputPngAlphaPremultiplicationLarge)
.filter('Lanczos')
.resize(width, height)
.resize(width, heightPng)
.define('PNG:compression-level=6')
.define('PNG:compression-filter=0')
.toBuffer(function (err) {
@@ -785,7 +849,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(inputPngBuffer)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 6 })
.toFile(outputPng, function (err) {
if (err) {
@@ -800,9 +864,9 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(inputPngBuffer)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 6 })
.toBuffer(function (err) {
.toBuffer(function (err, data) {
if (err) {
throw err;
} else {
@@ -815,7 +879,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(fixtures.inputPngAlphaPremultiplicationLarge)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 6 })
.toFile(outputPng, function (err) {
if (err) {
@@ -830,7 +894,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(fixtures.inputPngAlphaPremultiplicationLarge)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 6 })
.toBuffer(function (err) {
if (err) {
@@ -845,7 +909,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(inputPngBuffer)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 6, progressive: true })
.toBuffer(function (err) {
if (err) {
@@ -860,7 +924,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(inputPngBuffer)
.resize(width, height)
.resize(width, heightPng)
.png({ adaptiveFiltering: true, compressionLevel: 6 })
.toBuffer(function (err) {
if (err) {
@@ -875,7 +939,7 @@ async.series({
minSamples,
fn: function (deferred) {
sharp(inputPngBuffer)
.resize(width, height)
.resize(width, heightPng)
.png({ compressionLevel: 9 })
.toBuffer(function (err) {
if (err) {

@@ -1,3 +1,6 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const imagemagick = require('imagemagick');
BIN test/fixtures/cielab-dagams.tiff (vendored)
BIN test/fixtures/expected/alpha-layer-1-fill-linear.png (vendored) | Before: 154 KiB, After: 12 KiB
BIN test/fixtures/expected/alpha-layer-1-fill-offset.png (vendored) | Before: 112 KiB, After: 9.2 KiB
BIN test/fixtures/expected/alpha-layer-1-fill-slope.png (vendored) | Before: 179 KiB, After: 13 KiB
BIN test/fixtures/expected/crop-strategy.webp (vendored, Normal file) | After: 7.0 KiB
BIN test/fixtures/expected/extend-2channel-background.png (vendored, Normal file) | After: 34 KiB
BIN test/fixtures/expected/extend-2channel-copy.png (vendored, Normal file) | After: 34 KiB
BIN test/fixtures/expected/extend-2channel-mirror.png (vendored, Normal file) | After: 42 KiB
BIN test/fixtures/expected/extend-2channel-repeat.png (vendored, Normal file) | After: 39 KiB
BIN test/fixtures/expected/extend-2channel.png (vendored) | Before: 32 KiB
Before: 4.1 KiB, After: 4.1 KiB
BIN test/fixtures/expected/extend-equal-background.webp (vendored, Normal file) | After: 9.2 KiB
BIN test/fixtures/expected/extend-equal-copy.jpg (vendored, Normal file) | After: 3.9 KiB
BIN test/fixtures/expected/extend-equal-copy.webp (vendored, Normal file) | After: 9.1 KiB
BIN test/fixtures/expected/extend-equal-mirror.jpg (vendored, Normal file) | After: 4.3 KiB
BIN test/fixtures/expected/extend-equal-mirror.webp (vendored, Normal file) | After: 23 KiB
BIN test/fixtures/expected/extend-equal-repeat.jpg (vendored, Normal file) | After: 4.7 KiB
BIN test/fixtures/expected/extend-equal-repeat.webp (vendored, Normal file) | After: 24 KiB
Before: 22 KiB, After: 22 KiB
BIN test/fixtures/expected/extend-unequal-copy.png (vendored, Normal file) | After: 26 KiB
BIN test/fixtures/expected/extend-unequal-mirror.png (vendored, Normal file) | After: 38 KiB
BIN test/fixtures/expected/extend-unequal-repeat.png (vendored, Normal file) | After: 36 KiB
BIN test/fixtures/expected/extract-alpha-16bit.png (vendored) | Before: 262 B, After: 255 B
BIN test/fixtures/expected/extract-lch.jpg (vendored) | Before: 13 KiB
BIN test/fixtures/expected/modulate-hue-angle-120.png (vendored) | Before: 29 KiB, After: 301 KiB
BIN test/fixtures/expected/modulate-hue-angle-150.png (vendored) | Before: 30 KiB, After: 301 KiB
BIN test/fixtures/expected/modulate-hue-angle-180.png (vendored) | Before: 29 KiB, After: 301 KiB
BIN test/fixtures/expected/modulate-hue-angle-210.png (vendored) | Before: 29 KiB, After: 301 KiB
BIN test/fixtures/expected/modulate-hue-angle-240.png (vendored) | Before: 30 KiB, After: 301 KiB
BIN test/fixtures/expected/modulate-hue-angle-270.png (vendored) | Before: 30 KiB, After: 301 KiB