Compare commits

...

75 Commits

Author SHA1 Message Date
Lovell Fuller
05d76eeadf Release v0.22.1 2019-04-25 12:14:45 +01:00
Lovell Fuller
28a6c53da0 Prevent issue templates being published to npm 2019-04-25 12:14:17 +01:00
Lovell Fuller
6fcd2153c5 Docs: add support for Node 12 #1668 2019-04-25 12:01:14 +01:00
Lovell Fuller
7ae0512b9b Docs: clarify some output features require custom libvips 2019-04-25 11:39:30 +01:00
Nicolas Stepien
0890b59c32 Add Node 12 to Windows CI (#1669) 2019-04-25 11:24:35 +01:00
Lovell Fuller
df3ce450d9 Bump all dependencies 2019-04-25 09:51:59 +01:00
Lovell Fuller
bb0257b318 Remove not-yet-available Node 12 from Windows CI 2019-04-25 09:51:03 +01:00
Lovell Fuller
9c3597670d Add Node 12 to CI 2019-04-25 08:42:13 +01:00
Lovell Fuller
aa9b328778 Remove use of deprecated V8 API, swap v8::Handle for v8::Local #1668 2019-04-23 20:16:31 +01:00
tyankatsu
159e8dace2 docs: Fix path to CONTRIBUTING.md (#1666) 2019-04-20 19:19:14 +01:00
Lovell Fuller
3be4d5bb45 Ensure limitInputPixels check uses 64-bit unsigned type 2019-04-20 17:51:19 +01:00
Lovell Fuller
af7caa7b25 Docs: modernise contributing guide 2019-04-02 22:33:24 +01:00
Lovell Fuller
b4ede75522 Add issue templates (#1639) 2019-04-02 22:32:06 +01:00
Lovell Fuller
9d98114074 Move contributing info to GitHub-specific subdirectory,
which then allows for future possible issue/PR templates etc.
2019-04-02 21:03:57 +01:00
Lovell Fuller
4ac51899c3 Docs: minimum supported version of Node.js is 6 2019-04-02 20:41:23 +01:00
Lovell Fuller
90a0382317 Tests: use a concurrency of 1 on musl-based Linux
Should reduce a bit of pressure on the stack
2019-04-02 20:40:36 +01:00
Lovell Fuller
687795c801 Enhancement to and changelog entry for #1638
Remove bindings dependency as this isn't really... required
2019-04-02 20:39:06 +01:00
Sidhartha Chatterjee
2e0fbbb942 Add warning if sharp bindings aren't built correctly (#1638) 2019-04-02 17:00:55 +01:00
Lovell Fuller
0a3512d066 Unpin prebuild as 8.2.x is no longer breaking 2019-04-01 20:51:45 +01:00
Lovell Fuller
6032171f91 Docs: clarify use of integral pixel values for extract 2019-04-01 20:06:46 +01:00
Lovell Fuller
fc178de309 Changelog entry for #1601 2019-03-25 08:32:20 +00:00
Jakub Michálek
3f4398457f Change docs for composite to reflect how create works (#1623) 2019-03-25 07:45:46 +00:00
Jakub Michálek
b494b2e872 Add brightness, saturation and hue modulation #609 (#1601) 2019-03-25 07:44:07 +00:00
Lovell Fuller
18afcf5f90 Release v0.22.0 2019-03-18 23:26:39 +00:00
Lovell Fuller
87a422942d Pin prebuild due to breaking change in 8.2.0 2019-03-18 23:10:33 +00:00
Lovell Fuller
ac515121e5 Release v0.22.0 2019-03-18 21:31:46 +00:00
Lovell Fuller
2bfea0ad76 Docs: refresh usage examples 2019-03-18 21:29:17 +00:00
Lovell Fuller
83cdb558f6 Allow Stream-based input of raw pixel data #1579 2019-03-18 20:15:18 +00:00
Lovell Fuller
9ee377963e Improve error message if libvips tarball is corrupt 2019-03-17 23:07:58 +00:00
Lovell Fuller
9cc06c887b Add support for pages option for multi-page input #1566 2019-03-17 16:37:27 +00:00
Lovell Fuller
7457b50373 Remove unused shared library 2019-03-15 15:58:25 +00:00
Lovell Fuller
6387fb79b1 Small improvements to input and install docs, bump deps 2019-03-15 15:48:55 +00:00
Lovell Fuller
54e5514b9a Bump dependencies to latest 2019-03-10 18:14:43 +00:00
Lovell Fuller
1e4597c284 Changelog entry for #1595 (plus add GIF) 2019-03-10 17:26:26 +00:00
Lovell Fuller
7cafd4386c Add composite op, supporting multiple images and blend modes #728 2019-03-09 22:46:23 +00:00
Lovell Fuller
e3549ba28c Remove functions previously deprecated in v0.21.0
background, crop, embed, ignoreAspectRatio, max, min, withoutEnlargement
2019-03-01 23:43:35 +00:00
Lovell Fuller
d1bbe62e52 Rename armv8 as arm64v8 to match Node's process.arch 2019-03-01 23:43:35 +00:00
Lovell Fuller
36af74a09b Upgrade to libvips v8.7.4 2019-03-01 23:43:35 +00:00
Fabrizio Ruggeri
5afe02be60 Allow page input option to be set for PDF (#1595) 2019-03-01 23:29:34 +00:00
Jack Cross
2262959673 Docs: add missing comma to extend example (#1588) 2019-02-27 11:03:02 +00:00
Lovell Fuller
ba3f914445 Document support for animated WebP in metadata pages 2019-01-27 21:01:49 +00:00
Lovell Fuller
770be35c44 Tests: add a couple of extra leak suppressions for Node 2019-01-27 20:48:17 +00:00
Lovell Fuller
cc9f2b90fd Docs: use absolute URL for logo 2019-01-19 15:15:22 +00:00
Lovell Fuller
4aff57b071 Release v0.21.3 2019-01-19 14:25:37 +00:00
Maxime BACONNAIS
1df8d82fe0 Docs: overlay parameter of overlayWith is optional (#1547) 2019-01-19 14:19:41 +00:00
Lovell Fuller
98e90784f4 Docs: overlay parameter of overlayWith is optional 2019-01-19 14:11:54 +00:00
Lovell Fuller
87ea54cc66 Bump devDependencies 2019-01-19 14:06:16 +00:00
Lovell Fuller
d5e98bc8ad Split file-based input errors into missing vs invalid #1542 2019-01-19 11:59:36 +00:00
Lovell Fuller
fa69ff773a Input image decoding fail fast by default 2019-01-18 19:25:55 +00:00
Lovell Fuller
a183bb1cac Add valgrind memory leak suppressions 2019-01-18 12:08:28 +00:00
Lovell Fuller
cf62372cab Install: log the fallback to build from source
https://github.com/lovell/sharp-libvips/issues/18
2019-01-14 19:33:01 +00:00
Lovell Fuller
56fa9c95a1 Release v0.21.2 2019-01-13 10:26:47 +00:00
Lovell Fuller
32a34a8841 Tests: separate IO suite into per-format unit files 2019-01-13 10:11:32 +00:00
Lovell Fuller
98797445de Expose PNG output options requiring libimagequant #1484 2019-01-13 09:06:05 +00:00
Lovell Fuller
bd377438b6 Ignore colour profiles in LAB images as they are already LAB 2019-01-12 18:13:43 +00:00
Lovell Fuller
9dd6510de6 Expose underlying error message for invalid input #1505 2019-01-12 16:10:25 +00:00
Lovell Fuller
93ad9d4a4a Ensure all metadata removed from PNG unless withMetadata used 2019-01-09 21:17:53 +00:00
Lovell Fuller
4c01a099ea Add ensureAlpha op, adds alpha channel if missing #1153 2019-01-05 21:12:33 +00:00
Lovell Fuller
8e70579e47 Docs: use HTTPS links where available 2019-01-04 21:25:21 +00:00
Lovell Fuller
ee8bfa3980 Add 2019 to list of years of copyright 2019-01-04 16:05:26 +00:00
Lovell Fuller
c5dfa49cae Docs: add sharp logo to readme 2019-01-04 15:56:22 +00:00
Lovell Fuller
0822404129 Docs: expand logo viewBox to prevent clipping 2019-01-04 15:54:33 +00:00
Lovell Fuller
144f39cd45 Docs: add sharp logo, CC BY-SA 4.0 2019-01-04 15:12:53 +00:00
Lovell Fuller
87f191fd05 Node 11 now supported by nodejs/nan 2019-01-03 14:01:18 +00:00
Lovell Fuller
37ed436202 Version bump of devDependencies 2019-01-03 13:26:56 +00:00
Lovell Fuller
88e490356d Test: remove stray console.log 2019-01-03 12:35:54 +00:00
Lovell Fuller
7c631c0787 Ensure shortest resized edge is >= 1px #1003 2019-01-03 12:01:55 +00:00
Lovell Fuller
f5d3721fe0 Doc refresh for #1205 2019-01-02 19:04:49 +00:00
Lovell Fuller
cc633589d9 Expose pages metadata for multi-page input images #1205 2019-01-01 22:10:27 +00:00
Lovell Fuller
cc1d4c1a6d Expose palette-bit-depth metadata, requires upcoming libvips v8.8.0 2019-01-01 21:02:00 +00:00
Lovell Fuller
30ca424942 Apply correct forced output when chaining #1528 2019-01-01 18:40:09 +00:00
Pascal Temel
813831acf0 Docs: Update deprecated overlayWith example (#1526) 2018-12-30 20:44:36 +00:00
Lovell Fuller
a54fe9f77c Prevent mutation of jpeg options #1516 2018-12-21 19:54:33 +00:00
Amila Welihinda
8c6da5548a Docs: change repo badge to SVG (#1498) 2018-12-10 11:03:03 +01:00
Lovell Fuller
a2aa7d69e7 Add error handler to download stream lovell/sharp-libvips#14 2018-12-08 13:56:05 +00:00
97 changed files with 2959 additions and 2805 deletions

View File

@@ -12,9 +12,12 @@ New bugs are assigned a `triage` label whilst under investigation.
## Submit a new feature request
If a [similar request](https://github.com/lovell/sharp/labels/enhancement) exists, it's probably fastest to add a comment to it about your requirement.
If a [similar request](https://github.com/lovell/sharp/labels/enhancement) exists,
it's probably fastest to add a comment to it about your requirement.
Implementation is usually straightforward if _libvips_ [already supports](https://libvips.github.io/libvips/API/current/) the feature you need.
Implementation is usually straightforward if libvips
[already supports](https://libvips.github.io/libvips/API/current/func-list.html)
the feature you need.
## Submit a Pull Request to fix a bug
@@ -41,18 +44,18 @@ Any change that modifies the existing public API should be added to the relevant
| Release | WIP branch |
| ------: | :--------- |
| v0.21.0 | teeth |
| v0.22.0 | uptake |
| v0.23.0 | vision |
| v0.24.0 | wit |
Please squash your changes into a single commit using a command like `git rebase -i upstream/<wip-branch>`.
### Add a new public method
The API tries to be as fluent as possible. Image processing concepts follow the naming conventions from _libvips_ and, to a lesser extent, _ImageMagick_.
The API tries to be as fluent as possible.
Image processing concepts follow the naming conventions from libvips and, to a lesser extent, ImageMagick.
Most methods have optional parameters and assume sensible defaults. Methods with mandatory parameters often have names like `doSomethingWith(X)`.
Please ensure backwards compatibility where possible. Methods to modify previously default behaviour often have names like `withoutOptionY()` or `withExtraZ()`.
Most methods have optional parameters and assume sensible defaults.
Please ensure backwards compatibility where possible.
Feel free to create a [new issue](https://github.com/lovell/sharp/issues/new) to gather feedback on a potential API change.
@@ -60,7 +63,7 @@ Feel free to create a [new issue](https://github.com/lovell/sharp/issues/new) to
A method to be removed should be deprecated in the next major version then removed in the following major version.
By way of example, the [bilinearInterpolation method](https://github.com/lovell/sharp/blob/v0.6.0/index.js#L155) present in v0.5.0 was deprecated in v0.6.0 and removed in v0.7.0.
By way of example, the `background()` method present in v0.20.0 was deprecated in v0.21.0 and removed in v0.22.0.
## Documentation

View File

@@ -0,0 +1,18 @@
---
name: Feature request
about: Suggest an idea
title: ''
labels: enhancement
assignees: ''
---
What are you trying to achieve?
Have you searched for similar feature requests?
What would you expect the API to look like?
What alternatives have you considered?
Is there a sample image that helps explain?

.github/ISSUE_TEMPLATE/installation.md vendored Normal file
View File

@@ -0,0 +1,14 @@
---
name: Installation
about: For help if something went wrong installing sharp
title: ''
labels: ''
assignees: ''
---
What is the output of running `npm install --verbose sharp`?
What is the output of running `npx envinfo --binaries --languages --system --utilities`?
Have you ensured the platform and version of Node.js used for `npm install` is the same as the platform and version of Node.js used at runtime?

.github/ISSUE_TEMPLATE/possible-bug.md vendored Normal file
View File

@@ -0,0 +1,18 @@
---
name: Possible bug
about: Please provide steps to reproduce
title: ''
labels: ''
assignees: ''
---
What is the output of running `npx envinfo --binaries --languages --system --utilities`?
What are the steps to reproduce?
What is the expected behaviour?
Are you able to provide a standalone code sample, without other dependencies, that demonstrates this problem?
Are you able to provide a sample image that helps explain the problem?

.github/ISSUE_TEMPLATE/question.md vendored Normal file
View File

@@ -0,0 +1,16 @@
---
name: Question
about: For help with an existing feature
title: ''
labels: question
assignees: ''
---
What are you trying to achieve?
Have you searched for similar questions?
Are you able to provide a standalone code sample that demonstrates this question?
Are you able to provide a sample image that helps explain the question?

View File

@@ -12,4 +12,4 @@ docs/css/
vendor
.prebuildrc
.nyc_output
CONTRIBUTING.md
.github/

View File

@@ -18,17 +18,21 @@ matrix:
sudo: false
language: node_js
node_js: "10"
- name: "Linux (glibc) - Node 12"
os: linux
dist: trusty
sudo: false
language: node_js
node_js: "12"
after_success:
- npm install coveralls
- cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js
- name: "Linux (glibc) - Node 11 (Experimental)"
- name: "Linux (glibc) - Node 11"
os: linux
dist: trusty
sudo: false
language: node_js
node_js: "11"
before_install:
- unset PREBUILD_TOKEN
- name: "Linux (musl) - Node 8"
os: linux
dist: trusty
@@ -49,13 +53,23 @@ matrix:
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "Linux (musl) - Node 11 (Experimental)"
- name: "Linux (musl) - Node 11"
os: linux
dist: trusty
sudo: true
language: minimal
before_install:
- sudo docker run -dit --name sharp --env CI --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:11-alpine
- sudo docker run -dit --name sharp --env CI --env PREBUILD_TOKEN --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:11-alpine
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "Linux (musl) - Node 12"
os: linux
dist: trusty
sudo: true
language: minimal
before_install:
- sudo docker run -dit --name sharp --env CI --env PREBUILD_TOKEN --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:12.0-alpine
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
@@ -74,10 +88,13 @@ matrix:
osx_image: xcode9.2
language: node_js
node_js: "10"
- name: "OS X - Node 11 (Experimental)"
- name: "OS X - Node 11"
os: osx
osx_image: xcode9.2
language: node_js
node_js: "11"
before_install:
- unset PREBUILD_TOKEN
- name: "OS X - Node 12"
os: osx
osx_image: xcode9.2
language: node_js
node_js: "12"

View File

@@ -1,13 +1,11 @@
# sharp
<img src="https://raw.githubusercontent.com/lovell/sharp/master/docs/image/sharp-logo.svg?sanitize=true" width="160" height="160" alt="sharp logo" align="right">
```sh
npm install sharp
```
```sh
yarn add sharp
```
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
@@ -22,7 +20,7 @@ As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern 64-bit OS X, Windows and Linux systems running
Node versions 6, 8 and 10
Node versions 6, 8, 10, 11 and 12
do not require any additional install or runtime dependencies.
## Examples
@@ -31,22 +29,42 @@ do not require any additional install or runtime dependencies.
const sharp = require('sharp');
```
### Callback
```javascript
sharp(inputBuffer)
.resize(320, 240)
.toFile('output.webp', (err, info) => ... );
// A Promises/A+ promise is returned when callback is not provided.
.toFile('output.webp', (err, info) => { ... });
```
### Promise
```javascript
sharp('input.jpg')
.rotate()
.resize(200)
.toBuffer()
.then( data => ... )
.catch( err => ... );
.then( data => { ... })
.catch( err => { ... });
```
### Async/await
```javascript
const semiTransparentRedPng = await sharp({
create: {
width: 48,
height: 48,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 0.5 }
}
})
.png()
.toBuffer();
```
### Stream
```javascript
const roundedCorners = Buffer.from(
'<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'
@@ -55,7 +73,10 @@ const roundedCorners = Buffer.from(
const roundedCornerResizer =
sharp()
.resize(200, 200)
.overlayWith(roundedCorners, { cutout: true })
.composite([{
input: roundedCorners,
blend: 'dest-in'
}])
.png();
readableStream
@@ -63,29 +84,29 @@ readableStream
.pipe(writableStream);
```
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.png?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.svg?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
### Documentation
Visit [sharp.pixelplumbing.com](http://sharp.pixelplumbing.com/) for complete
[installation instructions](http://sharp.pixelplumbing.com/page/install),
[API documentation](http://sharp.pixelplumbing.com/page/api),
[benchmark tests](http://sharp.pixelplumbing.com/page/performance) and
[changelog](http://sharp.pixelplumbing.com/page/changelog).
Visit [sharp.pixelplumbing.com](https://sharp.pixelplumbing.com/) for complete
[installation instructions](https://sharp.pixelplumbing.com/page/install),
[API documentation](https://sharp.pixelplumbing.com/page/api),
[benchmark tests](https://sharp.pixelplumbing.com/page/performance) and
[changelog](https://sharp.pixelplumbing.com/page/changelog).
### Contributing
A [guide for contributors](https://github.com/lovell/sharp/blob/master/CONTRIBUTING.md)
A [guide for contributors](https://github.com/lovell/sharp/blob/master/.github/CONTRIBUTING.md)
covers reporting bugs, requesting features and submitting code changes.
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,

View File

@@ -8,10 +8,10 @@ environment:
- nodejs_version: "8"
- nodejs_version: "10"
- nodejs_version: "11"
PREBUILD_TOKEN: ""
- nodejs_version: "12"
install:
- ps: Install-Product node $env:nodejs_version x64
- npm install -g npm@5
- ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version) x64
- npm install -g npm@6
- npm install
test_script:
- npm test

View File

@@ -140,7 +140,6 @@
'../vendor/lib/libgsf-1.so',
'../vendor/lib/libgthread-2.0.so',
'../vendor/lib/libharfbuzz.so',
'../vendor/lib/libharfbuzz-subset.so.0',
'../vendor/lib/libjpeg.so',
'../vendor/lib/liblcms2.so',
'../vendor/lib/liborc-0.4.so',

View File

@@ -16,6 +16,22 @@ sharp('rgba.png')
Returns **Sharp**
## ensureAlpha
Ensure alpha channel, if missing. The added alpha channel will be fully opaque. This is a no-op if the image already has an alpha channel.
### Examples
```javascript
sharp('rgb.jpg')
.ensureAlpha()
.toFile('rgba.png', function(err, info) {
// rgba.png is a 4 channel image with a fully opaque alpha channel
});
```
Returns **Sharp**
## extractChannel
Extract a single channel from a multi-channel image.

View File

@@ -1,33 +1,41 @@
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
## overlayWith
## composite
Overlay (composite) an image over the processed (resized, extracted etc.) image.
Composite image(s) over the processed (resized, extracted etc.) image.
The overlay image must be the same size or smaller than the processed image.
The images to composite must be the same size or smaller than the processed image.
If both `top` and `left` options are provided, they take precedence over `gravity`.
If the overlay image contains an alpha channel then composition with premultiplication will occur.
The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
`dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
`xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
`colour-dodge`, `color-dodge`, `colour-burn`,`color-burn`,
`hard-light`, `soft-light`, `difference`, `exclusion`.
More information about blend modes can be found at
[https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode][1]
and [https://www.cairographics.org/operators/][2]
### Parameters
- `overlay` **([Buffer][1] \| [String][2])** Buffer containing image data or String containing the path to an image file.
- `options` **[Object][3]?**
- `options.gravity` **[String][2]** gravity at which to place the overlay. (optional, default `'centre'`)
- `options.top` **[Number][4]?** the pixel offset from the top edge.
- `options.left` **[Number][4]?** the pixel offset from the left edge.
- `options.tile` **[Boolean][5]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `options.cutout` **[Boolean][5]** set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. (optional, default `false`)
- `options.density` **[Number][4]** number representing the DPI for vector overlay image. (optional, default `72`)
- `options.raw` **[Object][3]?** describes overlay when using raw pixel data.
- `options.raw.width` **[Number][4]?**
- `options.raw.height` **[Number][4]?**
- `options.raw.channels` **[Number][4]?**
- `options.create` **[Object][3]?** describes a blank overlay to be created.
- `options.create.width` **[Number][4]?**
- `options.create.height` **[Number][4]?**
- `options.create.channels` **[Number][4]?** 3-4
- `options.create.background` **([String][2] \| [Object][3])?** parsed by the [color][6] module to extract values for red, green, blue and alpha.
- `images` **[Array][3]<[Object][4]>** Ordered list of images to composite
- `images[].input` **([Buffer][5] \| [String][6])?** Buffer containing image data, String containing the path to an image file, or Create object (see below)
- `images[].input.create` **[Object][4]?** describes a blank overlay to be created.
- `images[].input.create.width` **[Number][7]?**
- `images[].input.create.height` **[Number][7]?**
- `images[].input.create.channels` **[Number][7]?** 3-4
- `images[].input.create.background` **([String][6] \| [Object][4])?** parsed by the [color][8] module to extract values for red, green, blue and alpha.
- `images[].blend` **[String][6]** how to blend this image with the image below. (optional, default `'over'`)
- `images[].gravity` **[String][6]** gravity at which to place the overlay. (optional, default `'centre'`)
- `images[].top` **[Number][7]?** the pixel offset from the top edge.
- `images[].left` **[Number][7]?** the pixel offset from the left edge.
- `images[].tile` **[Boolean][9]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `images[].density` **[Number][7]** number representing the DPI for vector overlay image. (optional, default `72`)
- `images[].raw` **[Object][4]?** describes overlay when using raw pixel data.
- `images[].raw.width` **[Number][7]?**
- `images[].raw.height` **[Number][7]?**
- `images[].raw.channels` **[Number][7]?**
### Examples
@@ -35,9 +43,8 @@ If the overlay image contains an alpha channel then composition with premultipli
sharp('input.png')
.rotate(180)
.resize(300)
.flatten()
.background('#ff6600')
.overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
.flatten( { background: '#ff6600' } )
.composite([{ input: 'overlay.png', gravity: 'southeast' }])
.sharpen()
.withMetadata()
.webp( { quality: 90 } )
@@ -49,20 +56,26 @@ sharp('input.png')
});
```
- Throws **[Error][7]** Invalid parameters
- Throws **[Error][10]** Invalid parameters
Returns **Sharp**
[1]: https://nodejs.org/api/buffer.html
[1]: https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[2]: https://www.cairographics.org/operators/
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[5]: https://nodejs.org/api/buffer.html
[6]: https://www.npmjs.org/package/color
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[8]: https://www.npmjs.org/package/color
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
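As a usage sketch (not part of the diff above), combining a mask and a watermark in one `composite` call could look like this; `input.jpg`, `mask.png` and `watermark.png` are hypothetical files:

```javascript
const sharp = require('sharp');

sharp('input.jpg')
  .resize(400, 400)
  .composite([
    // dest-in keeps the processed image only where mask.png is opaque (the old "cutout")
    { input: 'mask.png', blend: 'dest-in' },
    // watermark placed in the bottom-right corner via gravity
    { input: 'watermark.png', gravity: 'southeast' }
  ])
  .png()
  .toFile('output.png')
  .catch(console.error);
```

Images are composited in array order, each blended onto the result of the previous step.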

View File

@@ -9,11 +9,11 @@
a String containing the path to a JPEG, PNG, WebP, GIF, SVG or TIFF image file.
JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
- `options` **[Object][3]?** if present, is an Object with optional attributes.
- `options.failOnError` **[Boolean][4]** by default apply a "best effort"
to decode images, even if the data is corrupt or invalid. Set this flag to true
if you'd rather halt processing and raise an error when loading invalid images. (optional, default `false`)
- `options.failOnError` **[Boolean][4]** by default halt processing and raise an error when loading invalid images.
Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid. (optional, default `true`)
- `options.density` **[Number][5]** number representing the DPI for vector images. (optional, default `72`)
- `options.page` **[Number][5]** page number to extract for multi-page input (GIF, TIFF) (optional, default `0`)
- `options.pages` **[Number][5]** number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages. (optional, default `1`)
- `options.page` **[Number][5]** page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based. (optional, default `0`)
- `options.raw` **[Object][3]?** describes raw pixel input image data. See `raw()` for pixel ordering.
- `options.raw.width` **[Number][5]?**
- `options.raw.height` **[Number][5]?**
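A minimal sketch of the constructor options documented above, assuming a hypothetical multi-page `scan.tiff`; `failOnError: false` restores the previous best-effort decoding:

```javascript
const sharp = require('sharp');

// page selects the zero-based starting page, pages: -1 reads all remaining pages,
// failOnError: false tolerates corrupt or invalid input data.
sharp('scan.tiff', { failOnError: false, page: 1, pages: -1 })
  .png()
  .toBuffer()
  .then(data => console.log(`decoded ${data.length} bytes`))
  .catch(console.error);
```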

View File

@@ -22,7 +22,7 @@ Returns **Sharp**
## metadata
Fast access to (uncached) image metadata without decoding any compressed image data.
A Promises/A+ promise is returned when `callback` is not provided.
A `Promise` is returned when `callback` is not provided.
- `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
- `size`: Total size of image in bytes, for Stream and Buffer input only
@@ -34,6 +34,8 @@ A Promises/A+ promise is returned when `callback` is not provided.
- `density`: Number of pixels per inch (DPI), if present
- `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
- `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
- `pages`: Number of pages/frames contained within the image, with support for TIFF, PDF, animated GIF and animated WebP
- `pageHeight`: Number of pixels high each page in this PDF image will be.
- `hasProfile`: Boolean indicating the presence of an embedded ICC profile
- `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
- `orientation`: Number value of the EXIF Orientation header, if present
Returns **([Promise][5]<[Object][6]> | Sharp)**
## stats
Access to pixel-derived image statistics for every channel in the image.
A Promise is returned when `callback` is not provided.
A `Promise` is returned when `callback` is not provided.
- `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
- `min` (minimum value in the channel)
Returns **[Promise][5]<[Object][6]>**
## limitInputPixels
Do not process input images where the number of pixels (width _ height) exceeds this limit.
Do not process input images where the number of pixels (width x height) exceeds this limit.
Assumes image dimensions contained in the input metadata can be trusted.
The default limit is 268402689 (0x3FFF _ 0x3FFF) pixels.
The default limit is 268402689 (0x3FFF x 0x3FFF) pixels.
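For reference, 0x3FFF is 16383, so the default limit is 16383 × 16383 = 268402689 pixels. A hedged sketch reading the new `pages`/`pageHeight` metadata alongside an explicit limit, with a hypothetical `animated.webp` input:

```javascript
const sharp = require('sharp');

const image = sharp('animated.webp')
  .limitInputPixels(0x3FFF * 0x3FFF); // same as the default limit of 268402689 pixels

image
  .metadata()
  .then(({ format, width, height, pages, pageHeight }) => {
    console.log(format, width, height, pages, pageHeight);
  })
  .catch(console.error);
```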
### Parameters

View File

@@ -287,6 +287,41 @@ sharp(input)
Returns **Sharp**
## modulate
Transforms the image using brightness, saturation and hue rotation.
### Parameters
- `options` **[Object][2]?**
- `options.brightness` **[Number][1]?** Brightness multiplier
- `options.saturation` **[Number][1]?** Saturation multiplier
- `options.hue` **[Number][1]?** Degrees for hue rotation
### Examples
```javascript
sharp(input)
.modulate({
brightness: 2 // increase lightness by a factor of 2
});
sharp(input)
.modulate({
hue: 180 // hue-rotate by 180 degrees
});
// decrease brightness and saturation while also hue-rotating by 90 degrees
sharp(input)
.modulate({
brightness: 0.5,
saturation: 0.5,
hue: 90
});
```
Returns **Sharp**
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object

View File

@@ -115,13 +115,13 @@ Use these JPEG options for output image.
- `options.quality` **[Number][8]** quality, integer 1-100 (optional, default `80`)
- `options.progressive` **[Boolean][6]** use progressive (interlace) scan (optional, default `false`)
- `options.chromaSubsampling` **[String][1]** set to '4:4:4' to prevent chroma subsampling when quality <= 90 (optional, default `'4:2:0'`)
- `options.trellisQuantisation` **[Boolean][6]** apply trellis quantisation, requires mozjpeg (optional, default `false`)
- `options.overshootDeringing` **[Boolean][6]** apply overshoot deringing, requires mozjpeg (optional, default `false`)
- `options.optimiseScans` **[Boolean][6]** optimise progressive scans, forces progressive, requires mozjpeg (optional, default `false`)
- `options.trellisQuantisation` **[Boolean][6]** apply trellis quantisation, requires libvips compiled with support for mozjpeg (optional, default `false`)
- `options.overshootDeringing` **[Boolean][6]** apply overshoot deringing, requires libvips compiled with support for mozjpeg (optional, default `false`)
- `options.optimiseScans` **[Boolean][6]** optimise progressive scans, forces progressive, requires libvips compiled with support for mozjpeg (optional, default `false`)
- `options.optimizeScans` **[Boolean][6]** alternative spelling of optimiseScans (optional, default `false`)
- `options.optimiseCoding` **[Boolean][6]** optimise Huffman coding tables (optional, default `true`)
- `options.optimizeCoding` **[Boolean][6]** alternative spelling of optimiseCoding (optional, default `true`)
- `options.quantisationTable` **[Number][8]** quantization table to use, integer 0-8, requires mozjpeg (optional, default `0`)
- `options.quantisationTable` **[Number][8]** quantization table to use, integer 0-8, requires libvips compiled with support for mozjpeg (optional, default `0`)
- `options.quantizationTable` **[Number][8]** alternative spelling of quantisationTable (optional, default `0`)
- `options.force` **[Boolean][6]** force JPEG output, otherwise attempt to use input format (optional, default `true`)
@@ -154,6 +154,11 @@ Indexed PNG input at 1, 2 or 4 bits per pixel is converted to 8 bits per pixel.
- `options.progressive` **[Boolean][6]** use progressive (interlace) scan (optional, default `false`)
- `options.compressionLevel` **[Number][8]** zlib compression level, 0-9 (optional, default `9`)
- `options.adaptiveFiltering` **[Boolean][6]** use adaptive row filtering (optional, default `false`)
- `options.palette` **[Boolean][6]** quantise to a palette-based image with alpha transparency support, requires libvips compiled with support for libimagequant (optional, default `false`)
- `options.quality` **[Number][8]** use the lowest number of colours needed to achieve given quality, requires libvips compiled with support for libimagequant (optional, default `100`)
- `options.colours` **[Number][8]** maximum number of palette entries, requires libvips compiled with support for libimagequant (optional, default `256`)
- `options.colors` **[Number][8]** alternative spelling of `options.colours`, requires libvips compiled with support for libimagequant (optional, default `256`)
- `options.dither` **[Number][8]** level of Floyd-Steinberg error diffusion, requires libvips compiled with support for libimagequant (optional, default `1.0`)
- `options.force` **[Boolean][6]** force PNG output, otherwise attempt to use input format (optional, default `true`)
### Examples
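A sketch of the new palette-based PNG output options; as noted above these require libvips compiled with libimagequant support, and the filenames are hypothetical:

```javascript
const sharp = require('sharp');

sharp('input.png')
  .png({
    palette: true, // quantise to a palette-based PNG with alpha transparency support
    quality: 80,   // use the fewest colours needed to reach this quality
    colours: 128,  // cap the palette size
    dither: 0.5    // level of Floyd-Steinberg error diffusion
  })
  .toFile('palette.png')
  .catch(console.error);
```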

View File

@@ -143,7 +143,7 @@ sharp(input)
top: 10,
bottom: 20,
left: 10,
right: 10
right: 10,
background: { r: 0, g: 0, b: 0, alpha: 0 }
})
...
@@ -163,11 +163,11 @@ Extract a region of the image.
### Parameters
- `options` **[Object][9]**
- `options` **[Object][9]** describes the region to extract using integral pixel values
- `options.left` **[Number][8]** zero-indexed offset from left edge
- `options.top` **[Number][8]** zero-indexed offset from top edge
- `options.width` **[Number][8]** dimension of extracted image
- `options.height` **[Number][8]** dimension of extracted image
- `options.width` **[Number][8]** width of region to extract
- `options.height` **[Number][8]** height of region to extract
### Examples
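A short sketch of `extract` using the integral pixel values the parameters now call for; the input and computed offset are illustrative:

```javascript
const sharp = require('sharp');

// Coordinates and dimensions must be whole numbers; round any computed values first.
const region = { left: Math.round(12.6), top: 0, width: 200, height: 100 };

sharp('input.jpg')
  .extract(region)
  .toFile('region.jpg')
  .catch(console.error);
```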

View File

@@ -1,9 +1,76 @@
# Changelog
### v0.22 - "*uptake*"
Requires libvips v8.7.4.
#### v0.22.1 - 25<sup>th</sup> April 2019
* Add `modulate` operation for brightness, saturation and hue.
[#1601](https://github.com/lovell/sharp/pull/1601)
[@Goues](https://github.com/Goues)
* Improve help messaging should `require("sharp")` fail.
[#1638](https://github.com/lovell/sharp/pull/1638)
[@sidharthachatterjee](https://github.com/sidharthachatterjee)
* Add support for Node 12.
[#1668](https://github.com/lovell/sharp/issues/1668)
#### v0.22.0 - 18<sup>th</sup> March 2019
* Remove functions previously deprecated in v0.21.0:
`background`, `crop`, `embed`, `ignoreAspectRatio`, `max`, `min` and `withoutEnlargement`.
* Add `composite` operation supporting multiple images and blend modes; deprecate `overlayWith`.
[#728](https://github.com/lovell/sharp/issues/728)
* Add support for `pages` input option for multi-page input.
[#1566](https://github.com/lovell/sharp/issues/1566)
* Allow Stream-based input of raw pixel data.
[#1579](https://github.com/lovell/sharp/issues/1579)
* Add support for `page` input option to GIF and PDF.
[#1595](https://github.com/lovell/sharp/pull/1595)
[@ramiel](https://github.com/ramiel)
### v0.21 - "*teeth*"
Requires libvips v8.7.0.
#### v0.21.3 - 19<sup>th</sup> January 2019
* Input image decoding now fails fast, set `failOnError` to change this behaviour.
* Failed filesystem-based input now separates missing file and invalid format errors.
[#1542](https://github.com/lovell/sharp/issues/1542)
#### v0.21.2 - 13<sup>th</sup> January 2019
* Ensure all metadata is removed from PNG output unless `withMetadata` used.
* Ensure shortest edge is at least one pixel after resizing.
[#1003](https://github.com/lovell/sharp/issues/1003)
* Add `ensureAlpha` operation to add an alpha channel, if missing.
[#1153](https://github.com/lovell/sharp/issues/1153)
* Expose `pages` and `pageHeight` metadata for multi-page input images.
[#1205](https://github.com/lovell/sharp/issues/1205)
* Expose PNG output options requiring libimagequant.
[#1484](https://github.com/lovell/sharp/issues/1484)
* Expose underlying error message for invalid input.
[#1505](https://github.com/lovell/sharp/issues/1505)
* Prevent mutation of options passed to `jpeg`.
[#1516](https://github.com/lovell/sharp/issues/1516)
* Ensure forced output format applied correctly when output chaining.
[#1528](https://github.com/lovell/sharp/issues/1528)
#### v0.21.1 - 7<sup>th</sup> December 2018
* Install: support `sharp_dist_base_url` npm config, like existing `SHARP_DIST_BASE_URL`.

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="86 86 550 550">
<!-- Copyright 2019 Lovell Fuller. This work is licensed under the Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) License. -->
<path fill="none" stroke="#9c0" stroke-width="80" d="M258.411 285.777l200.176-26.8M244.113 466.413L451.44 438.66M451.441 438.66V238.484M451.441 88.363v171.572l178.725-23.917M270.323 255.602V477.22M272.71 634.17V462.591L93.984 486.515"/>
<path fill="none" stroke="#090" stroke-width="80" d="M451.441 610.246V438.66l178.725-23.91M269.688 112.59v171.58L90.964 308.093"/>
</svg>


View File

@@ -1,5 +1,7 @@
# sharp
<img src="image/sharp-logo.svg" width="160" height="160" alt="sharp logo" align="right">
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
@@ -14,7 +16,7 @@ As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern 64-bit OS X, Windows and Linux systems running
Node versions 6, 8 and 10
Node versions 6, 8, 10, 11 and 12
do not require any additional install or runtime dependencies.
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.png?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
@@ -62,7 +64,7 @@ as [pngcrush](https://pmt.sourceforge.io/pngcrush/).
### Contributing
A [guide for contributors](https://github.com/lovell/sharp/blob/master/CONTRIBUTING.md)
A [guide for contributors](https://github.com/lovell/sharp/blob/master/.github/CONTRIBUTING.md)
covers reporting bugs, requesting features and submitting code changes.
### Credits
@@ -122,17 +124,18 @@ the help and code contributions of the following people:
* [Julian Aubourg](https://github.com/jaubourg)
* [Keith Belovay](https://github.com/fromkeith)
* [Michael B. Klein](https://github.com/mbklein)
* [Jakub Michálek](https://github.com/Goues)
Thank you!
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,

View File

@@ -10,12 +10,12 @@ yarn add sharp
## Prerequisites
* Node v4.5.0+
* Node.js v6+
### Building from source
Pre-compiled binaries for sharp are provided for use with
Node versions 6, 8 and 10 on
Node versions 6, 8, 10, 11 and 12 on
64-bit Windows, OS X and Linux platforms.
Sharp will be built from source at install time when:
@@ -36,14 +36,14 @@ Building from source requires:
[![Ubuntu 16.04 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`.
This involves an automated HTTPS download of approximately 8MB.
This involves an automated HTTPS download of approximately 9MB.
Most Linux-based (glibc, musl) operating systems running on x64 and ARMv6+ CPUs should "just work", e.g.:
* Debian 7+
* Ubuntu 14.04+
* Centos 7+
* Alpine 3.8+ (Node 8 and 10)
* Alpine 3.8+ (Node 8+)
* Fedora
* openSUSE 13.2+
* Archlinux
@@ -86,7 +86,7 @@ via `sharp.cache(false)` to avoid a stack overflow.
[![OS X 10.12 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`.
This involves an automated HTTPS download of approximately 7MB.
This involves an automated HTTPS download of approximately 8MB.
To use your own version of libvips instead of the provided binaries, make sure it is
at least the version listed under `config.libvips` in the `package.json` file and
@@ -97,8 +97,9 @@ that it can be located using `pkg-config --modversion vips-cpp`.
[![Windows x64 Build Status](https://ci.appveyor.com/api/projects/status/pgtul704nkhhg6sg)](https://ci.appveyor.com/project/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules\sharp\vendor` during `npm install`.
This involves an automated HTTPS download of approximately 13MB. If you are having issues during
installation consider removing the directory ```C:\Users\[user]\AppData\Roaming\npm-cache\_libvips```.
This involves an automated HTTPS download of approximately 14MB.
If you are having issues during installation consider removing the directory
`C:\Users\[user]\AppData\Roaming\npm-cache\_libvips`.
Only 64-bit (x64) `node.exe` is supported.
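As a quick way to see the minimum libvips version referred to above, a hedged one-liner reading `config.libvips` from sharp's own package.json:

```javascript
// Reads the config.libvips field from sharp's package.json
const { config } = require('sharp/package.json');
console.log(`sharp expects libvips >= ${config.libvips}`);
```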

View File

@@ -20,7 +20,8 @@ const distBaseUrl = process.env.npm_config_sharp_dist_base_url || process.env.SH
const fail = function (err) {
npmLog.error('sharp', err.message);
npmLog.error('sharp', 'Please see http://sharp.pixelplumbing.com/page/install');
npmLog.info('sharp', 'Attempting to build from source via node-gyp but this may fail due to the above error');
npmLog.info('sharp', 'Please see https://sharp.pixelplumbing.com/page/install for required dependencies');
process.exit(1);
};
@@ -33,7 +34,12 @@ const extractTarball = function (tarPath) {
cwd: vendorPath,
strict: true
})
.catch(fail);
.catch(function (err) {
if (/unexpected end of file/.test(err.message)) {
npmLog.error('sharp', `Please delete ${tarPath} as it is not a valid tarball`);
}
fail(err);
});
};
try {
@@ -79,7 +85,9 @@ try {
if (response.statusCode !== 200) {
throw new Error(`Status ${response.statusCode}`);
}
response.pipe(tmpFile);
response
.on('error', fail)
.pipe(tmpFile);
});
tmpFile
.on('error', fail)

View File

@@ -29,6 +29,23 @@ function removeAlpha () {
return this;
}
/**
* Ensure alpha channel, if missing. The added alpha channel will be fully opaque. This is a no-op if the image already has an alpha channel.
*
* @example
* sharp('rgb.jpg')
* .ensureAlpha()
* .toFile('rgba.png', function(err, info) {
* // rgba.png is a 4 channel image with a fully opaque alpha channel
* });
*
* @returns {Sharp}
*/
function ensureAlpha () {
this.options.ensureAlpha = true;
return this;
}
/**
* Extract a single channel from a multi-channel image.
*
@@ -120,6 +137,7 @@ module.exports = function (Sharp) {
Object.assign(Sharp.prototype, {
// Public instance functions
removeAlpha,
ensureAlpha,
extractChannel,
joinChannel,
bandbool
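To complement the `ensureAlpha` example above, a hedged sketch that verifies the added channel by inspecting raw output; `rgb.jpg` is hypothetical:

```javascript
const sharp = require('sharp');

sharp('rgb.jpg')
  .ensureAlpha()
  .raw()
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.channels)) // 4
  .catch(console.error);
```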

View File

@@ -1,7 +1,5 @@
'use strict';
const deprecate = require('util').deprecate;
const color = require('color');
const is = require('./is');
@@ -17,24 +15,6 @@ const colourspace = {
srgb: 'srgb'
};
/**
* @deprecated
* @private
*/
function background (rgba) {
const colour = color(rgba);
const background = [
colour.red(),
colour.green(),
colour.blue(),
Math.round(colour.alpha() * 255)
];
this.options.resizeBackground = background;
this.options.extendBackground = background;
this.options.flattenBackground = background.slice(0, 3);
return this;
}
/**
* Tint the image using the provided chroma while preserving the image luminance.
* An alpha channel may be present and will be unchanged by the operation.
@@ -136,6 +116,4 @@ module.exports = function (Sharp) {
// Class attributes
Sharp.colourspace = colourspace;
Sharp.colorspace = colourspace;
// Deprecated
Sharp.prototype.background = deprecate(background, 'background(background) is deprecated, use resize({ background }), extend({ background }) or flatten({ background }) instead');
};
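For reference, a sketch of the replacement calls now that `background()` has been removed; `flatten`, `extend` and `resize` each take their own `background` option (values are illustrative):

```javascript
const sharp = require('sharp');

sharp('input.png')
  // previously: .background('#ff6600').flatten()
  .flatten({ background: '#ff6600' })
  // extend now carries its own background colour as well
  .extend({ top: 10, bottom: 10, left: 10, right: 10, background: '#ff6600' })
  .toFile('out.png')
  .catch(console.error);
```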

View File

@@ -1,22 +1,66 @@
'use strict';
const deprecate = require('util').deprecate;
const is = require('./is');
/**
* Overlay (composite) an image over the processed (resized, extracted etc.) image.
* Blend modes.
* @member
* @private
*/
const blend = {
clear: 'clear',
source: 'source',
over: 'over',
in: 'in',
out: 'out',
atop: 'atop',
dest: 'dest',
'dest-over': 'dest-over',
'dest-in': 'dest-in',
'dest-out': 'dest-out',
'dest-atop': 'dest-atop',
xor: 'xor',
add: 'add',
saturate: 'saturate',
multiply: 'multiply',
screen: 'screen',
overlay: 'overlay',
darken: 'darken',
lighten: 'lighten',
'colour-dodge': 'colour-dodge',
'color-dodge': 'colour-dodge',
'colour-burn': 'colour-burn',
'color-burn': 'colour-burn',
'hard-light': 'hard-light',
'soft-light': 'soft-light',
difference: 'difference',
exclusion: 'exclusion'
};
/**
* Composite image(s) over the processed (resized, extracted etc.) image.
*
* The overlay image must be the same size or smaller than the processed image.
* The images to composite must be the same size or smaller than the processed image.
* If both `top` and `left` options are provided, they take precedence over `gravity`.
*
* If the overlay image contains an alpha channel then composition with premultiplication will occur.
* The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
* `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
* `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
* `colour-dodge`, `color-dodge`, `colour-burn`, `color-burn`,
* `hard-light`, `soft-light`, `difference`, `exclusion`.
*
* More information about blend modes can be found at
* https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
* and https://www.cairographics.org/operators/
*
* @example
* sharp('input.png')
* .rotate(180)
* .resize(300)
* .flatten()
* .background('#ff6600')
* .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
* .flatten( { background: '#ff6600' } )
* .composite([{ input: 'overlay.png', gravity: 'southeast' }])
* .sharpen()
* .withMetadata()
* .webp( { quality: 90 } )
@@ -27,70 +71,104 @@ const is = require('./is');
* // sharpened, with metadata, 90% quality WebP image data. Phew!
* });
*
* @param {(Buffer|String)} overlay - Buffer containing image data or String containing the path to an image file.
* @param {Object} [options]
* @param {String} [options.gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [options.top] - the pixel offset from the top edge.
* @param {Number} [options.left] - the pixel offset from the left edge.
* @param {Boolean} [options.tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Boolean} [options.cutout=false] - set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another.
* @param {Number} [options.density=72] - number representing the DPI for vector overlay image.
* @param {Object} [options.raw] - describes overlay when using raw pixel data.
* @param {Number} [options.raw.width]
* @param {Number} [options.raw.height]
* @param {Number} [options.raw.channels]
* @param {Object} [options.create] - describes a blank overlay to be created.
* @param {Number} [options.create.width]
* @param {Number} [options.create.height]
* @param {Number} [options.create.channels] - 3-4
* @param {String|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {Object[]} images - Ordered list of images to composite
* @param {Buffer|String} [images[].input] - Buffer containing image data, String containing the path to an image file, or Create object (see below)
* @param {Object} [images[].input.create] - describes a blank overlay to be created.
* @param {Number} [images[].input.create.width]
* @param {Number} [images[].input.create.height]
* @param {Number} [images[].input.create.channels] - 3-4
* @param {String|Object} [images[].input.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {String} [images[].blend='over'] - how to blend this image with the image below.
* @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [images[].top] - the pixel offset from the top edge.
* @param {Number} [images[].left] - the pixel offset from the left edge.
* @param {Boolean} [images[].tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Number} [images[].density=72] - number representing the DPI for vector overlay image.
* @param {Object} [images[].raw] - describes overlay when using raw pixel data.
* @param {Number} [images[].raw.width]
* @param {Number} [images[].raw.height]
* @param {Number} [images[].raw.channels]
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function overlayWith (overlay, options) {
this.options.overlay = this._createInputDescriptor(overlay, options, {
allowStream: false
});
if (is.object(options)) {
if (is.defined(options.tile)) {
if (is.bool(options.tile)) {
this.options.overlayTile = options.tile;
} else {
throw new Error('Invalid overlay tile ' + options.tile);
}
}
if (is.defined(options.cutout)) {
if (is.bool(options.cutout)) {
this.options.overlayCutout = options.cutout;
} else {
throw new Error('Invalid overlay cutout ' + options.cutout);
}
}
if (is.defined(options.left) || is.defined(options.top)) {
if (is.integer(options.left) && options.left >= 0 && is.integer(options.top) && options.top >= 0) {
this.options.overlayXOffset = options.left;
this.options.overlayYOffset = options.top;
} else {
throw new Error('Invalid overlay left ' + options.left + ' and/or top ' + options.top);
}
}
if (is.defined(options.gravity)) {
if (is.integer(options.gravity) && is.inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (is.string(options.gravity) && is.integer(this.constructor.gravity[options.gravity])) {
this.options.overlayGravity = this.constructor.gravity[options.gravity];
} else {
throw new Error('Unsupported overlay gravity ' + options.gravity);
}
}
function composite (images) {
if (!Array.isArray(images)) {
throw is.invalidParameterError('images to composite', 'array', images);
}
this.options.composite = images.map(image => {
if (!is.object(image)) {
throw is.invalidParameterError('image to composite', 'object', image);
}
const { raw, density } = image;
const inputOptions = (raw || density) ? { raw, density } : undefined;
const composite = {
input: this._createInputDescriptor(image.input, inputOptions, { allowStream: false }),
blend: 'over',
tile: false,
left: -1,
top: -1,
gravity: 0
};
if (is.defined(image.blend)) {
if (is.string(blend[image.blend])) {
composite.blend = blend[image.blend];
} else {
throw is.invalidParameterError('blend', 'valid blend name', image.blend);
}
}
if (is.defined(image.tile)) {
if (is.bool(image.tile)) {
composite.tile = image.tile;
} else {
throw is.invalidParameterError('tile', 'boolean', image.tile);
}
}
if (is.defined(image.left)) {
if (is.integer(image.left) && image.left >= 0) {
composite.left = image.left;
} else {
throw is.invalidParameterError('left', 'positive integer', image.left);
}
}
if (is.defined(image.top)) {
if (is.integer(image.top) && image.top >= 0) {
composite.top = image.top;
} else {
throw is.invalidParameterError('top', 'positive integer', image.top);
}
}
if (composite.left !== composite.top && Math.min(composite.left, composite.top) === -1) {
throw new Error('Expected both left and top to be set');
}
if (is.defined(image.gravity)) {
if (is.integer(image.gravity) && is.inRange(image.gravity, 0, 8)) {
composite.gravity = image.gravity;
} else if (is.string(image.gravity) && is.integer(this.constructor.gravity[image.gravity])) {
composite.gravity = this.constructor.gravity[image.gravity];
} else {
throw is.invalidParameterError('gravity', 'valid gravity', image.gravity);
}
}
return composite;
});
return this;
}
/**
* @deprecated
* @private
*/
function overlayWith (input, options) {
const blend = (is.object(options) && options.cutout) ? 'dest-in' : 'over';
return this.composite([Object.assign({ input, blend }, options)]);
}
/**
* Decorate the Sharp prototype with composite-related functions.
* @private
*/
module.exports = function (Sharp) {
Sharp.prototype.overlayWith = overlayWith;
Sharp.prototype.composite = composite;
Sharp.prototype.overlayWith = deprecate(overlayWith, 'overlayWith(input, options) is deprecated, use composite([{ input, ...options }]) instead');
Sharp.blend = blend;
};
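To illustrate the deprecation shim above, the old cutout overlay and its `composite` equivalent; the shim maps `cutout: true` to the `dest-in` blend mode (file names are hypothetical):

```javascript
const sharp = require('sharp');

// Deprecated (still works, but logs a deprecation warning):
// sharp('input.png').overlayWith('mask.png', { cutout: true })

// Equivalent composite call:
sharp('input.png')
  .composite([{ input: 'mask.png', blend: 'dest-in' }])
  .png()
  .toBuffer()
  .then(data => console.log(`masked image is ${data.length} bytes`))
  .catch(console.error);
```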

View File

@@ -7,7 +7,27 @@ const events = require('events');
const is = require('./is');
require('./libvips').hasVendoredLibvips();
const sharp = require('bindings')('sharp.node');
let sharp;
try {
sharp = require('../build/Release/sharp.node');
} catch (err) {
// Bail early if bindings aren't available
const help = ['', 'Something went wrong installing the "sharp" module', '', err.message, ''];
if (/NODE_MODULE_VERSION/.test(err.message)) {
help.push('- Ensure the version of Node.js used at install time matches that used at runtime');
} else if (/invalid ELF header/.test(err.message)) {
help.push(`- Ensure "${process.platform}" is used at install time as well as runtime`);
} else {
help.push('- Remove the "node_modules/sharp" directory, run "npm install" and look for errors');
}
help.push(
'- Consult the installation documentation at https://sharp.pixelplumbing.com/en/stable/install/',
'- Search for this error at https://github.com/lovell/sharp/issues', ''
);
console.error(help.join('\n'));
process.exit(1);
}
// Use NODE_DEBUG=sharp to enable libvips warnings
const debuglog = util.debuglog('sharp');
@@ -61,11 +81,11 @@ const debuglog = util.debuglog('sharp');
* a String containing the path to a JPEG, PNG, WebP, GIF, SVG or TIFF image file.
* JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {Boolean} [options.failOnError=false] - by default apply a "best effort"
* to decode images, even if the data is corrupt or invalid. Set this flag to true
* if you'd rather halt processing and raise an error when loading invalid images.
* @param {Boolean} [options.failOnError=true] - by default halt processing and raise an error when loading invalid images.
* Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid.
* @param {Number} [options.density=72] - number representing the DPI for vector images.
* @param {Number} [options.page=0] - page number to extract for multi-page input (GIF, TIFF)
* @param {Number} [options.pages=1] - number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages.
* @param {Number} [options.page=0] - page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based.
* @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
* @param {Number} [options.raw.width]
* @param {Number} [options.raw.height]
@@ -139,18 +159,17 @@ const Sharp = function (input, options) {
gammaOut: 0,
greyscale: false,
normalise: 0,
brightness: 1,
saturation: 1,
hue: 0,
booleanBufferIn: null,
booleanFileIn: '',
joinChannelIn: [],
extractChannel: -1,
removeAlpha: false,
ensureAlpha: false,
colourspace: 'srgb',
// overlay
overlayGravity: 0,
overlayXOffset: -1,
overlayYOffset: -1,
overlayTile: false,
overlayCutout: false,
composite: [],
// output
fileOut: '',
formatOut: 'input',
@@ -170,6 +189,10 @@ const Sharp = function (input, options) {
pngProgressive: false,
pngCompressionLevel: 9,
pngAdaptiveFiltering: false,
pngPalette: false,
pngQuality: 100,
pngColours: 256,
pngDither: 1,
webpQuality: 80,
webpAlphaQuality: 100,
webpLossless: false,

View File

@@ -9,7 +9,7 @@ const sharp = require('../build/Release/sharp.node');
* @private
*/
function _createInputDescriptor (input, inputOptions, containerOptions) {
const inputDescriptor = { failOnError: false };
const inputDescriptor = { failOnError: true };
if (is.string(input)) {
// filesystem
inputDescriptor.file = input;
@@ -19,6 +19,10 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
} else if (is.plainObject(input) && !is.defined(inputOptions)) {
// Plain Object descriptor, e.g. create
inputOptions = input;
if (is.plainObject(inputOptions.raw)) {
// Raw Stream
inputDescriptor.buffer = [];
}
} else if (!is.defined(input) && is.object(containerOptions) && containerOptions.allowStream) {
// Stream
inputDescriptor.buffer = [];
@@ -57,7 +61,12 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw new Error('Expected width, height and channels for raw pixel input');
}
}
// Page input for multi-page TIFF
// Multi-page input (GIF, TIFF, PDF)
if (is.defined(inputOptions.pages)) {
if (is.integer(inputOptions.pages) && is.inRange(inputOptions.pages, -1, 100000)) {
inputDescriptor.pages = inputOptions.pages;
}
}
if (is.defined(inputOptions.page)) {
if (is.integer(inputOptions.page) && is.inRange(inputOptions.page, 0, 100000)) {
inputDescriptor.page = inputOptions.page;
@@ -174,7 +183,7 @@ function clone () {
/**
* Fast access to (uncached) image metadata without decoding any compressed image data.
* A Promises/A+ promise is returned when `callback` is not provided.
* A `Promise` is returned when `callback` is not provided.
*
* - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
* - `size`: Total size of image in bytes, for Stream and Buffer input only
@@ -186,6 +195,8 @@ function clone () {
* - `density`: Number of pixels per inch (DPI), if present
* - `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
* - `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
* - `pages`: Number of pages/frames contained within the image, with support for TIFF, PDF, animated GIF and animated WebP
* - `pageHeight`: Number of pixels high each page in a multi-page image will be
* - `hasProfile`: Boolean indicating the presence of an embedded ICC profile
* - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* - `orientation`: Number value of the EXIF Orientation header, if present
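For context, a minimal sketch of reading the new fields; the file name is illustrative and pageHeight is only reported when the loader exposes it:

const sharp = require('sharp');
sharp('multi-page.tiff')
  .metadata()
  .then(({ format, pages, pageHeight }) => {
    // pages: number of pages/frames; pageHeight: height in pixels of each page, if known
  });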
@@ -253,7 +264,7 @@ function metadata (callback) {
/**
* Access to pixel-derived image statistics for every channel in the image.
* A Promise is returned when `callback` is not provided.
* A `Promise` is returned when `callback` is not provided.
*
* - `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
* - `min` (minimum value in the channel)
@@ -321,9 +332,9 @@ function stats (callback) {
}
/**
* Do not process input images where the number of pixels (width * height) exceeds this limit.
* Do not process input images where the number of pixels (width x height) exceeds this limit.
* Assumes image dimensions contained in the input metadata can be trusted.
* The default limit is 268402689 (0x3FFF * 0x3FFF) pixels.
* The default limit is 268402689 (0x3FFF x 0x3FFF) pixels.
* @param {(Number|Boolean)} limit - an integral Number of pixels, zero or false to remove limit, true to use default limit.
* @returns {Sharp}
* @throws {Error} Invalid limit
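A brief illustration of the limit in use; the values are arbitrary:

const sharp = require('sharp');
sharp(input).limitInputPixels(50000 * 50000).toBuffer(); // reject anything larger than 2.5 gigapixels
sharp(input).limitInputPixels(false).toBuffer();         // remove the limit entirely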

View File

@@ -415,6 +415,62 @@ function recomb (inputMatrix) {
return this;
}
/**
* Transforms the image using brightness, saturation and hue rotation.
*
* @example
* sharp(input)
* .modulate({
* brightness: 2 // increase lightness by a factor of 2
* });
*
* sharp(input)
* .modulate({
* hue: 180 // hue-rotate by 180 degrees
* });
*
* // decrease brightness and saturation while also hue-rotating by 90 degrees
* sharp(input)
* .modulate({
* brightness: 0.5,
* saturation: 0.5,
* hue: 90
* });
*
* @param {Object} [options]
* @param {Number} [options.brightness] Brightness multiplier
* @param {Number} [options.saturation] Saturation multiplier
* @param {Number} [options.hue] Degrees for hue rotation
* @returns {Sharp}
*/
function modulate (options) {
if (!is.plainObject(options)) {
throw is.invalidParameterError('options', 'plain object', options);
}
if ('brightness' in options) {
if (is.number(options.brightness) && options.brightness >= 0) {
this.options.brightness = options.brightness;
} else {
throw is.invalidParameterError('brightness', 'number above zero', options.brightness);
}
}
if ('saturation' in options) {
if (is.number(options.saturation) && options.saturation >= 0) {
this.options.saturation = options.saturation;
} else {
throw is.invalidParameterError('saturation', 'number above zero', options.saturation);
}
}
if ('hue' in options) {
if (is.integer(options.hue)) {
this.options.hue = options.hue % 360;
} else {
throw is.invalidParameterError('hue', 'number', options.hue);
}
}
return this;
}
/**
* Decorate the Sharp prototype with operation-related functions.
* @private
@@ -436,6 +492,7 @@ module.exports = function (Sharp) {
threshold,
boolean,
linear,
recomb
recomb,
modulate
});
};

View File

@@ -32,7 +32,7 @@ const sharp = require('../build/Release/sharp.node');
*/
function toFile (fileOut, callback) {
if (!fileOut || fileOut.length === 0) {
const errOutputInvalid = new Error('Invalid output');
const errOutputInvalid = new Error('Missing output file path');
if (is.fn(callback)) {
callback(errOutputInvalid);
} else {
@@ -144,13 +144,13 @@ function withMetadata (withMetadata) {
* @param {Number} [options.quality=80] - quality, integer 1-100
* @param {Boolean} [options.progressive=false] - use progressive (interlace) scan
* @param {String} [options.chromaSubsampling='4:2:0'] - set to '4:4:4' to prevent chroma subsampling when quality <= 90
* @param {Boolean} [options.trellisQuantisation=false] - apply trellis quantisation, requires mozjpeg
* @param {Boolean} [options.overshootDeringing=false] - apply overshoot deringing, requires mozjpeg
* @param {Boolean} [options.optimiseScans=false] - optimise progressive scans, forces progressive, requires mozjpeg
* @param {Boolean} [options.trellisQuantisation=false] - apply trellis quantisation, requires libvips compiled with support for mozjpeg
* @param {Boolean} [options.overshootDeringing=false] - apply overshoot deringing, requires libvips compiled with support for mozjpeg
* @param {Boolean} [options.optimiseScans=false] - optimise progressive scans, forces progressive, requires libvips compiled with support for mozjpeg
* @param {Boolean} [options.optimizeScans=false] - alternative spelling of optimiseScans
* @param {Boolean} [options.optimiseCoding=true] - optimise Huffman coding tables
* @param {Boolean} [options.optimizeCoding=true] - alternative spelling of optimiseCoding
* @param {Number} [options.quantisationTable=0] - quantization table to use, integer 0-8, requires mozjpeg
* @param {Number} [options.quantisationTable=0] - quantization table to use, integer 0-8, requires libvips compiled with support for mozjpeg
* @param {Number} [options.quantizationTable=0] - alternative spelling of quantisationTable
* @param {Boolean} [options.force=true] - force JPEG output, otherwise attempt to use input format
* @returns {Sharp}
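A sketch of these options in use; they are assumed to have no effect unless the underlying libvips was compiled with mozjpeg support, and the input is illustrative:

const sharp = require('sharp');
sharp(input)
  .jpeg({
    quality: 75,
    trellisQuantisation: true,
    overshootDeringing: true,
    optimiseScans: true // forces progressive scan
  })
  .toBuffer();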
@@ -175,30 +175,30 @@ function jpeg (options) {
throw new Error('Invalid chromaSubsampling (4:2:0, 4:4:4) ' + options.chromaSubsampling);
}
}
options.trellisQuantisation = is.bool(options.trellisQuantization) ? options.trellisQuantization : options.trellisQuantisation;
if (is.defined(options.trellisQuantisation)) {
this._setBooleanOption('jpegTrellisQuantisation', options.trellisQuantisation);
const trellisQuantisation = is.bool(options.trellisQuantization) ? options.trellisQuantization : options.trellisQuantisation;
if (is.defined(trellisQuantisation)) {
this._setBooleanOption('jpegTrellisQuantisation', trellisQuantisation);
}
if (is.defined(options.overshootDeringing)) {
this._setBooleanOption('jpegOvershootDeringing', options.overshootDeringing);
}
options.optimiseScans = is.bool(options.optimizeScans) ? options.optimizeScans : options.optimiseScans;
if (is.defined(options.optimiseScans)) {
this._setBooleanOption('jpegOptimiseScans', options.optimiseScans);
if (options.optimiseScans) {
const optimiseScans = is.bool(options.optimizeScans) ? options.optimizeScans : options.optimiseScans;
if (is.defined(optimiseScans)) {
this._setBooleanOption('jpegOptimiseScans', optimiseScans);
if (optimiseScans) {
this.options.jpegProgressive = true;
}
}
options.optimiseCoding = is.bool(options.optimizeCoding) ? options.optimizeCoding : options.optimiseCoding;
if (is.defined(options.optimiseCoding)) {
this._setBooleanOption('jpegOptimiseCoding', options.optimiseCoding);
const optimiseCoding = is.bool(options.optimizeCoding) ? options.optimizeCoding : options.optimiseCoding;
if (is.defined(optimiseCoding)) {
this._setBooleanOption('jpegOptimiseCoding', optimiseCoding);
}
options.quantisationTable = is.number(options.quantizationTable) ? options.quantizationTable : options.quantisationTable;
if (is.defined(options.quantisationTable)) {
if (is.integer(options.quantisationTable) && is.inRange(options.quantisationTable, 0, 8)) {
this.options.jpegQuantisationTable = options.quantisationTable;
const quantisationTable = is.number(options.quantizationTable) ? options.quantizationTable : options.quantisationTable;
if (is.defined(quantisationTable)) {
if (is.integer(quantisationTable) && is.inRange(quantisationTable, 0, 8)) {
this.options.jpegQuantisationTable = quantisationTable;
} else {
throw new Error('Invalid quantisation table (integer, 0-8) ' + options.quantisationTable);
throw new Error('Invalid quantisation table (integer, 0-8) ' + quantisationTable);
}
}
}
@@ -221,6 +221,11 @@ function jpeg (options) {
* @param {Boolean} [options.progressive=false] - use progressive (interlace) scan
* @param {Number} [options.compressionLevel=9] - zlib compression level, 0-9
* @param {Boolean} [options.adaptiveFiltering=false] - use adaptive row filtering
* @param {Boolean} [options.palette=false] - quantise to a palette-based image with alpha transparency support, requires libvips compiled with support for libimagequant
* @param {Number} [options.quality=100] - use the lowest number of colours needed to achieve given quality, requires libvips compiled with support for libimagequant
* @param {Number} [options.colours=256] - maximum number of palette entries, requires libvips compiled with support for libimagequant
* @param {Number} [options.colors=256] - alternative spelling of `options.colours`, requires libvips compiled with support for libimagequant
* @param {Number} [options.dither=1.0] - level of Floyd-Steinberg error diffusion, requires libvips compiled with support for libimagequant
* @param {Boolean} [options.force=true] - force PNG output, otherwise attempt to use input format
* @returns {Sharp}
* @throws {Error} Invalid options
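A sketch of palette-based PNG output; this requires libvips compiled with libimagequant, and the file name is illustrative:

const sharp = require('sharp');
sharp(input)
  .png({ palette: true, quality: 80, colours: 128, dither: 0.5 })
  .toFile('output-indexed.png');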
@@ -240,6 +245,33 @@ function png (options) {
if (is.defined(options.adaptiveFiltering)) {
this._setBooleanOption('pngAdaptiveFiltering', options.adaptiveFiltering);
}
if (is.defined(options.palette)) {
this._setBooleanOption('pngPalette', options.palette);
if (this.options.pngPalette) {
if (is.defined(options.quality)) {
if (is.integer(options.quality) && is.inRange(options.quality, 0, 100)) {
this.options.pngQuality = options.quality;
} else {
throw is.invalidParameterError('quality', 'integer between 0 and 100', options.quality);
}
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
this.options.pngColours = colours;
} else {
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
}
}
if (is.defined(options.dither)) {
if (is.number(options.dither) && is.inRange(options.dither, 0, 1)) {
this.options.pngDither = options.dither;
} else {
throw is.invalidParameterError('dither', 'number between 0.0 and 1.0', options.dither);
}
}
}
}
}
return this._updateFormatOut('png', options);
}
@@ -541,7 +573,9 @@ function tile (tile) {
* @returns {Sharp}
*/
function _updateFormatOut (formatOut, options) {
this.options.formatOut = (is.object(options) && options.force === false) ? 'input' : formatOut;
if (!(is.object(options) && options.force === false)) {
this.options.formatOut = formatOut;
}
return this;
}
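With force: false the requested format is therefore no longer applied and the previously selected (or input) format wins; a sketch with an illustrative file name:

const sharp = require('sharp');
// A JPEG source stays JPEG even though png() was called with force disabled:
sharp('photo.jpg')
  .png({ force: false })
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.format)); // 'jpeg'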

View File

@@ -2,17 +2,22 @@
const detectLibc = require('detect-libc');
const env = process.env;
module.exports = function () {
const arch = process.env.npm_config_arch || process.arch;
const platform = process.env.npm_config_platform || process.platform;
const arch = env.npm_config_arch || process.arch;
const platform = env.npm_config_platform || process.platform;
const libc = (platform === 'linux' && detectLibc.isNonGlibcLinux) ? detectLibc.family : '';
const platformId = [`${platform}${libc}`];
if (arch === 'arm' || arch === 'armhf' || arch === 'arm64') {
const armVersion = (arch === 'arm64') ? '8' : process.env.npm_config_armv || process.config.variables.arm_version || '6';
platformId.push(`armv${armVersion}`);
if (arch === 'arm') {
platformId.push(`armv${env.npm_config_arm_version || process.config.variables.arm_version || '6'}`);
} else if (arch === 'arm64') {
platformId.push(`arm64v${env.npm_config_arm_version || '8'}`);
} else {
platformId.push(arch);
}
return platformId.join('-');
};
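For reference, a rough sketch of the identifiers this helper now yields; the require path and the listed values are illustrative only:

const platform = require('sharp/lib/platform');
console.log(platform());
// e.g. 'linux-x64', 'linuxmusl-x64', 'linux-armv7', 'linux-arm64v8' or 'win32-x64',
// depending on the process/npm_config platform, arch and ARM version overrides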

View File

@@ -1,6 +1,5 @@
'use strict';
const deprecate = require('util').deprecate;
const is = require('./is');
/**
@@ -275,7 +274,7 @@ function resize (width, height, options) {
* top: 10,
* bottom: 20,
* left: 10,
* right: 10
* right: 10,
* background: { r: 0, g: 0, b: 0, alpha: 0 }
* })
* ...
@@ -335,11 +334,11 @@ function extend (extend) {
* // Extract a region, resize, then extract from the resized image
* });
*
* @param {Object} options
* @param {Object} options - describes the region to extract using integral pixel values
* @param {Number} options.left - zero-indexed offset from left edge
* @param {Number} options.top - zero-indexed offset from top edge
* @param {Number} options.width - dimension of extracted image
* @param {Number} options.height - dimension of extracted image
* @param {Number} options.width - width of region to extract
* @param {Number} options.height - height of region to extract
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
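A one-line sketch of the region object, using integral pixel values and an illustrative input:

const sharp = require('sharp');
sharp(input).extract({ left: 10, top: 20, width: 200, height: 100 }).toBuffer();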
@@ -378,92 +377,6 @@ function trim (threshold) {
return this;
}
// Deprecated functions
/**
* @deprecated
* @private
*/
function crop (crop) {
this.options.canvas = 'crop';
if (!is.defined(crop)) {
// Default
this.options.position = gravity.center;
} else if (is.integer(crop) && is.inRange(crop, 0, 8)) {
// Gravity (numeric)
this.options.position = crop;
} else if (is.string(crop) && is.integer(gravity[crop])) {
// Gravity (string)
this.options.position = gravity[crop];
} else if (is.integer(crop) && crop >= strategy.entropy) {
// Strategy
this.options.position = crop;
} else if (is.string(crop) && is.integer(strategy[crop])) {
// Strategy (string)
this.options.position = strategy[crop];
} else {
throw is.invalidParameterError('crop', 'valid crop id/name/strategy', crop);
}
return this;
}
/**
* @deprecated
* @private
*/
function embed (embed) {
this.options.canvas = 'embed';
if (!is.defined(embed)) {
// Default
this.options.position = gravity.center;
} else if (is.integer(embed) && is.inRange(embed, 0, 8)) {
// Gravity (numeric)
this.options.position = embed;
} else if (is.string(embed) && is.integer(gravity[embed])) {
// Gravity (string)
this.options.position = gravity[embed];
} else {
throw is.invalidParameterError('embed', 'valid embed id/name', embed);
}
return this;
}
/**
* @deprecated
* @private
*/
function max () {
this.options.canvas = 'max';
return this;
}
/**
* @deprecated
* @private
*/
function min () {
this.options.canvas = 'min';
return this;
}
/**
* @deprecated
* @private
*/
function ignoreAspectRatio () {
this.options.canvas = 'ignore_aspect';
return this;
}
/**
* @deprecated
* @private
*/
function withoutEnlargement (withoutEnlargement) {
this.options.withoutEnlargement = is.bool(withoutEnlargement) ? withoutEnlargement : true;
return this;
}
/**
* Decorate the Sharp prototype with resize-related functions.
* @private
@@ -481,11 +394,4 @@ module.exports = function (Sharp) {
Sharp.kernel = kernel;
Sharp.fit = fit;
Sharp.position = position;
// Deprecated functions, to be removed in v0.22.0
Sharp.prototype.crop = deprecate(crop, 'crop(position) is deprecated, use resize({ fit: "cover", position }) instead');
Sharp.prototype.embed = deprecate(embed, 'embed(position) is deprecated, use resize({ fit: "contain", position }) instead');
Sharp.prototype.max = deprecate(max, 'max() is deprecated, use resize({ fit: "inside" }) instead');
Sharp.prototype.min = deprecate(min, 'min() is deprecated, use resize({ fit: "outside" }) instead');
Sharp.prototype.ignoreAspectRatio = deprecate(ignoreAspectRatio, 'ignoreAspectRatio() is deprecated, use resize({ fit: "fill" }) instead');
Sharp.prototype.withoutEnlargement = deprecate(withoutEnlargement, 'withoutEnlargement() is deprecated, use resize({ withoutEnlargement: true }) instead');
};
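Callers still using the removed helpers need the resize options introduced earlier; a sketch of the equivalent calls, with an illustrative input:

const sharp = require('sharp');
// previously sharp(input).resize(200, 200).crop('north')
sharp(input).resize(200, 200, { fit: 'cover', position: 'north' });
// previously sharp(input).resize(200, 200).max()
sharp(input).resize(200, 200, { fit: 'inside' });
// previously sharp(input).resize(200).withoutEnlargement()
sharp(input).resize(200, null, { withoutEnlargement: true });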

View File

@@ -1,5 +1,5 @@
site_name: sharp
site_url: http://sharp.pixelplumbing.com/
site_url: https://sharp.pixelplumbing.com/
repo_url: https://github.com/lovell/sharp
site_description: High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images
copyright: <a href="https://pixelplumbing.com/">pixelplumbing.com</a>

View File

@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"version": "0.21.1",
"version": "0.22.1",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -93,37 +93,36 @@
"vips"
],
"dependencies": {
"bindings": "^1.3.1",
"color": "^3.1.0",
"color": "^3.1.1",
"detect-libc": "^1.0.3",
"fs-copy-file-sync": "^1.1.1",
"nan": "^2.11.1",
"nan": "^2.13.2",
"npmlog": "^4.1.2",
"prebuild-install": "^5.2.2",
"semver": "^5.6.0",
"prebuild-install": "^5.3.0",
"semver": "^6.0.0",
"simple-get": "^3.0.3",
"tar": "^4.4.8",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
"async": "^2.6.1",
"async": "^2.6.2",
"cc": "^1.0.2",
"decompress-zip": "^0.3.1",
"documentation": "^8.1.2",
"decompress-zip": "^0.3.2",
"documentation": "^10.0.0",
"exif-reader": "^1.0.2",
"icc": "^1.0.0",
"license-checker": "^24.0.1",
"mocha": "^5.2.0",
"mock-fs": "^4.7.0",
"nyc": "^13.1.0",
"prebuild": "^8.1.2",
"prebuild-ci": "^2.3.0",
"rimraf": "^2.6.2",
"license-checker": "^25.0.1",
"mocha": "^6.1.4",
"mock-fs": "^4.9.0",
"nyc": "^14.0.0",
"prebuild": "^8.2.1",
"prebuild-ci": "^3.0.0",
"rimraf": "^2.6.3",
"semistandard": "^13.0.1"
},
"license": "Apache-2.0",
"config": {
"libvips": "8.7.0"
"libvips": "8.7.4"
},
"engines": {
"node": ">=6"

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -31,13 +31,13 @@ using vips::VImage;
namespace sharp {
// Convenience methods to access the attributes of a v8::Object
bool HasAttr(v8::Handle<v8::Object> obj, std::string attr) {
bool HasAttr(v8::Local<v8::Object> obj, std::string attr) {
return Nan::Has(obj, Nan::New(attr).ToLocalChecked()).FromJust();
}
std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr) {
std::string AttrAsStr(v8::Local<v8::Object> obj, std::string attr) {
return *Nan::Utf8String(Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked());
}
std::vector<double> AttrAsRgba(v8::Handle<v8::Object> obj, std::string attr) {
std::vector<double> AttrAsRgba(v8::Local<v8::Object> obj, std::string attr) {
v8::Local<v8::Object> background = AttrAs<v8::Object>(obj, attr);
std::vector<double> rgba(4);
for (unsigned int i = 0; i < 4; i++) {
@@ -48,7 +48,7 @@ namespace sharp {
// Create an InputDescriptor instance from a v8::Object describing an input image
InputDescriptor* CreateInputDescriptor(
v8::Handle<v8::Object> input, std::vector<v8::Local<v8::Object>> &buffersToPersist
v8::Local<v8::Object> input, std::vector<v8::Local<v8::Object>> &buffersToPersist
) {
Nan::HandleScope();
InputDescriptor *descriptor = new InputDescriptor;
@@ -71,7 +71,10 @@ namespace sharp {
descriptor->rawWidth = AttrTo<uint32_t>(input, "rawWidth");
descriptor->rawHeight = AttrTo<uint32_t>(input, "rawHeight");
}
// Page input for multi-page TIFF
// Multi-page input (GIF, TIFF, PDF)
if (HasAttr(input, "pages")) {
descriptor->pages = AttrTo<int32_t>(input, "pages");
}
if (HasAttr(input, "page")) {
descriptor->page = AttrTo<uint32_t>(input, "page");
}
@@ -137,6 +140,7 @@ namespace sharp {
case ImageType::VIPS: id = "v"; break;
case ImageType::RAW: id = "raw"; break;
case ImageType::UNKNOWN: id = "unknown"; break;
case ImageType::MISSING: id = "missing"; break;
}
return id;
}
@@ -203,10 +207,24 @@ namespace sharp {
} else if (EndsWith(loader, "Magick") || EndsWith(loader, "MagickFile")) {
imageType = ImageType::MAGICK;
}
} else {
if (EndsWith(vips::VError().what(), " not found\n")) {
imageType = ImageType::MISSING;
}
}
return imageType;
}
/*
Does this image type support multiple pages?
*/
bool ImageTypeSupportsPage(ImageType imageType) {
return
imageType == ImageType::GIF ||
imageType == ImageType::TIFF ||
imageType == ImageType::PDF;
}
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/
@@ -238,15 +256,16 @@ namespace sharp {
if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data());
}
if (imageType == ImageType::TIFF) {
option->set("page", descriptor->page);
if (ImageTypeSupportsPage(imageType)) {
option->set("n", descriptor->pages);
option->set("page", descriptor->page);
}
image = VImage::new_from_buffer(descriptor->buffer, descriptor->bufferLength, nullptr, option);
if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
SetDensity(image, descriptor->density);
}
} catch (...) {
throw vips::VError("Input buffer has corrupt header");
} catch (vips::VError const &err) {
throw vips::VError(std::string("Input buffer has corrupt header: ") + err.what());
}
} else {
throw vips::VError("Input buffer contains unsupported image format");
@@ -269,6 +288,9 @@ namespace sharp {
} else {
// From filesystem
imageType = DetermineImageType(descriptor->file.data());
if (imageType == ImageType::MISSING) {
throw vips::VError("Input file is missing");
}
if (imageType != ImageType::UNKNOWN) {
try {
vips::VOption *option = VImage::option()
@@ -280,18 +302,19 @@ namespace sharp {
if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data());
}
if (imageType == ImageType::TIFF) {
option->set("page", descriptor->page);
if (ImageTypeSupportsPage(imageType)) {
option->set("n", descriptor->pages);
option->set("page", descriptor->page);
}
image = VImage::new_from_file(descriptor->file.data(), option);
if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
SetDensity(image, descriptor->density);
}
} catch (...) {
throw vips::VError("Input file has corrupt header");
} catch (vips::VError const &err) {
throw vips::VError(std::string("Input file has corrupt header: ") + err.what());
}
} else {
throw vips::VError("Input file is missing or of an unsupported image format");
throw vips::VError("Input file contains unsupported image format");
}
}
}

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -25,8 +25,8 @@
// Verify platform and compiler compatibility
#if (VIPS_MAJOR_VERSION < 8 || (VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 6))
#error libvips version 8.6.1+ is required - see sharp.pixelplumbing.com/page/install
#if (VIPS_MAJOR_VERSION < 8 || (VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 7))
#error libvips version 8.7.0+ is required - see sharp.pixelplumbing.com/page/install
#endif
#if ((!defined(__clang__)) && defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6)))
@@ -53,6 +53,7 @@ namespace sharp {
int rawChannels;
int rawWidth;
int rawHeight;
int pages;
int page;
int createChannels;
int createWidth;
@@ -61,12 +62,13 @@ namespace sharp {
InputDescriptor():
buffer(nullptr),
failOnError(FALSE),
failOnError(TRUE),
bufferLength(0),
density(72.0),
rawChannels(0),
rawWidth(0),
rawHeight(0),
pages(1),
page(0),
createChannels(0),
createWidth(0),
@@ -75,22 +77,22 @@ namespace sharp {
};
// Convenience methods to access the attributes of a v8::Object
bool HasAttr(v8::Handle<v8::Object> obj, std::string attr);
std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr);
std::vector<double> AttrAsRgba(v8::Handle<v8::Object> obj, std::string attr);
template<typename T> v8::Local<T> AttrAs(v8::Handle<v8::Object> obj, std::string attr) {
bool HasAttr(v8::Local<v8::Object> obj, std::string attr);
std::string AttrAsStr(v8::Local<v8::Object> obj, std::string attr);
std::vector<double> AttrAsRgba(v8::Local<v8::Object> obj, std::string attr);
template<typename T> v8::Local<T> AttrAs(v8::Local<v8::Object> obj, std::string attr) {
return Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked().As<T>();
}
template<typename T> T AttrTo(v8::Handle<v8::Object> obj, std::string attr) {
template<typename T> T AttrTo(v8::Local<v8::Object> obj, std::string attr) {
return Nan::To<T>(Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked()).FromJust();
}
template<typename T> T AttrTo(v8::Handle<v8::Object> obj, int attr) {
template<typename T> T AttrTo(v8::Local<v8::Object> obj, int attr) {
return Nan::To<T>(Nan::Get(obj, attr).ToLocalChecked()).FromJust();
}
// Create an InputDescriptor instance from a v8::Object describing an input image
InputDescriptor* CreateInputDescriptor(
v8::Handle<v8::Object> input, std::vector<v8::Local<v8::Object>> &buffersToPersist);
v8::Local<v8::Object> input, std::vector<v8::Local<v8::Object>> &buffersToPersist);
enum class ImageType {
JPEG,
@@ -106,7 +108,8 @@ namespace sharp {
FITS,
VIPS,
RAW,
UNKNOWN
UNKNOWN,
MISSING
};
// How many tasks are in the queue?
@@ -139,6 +142,11 @@ namespace sharp {
*/
ImageType DetermineImageType(char const *file);
/*
Does this image type support multiple pages?
*/
bool ImageTypeSupportsPage(ImageType imageType);
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -68,6 +68,15 @@ class MetadataWorker : public Nan::AsyncWorker {
if (image.get_typeof("interlaced") == G_TYPE_INT) {
baton->isProgressive = image.get_int("interlaced") == 1;
}
if (image.get_typeof("palette-bit-depth") == G_TYPE_INT) {
baton->paletteBitDepth = image.get_int("palette-bit-depth");
}
if (image.get_typeof(VIPS_META_N_PAGES) == G_TYPE_INT) {
baton->pages = image.get_int(VIPS_META_N_PAGES);
}
if (image.get_typeof(VIPS_META_PAGE_HEIGHT) == G_TYPE_INT) {
baton->pageHeight = image.get_int(VIPS_META_PAGE_HEIGHT);
}
baton->hasProfile = sharp::HasProfile(image);
// Derived attributes
baton->hasAlpha = sharp::HasAlpha(image);
@@ -140,6 +149,15 @@ class MetadataWorker : public Nan::AsyncWorker {
New<v8::String>(baton->chromaSubsampling).ToLocalChecked());
}
Set(info, New("isProgressive").ToLocalChecked(), New<v8::Boolean>(baton->isProgressive));
if (baton->paletteBitDepth > 0) {
Set(info, New("paletteBitDepth").ToLocalChecked(), New<v8::Uint32>(baton->paletteBitDepth));
}
if (baton->pages > 0) {
Set(info, New("pages").ToLocalChecked(), New<v8::Uint32>(baton->pages));
}
if (baton->pageHeight > 0) {
Set(info, New("pageHeight").ToLocalChecked(), New<v8::Uint32>(baton->pageHeight));
}
Set(info, New("hasProfile").ToLocalChecked(), New<v8::Boolean>(baton->hasProfile));
Set(info, New("hasAlpha").ToLocalChecked(), New<v8::Boolean>(baton->hasAlpha));
if (baton->orientation > 0) {

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -33,6 +33,9 @@ struct MetadataBaton {
int density;
std::string chromaSubsampling;
bool isProgressive;
int paletteBitDepth;
int pages;
int pageHeight;
bool hasProfile;
bool hasAlpha;
int orientation;
@@ -53,6 +56,9 @@ struct MetadataBaton {
channels(0),
density(0),
isProgressive(false),
paletteBitDepth(0),
pages(0),
pageHeight(0),
hasProfile(false),
hasAlpha(false),
orientation(0),

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -39,127 +39,15 @@ namespace sharp {
}
/*
Composite overlayImage over image at given position
Assumes alpha channels are already premultiplied and will be unpremultiplied after
*/
VImage Composite(VImage image, VImage overlayImage, int const left, int const top) {
if (HasAlpha(overlayImage)) {
// Alpha composite
if (overlayImage.width() < image.width() || overlayImage.height() < image.height()) {
// Enlarge overlay
std::vector<double> const background { 0.0, 0.0, 0.0, 0.0 };
overlayImage = overlayImage.embed(left, top, image.width(), image.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
return AlphaComposite(image, overlayImage);
} else {
if (HasAlpha(image)) {
// Add alpha channel to overlayImage so channels match
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
}
return image.insert(overlayImage, left, top);
Ensures alpha channel, if missing.
*/
VImage EnsureAlpha(VImage image) {
if (!HasAlpha(image)) {
std::vector<double> alpha;
alpha.push_back(sharp::MaximumImageAlpha(image.interpretation()));
image = image.bandjoin_const(alpha);
}
}
VImage AlphaComposite(VImage dst, VImage src) {
// Split src into non-alpha and alpha channels
VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1));
VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0);
// Split dst into non-alpha and alpha channels
VImage dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
VImage dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0);
//
// Compute normalized output alpha channel:
//
// References:
// - http://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending
// - https://github.com/libvips/ruby-vips/issues/28#issuecomment-9014826
//
// out_a = src_a + dst_a * (1 - src_a)
// ^^^^^^^^^^^
// t0
VImage t0 = srcAlpha.linear(-1.0, 1.0);
VImage outAlphaNormalized = srcAlpha + dstAlpha * t0;
//
// Compute output RGB channels:
//
// Wikipedia:
// out_rgb = (src_rgb * src_a + dst_rgb * dst_a * (1 - src_a)) / out_a
// ^^^^^^^^^^^
// t0
//
// Omit division by `out_a` since `Compose` is supposed to output a
// premultiplied RGBA image as reversal of premultiplication is handled
// externally.
//
VImage outRGBPremultiplied = srcWithoutAlpha + dstWithoutAlpha * t0;
// Combine RGB and alpha channel into output image:
return outRGBPremultiplied.bandjoin(outAlphaNormalized * 255.0);
}
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage mask, VImage dst, const int gravity) {
using sharp::CalculateCrop;
using sharp::HasAlpha;
using sharp::MaximumImageAlpha;
bool maskHasAlpha = HasAlpha(mask);
if (!maskHasAlpha && mask.bands() > 1) {
throw VError("Overlay image must have an alpha channel or one band");
}
if (!HasAlpha(dst)) {
throw VError("Image to be overlaid must have an alpha channel");
}
if (mask.width() > dst.width() || mask.height() > dst.height()) {
throw VError("Overlay image must have same dimensions or smaller");
}
// Enlarge overlay mask, if required
if (mask.width() < dst.width() || mask.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), mask.width(), mask.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
// we use the mask alpha if it has alpha
if (maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));;
}
// Split dst into an optional alpha
VImage dstAlpha = dst.extract_band(dst.bands() - 1, VImage::option()->set("n", 1));
// we use the dst non-alpha
dst = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
// the range of the mask and the image need to match .. one could be
// 16-bit, one 8-bit
double const dstMax = MaximumImageAlpha(dst.interpretation());
double const maskMax = MaximumImageAlpha(mask.interpretation());
// combine the new mask and the existing alpha ... there are
// many ways of doing this, mult is the simplest
mask = dstMax * ((mask / maskMax) * (dstAlpha / dstMax));
// append the mask to the image data ... the mask might be float now,
// we must cast the format down to match the image data
return dst.bandjoin(mask.cast(dst.format()));
return image;
}
/*
@@ -297,6 +185,21 @@ namespace sharp {
0.0, 0.0, 0.0, 1.0));
}
VImage Modulate(VImage image, double const brightness, double const saturation, int const hue) {
if (HasAlpha(image)) {
// Separate alpha channel
VImage alpha = image[image.bands() - 1];
return RemoveAlpha(image)
.colourspace(VIPS_INTERPRETATION_LCH)
.linear({brightness, saturation, 1}, {0, 0, static_cast<double>(hue)})
.bandjoin(alpha);
} else {
return image
.colourspace(VIPS_INTERPRETATION_LCH)
.linear({brightness, saturation, 1}, {0, 0, static_cast<double>(hue)});
}
}
/*
* Sharpen flat and jagged areas. Use sigma of -1.0 for fast sharpen.
*/

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -31,25 +31,9 @@ namespace sharp {
VImage RemoveAlpha(VImage image);
/*
Alpha composite src over dst with given gravity.
Assumes alpha channels are already premultiplied and will be unpremultiplied after.
*/
VImage Composite(VImage src, VImage dst, const int gravity);
/*
Composite overlayImage over image at given position
*/
VImage Composite(VImage image, VImage overlayImage, int const x, int const y);
/*
Alpha composite overlayImage over image, assumes matching dimensions
Ensures alpha channel, if missing.
*/
VImage AlphaComposite(VImage image, VImage overlayImage);
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage src, VImage dst, const int gravity);
VImage EnsureAlpha(VImage image);
/*
* Tint an image using the specified chroma, preserving the original image luminance
@@ -113,6 +97,11 @@ namespace sharp {
*/
VImage Recomb(VImage image, std::unique_ptr<double[]> const &matrix);
/*
* Modulate brightness, saturation and hue
*/
VImage Modulate(VImage image, double const brightness, double const saturation, int const hue);
} // namespace sharp
#endif // SRC_OPERATIONS_H_

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -75,7 +75,8 @@ class PipelineWorker : public Nan::AsyncWorker {
// Limit input images to a given number of pixels, where pixels = width * height
// Ignore if 0
if (baton->limitInputPixels > 0 && image.width() * image.height() > baton->limitInputPixels) {
if (baton->limitInputPixels > 0 &&
static_cast<uint64_t>(image.width()) * image.height() > static_cast<uint64_t>(baton->limitInputPixels)) {
(baton->err).append("Input image exceeds pixel limit");
return Error();
}
@@ -297,7 +298,7 @@ class PipelineWorker : public Nan::AsyncWorker {
}
// Ensure we're using a device-independent colour space
if (sharp::HasProfile(image)) {
if (sharp::HasProfile(image) && image.interpretation() != VIPS_INTERPRETATION_LABS) {
// Convert to sRGB using embedded profile
try {
image = image.icc_transform(
@@ -343,30 +344,20 @@ class PipelineWorker : public Nan::AsyncWorker {
image = image.colourspace(VIPS_INTERPRETATION_B_W);
}
// Ensure image has an alpha channel when there is an overlay with an alpha channel
VImage overlayImage;
ImageType overlayImageType = ImageType::UNKNOWN;
bool shouldOverlayWithAlpha = FALSE;
if (baton->overlay != nullptr) {
std::tie(overlayImage, overlayImageType) = OpenInput(baton->overlay, baton->accessMethod);
if (HasAlpha(overlayImage)) {
shouldOverlayWithAlpha = !baton->overlayCutout;
if (!HasAlpha(image)) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
}
}
bool const shouldResize = xfactor != 1.0 || yfactor != 1.0;
bool const shouldBlur = baton->blurSigma != 0.0;
bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
bool const shouldModulate = baton->brightness != 1.0 || baton->saturation != 1.0 || baton->hue != 0.0;
if (shouldComposite && !HasAlpha(image)) {
image = sharp::EnsureAlpha(image);
}
bool const shouldPremultiplyAlpha = HasAlpha(image) &&
(shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldOverlayWithAlpha);
(shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldComposite);
// Premultiply image alpha channel before all transformations to avoid
// dark fringing around bright pixels
@@ -385,6 +376,15 @@ class PipelineWorker : public Nan::AsyncWorker {
) {
throw vips::VError("Unknown kernel");
}
// Ensure shortest edge is at least 1 pixel
if (image.width() / xfactor < 0.5) {
xfactor = 2 * image.width();
baton->width = 1;
}
if (image.height() / yfactor < 0.5) {
yfactor = 2 * image.height();
baton->height = 1;
}
image = image.resize(1.0 / xfactor, VImage::option()
->set("vscale", 1.0 / yfactor)
->set("kernel", kernel));
@@ -530,77 +530,76 @@ class PipelineWorker : public Nan::AsyncWorker {
image = sharp::Recomb(image, baton->recombMatrix);
}
if (shouldModulate) {
image = sharp::Modulate(image, baton->brightness, baton->saturation, baton->hue);
}
// Sharpen
if (shouldSharpen) {
image = sharp::Sharpen(image, baton->sharpenSigma, baton->sharpenFlat, baton->sharpenJagged);
}
// Composite with overlay, if present
if (baton->overlay != nullptr) {
// Verify overlay image is within current dimensions
if (overlayImage.width() > image.width() || overlayImage.height() > image.height()) {
throw vips::VError("Overlay image must have same dimensions or smaller");
}
// Check if overlay is tiled
if (baton->overlayTile) {
int const overlayImageWidth = overlayImage.width();
int const overlayImageHeight = overlayImage.height();
int across = 0;
int down = 0;
// Use gravity in overlay
if (overlayImageWidth <= baton->width) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / overlayImageWidth));
// Composite
if (shouldComposite) {
for (Composite *composite : baton->composite) {
VImage compositeImage;
ImageType compositeImageType = ImageType::UNKNOWN;
std::tie(compositeImage, compositeImageType) = OpenInput(composite->input, baton->accessMethod);
// Verify within current dimensions
if (compositeImage.width() > image.width() || compositeImage.height() > image.height()) {
throw vips::VError("Image to composite must have same dimensions or smaller");
}
if (overlayImageHeight <= baton->height) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / overlayImageHeight));
}
if (across != 0 || down != 0) {
int left;
int top;
overlayImage = overlayImage.replicate(across, down);
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
// the overlayX/YOffsets will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(),
baton->overlayXOffset, baton->overlayYOffset);
} else {
// the overlayGravity will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity);
// Check if overlay is tiled
if (composite->tile) {
int across = 0;
int down = 0;
// Use gravity in overlay
if (compositeImage.width() <= baton->width) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / compositeImage.width()));
}
overlayImage = overlayImage.extract_area(left, top, image.width(), image.height());
}
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0
baton->overlayGravity = 0;
}
if (baton->overlayCutout) {
// 'cut out' the image, premultiplication is not required
image = sharp::Cutout(overlayImage, image, baton->overlayGravity);
} else {
// Ensure overlay is sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB);
// Ensure overlay matches premultiplication state
if (shouldPremultiplyAlpha) {
// Ensure overlay has alpha channel
if (!HasAlpha(overlayImage)) {
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
if (compositeImage.height() <= baton->height) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / compositeImage.height()));
}
overlayImage = overlayImage.premultiply();
if (across != 0 || down != 0) {
int left;
int top;
compositeImage = compositeImage.replicate(across, down);
if (composite->left >= 0 && composite->top >= 0) {
std::tie(left, top) = sharp::CalculateCrop(
compositeImage.width(), compositeImage.height(), image.width(), image.height(),
composite->left, composite->top);
} else {
std::tie(left, top) = sharp::CalculateCrop(
compositeImage.width(), compositeImage.height(), image.width(), image.height(), composite->gravity);
}
compositeImage = compositeImage.extract_area(left, top, image.width(), image.height());
}
// gravity was used for extract_area, set it back to its default value of 0
composite->gravity = 0;
}
// Ensure image to composite is sRGB with premultiplied alpha
compositeImage = compositeImage.colourspace(VIPS_INTERPRETATION_sRGB);
if (!HasAlpha(compositeImage)) {
compositeImage = sharp::EnsureAlpha(compositeImage);
}
compositeImage = compositeImage.premultiply();
// Calculate position
int left;
int top;
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
// Composite images at given offsets
if (composite->left >= 0 && composite->top >= 0) {
// Composite image at given offsets
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayXOffset, baton->overlayYOffset);
compositeImage.width(), compositeImage.height(), composite->left, composite->top);
} else {
// Composite images with given gravity
// Composite image with given gravity
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayGravity);
compositeImage.width(), compositeImage.height(), composite->gravity);
}
image = sharp::Composite(image, overlayImage, left, top);
// Composite
image = image.composite2(compositeImage, composite->mode, VImage::option()
->set("premultiplied", TRUE)
->set("x", left)
->set("y", top));
}
}
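For orientation, the JavaScript-level composite API driving this pipeline code is used roughly as follows; the file names and blend mode are illustrative:

const sharp = require('sharp');
sharp('background.png')
  .composite([
    { input: 'overlay.png', gravity: 'southeast' },
    { input: 'texture.png', tile: true, blend: 'multiply' }
  ])
  .toFile('composited.png');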
@@ -668,6 +667,11 @@ class PipelineWorker : public Nan::AsyncWorker {
image = sharp::RemoveAlpha(image);
}
// Ensure alpha channel, if missing
if (baton->ensureAlpha) {
image = sharp::EnsureAlpha(image);
}
// Convert image to sRGB, if not already
if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT);
@@ -721,14 +725,15 @@ class PipelineWorker : public Nan::AsyncWorker {
(inputImageType == ImageType::PNG || inputImageType == ImageType::GIF || inputImageType == ImageType::SVG))) {
// Write PNG to buffer
sharp::AssertImageTypeDimensions(image, ImageType::PNG);
// Strip profile
if (!baton->withMetadata) {
vips_image_remove(image.get_image(), VIPS_META_ICC_NAME);
}
VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)));
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("dither", baton->pngDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -834,14 +839,15 @@ class PipelineWorker : public Nan::AsyncWorker {
(inputImageType == ImageType::PNG || inputImageType == ImageType::GIF || inputImageType == ImageType::SVG))) {
// Write PNG to file
sharp::AssertImageTypeDimensions(image, ImageType::PNG);
// Strip profile
if (!baton->withMetadata) {
vips_image_remove(image.get_image(), VIPS_META_ICC_NAME);
}
image.pngsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE));
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("dither", baton->pngDither));
baton->formatOut = "png";
} else if (baton->formatOut == "webp" || (mightMatchInput && isWebp) ||
(willMatchInput && inputImageType == ImageType::WEBP)) {
@@ -1013,13 +1019,17 @@ class PipelineWorker : public Nan::AsyncWorker {
GetFromPersistent(index);
return index + 1;
});
// Delete baton
delete baton->input;
delete baton->overlay;
delete baton->boolean;
for_each(baton->joinChannelIn.begin(), baton->joinChannelIn.end(),
[this](sharp::InputDescriptor *joinChannelIn) {
delete joinChannelIn;
});
for (Composite *composite : baton->composite) {
delete composite->input;
delete composite;
}
for (sharp::InputDescriptor *input : baton->joinChannelIn) {
delete input;
}
delete baton;
// Handle warnings
@@ -1166,14 +1176,21 @@ NAN_METHOD(pipeline) {
// Tint chroma
baton->tintA = AttrTo<double>(options, "tintA");
baton->tintB = AttrTo<double>(options, "tintB");
// Overlay options
if (HasAttr(options, "overlay")) {
baton->overlay = CreateInputDescriptor(AttrAs<v8::Object>(options, "overlay"), buffersToPersist);
baton->overlayGravity = AttrTo<int32_t>(options, "overlayGravity");
baton->overlayXOffset = AttrTo<int32_t>(options, "overlayXOffset");
baton->overlayYOffset = AttrTo<int32_t>(options, "overlayYOffset");
baton->overlayTile = AttrTo<bool>(options, "overlayTile");
baton->overlayCutout = AttrTo<bool>(options, "overlayCutout");
// Composite
v8::Local<v8::Array> compositeArray = Nan::Get(options, Nan::New("composite").ToLocalChecked())
.ToLocalChecked().As<v8::Array>();
int const compositeArrayLength = AttrTo<uint32_t>(compositeArray, "length");
for (int i = 0; i < compositeArrayLength; i++) {
v8::Local<v8::Object> compositeObject = Nan::Get(compositeArray, i).ToLocalChecked().As<v8::Object>();
Composite *composite = new Composite;
composite->input = CreateInputDescriptor(AttrAs<v8::Object>(compositeObject, "input"), buffersToPersist);
composite->mode = static_cast<VipsBlendMode>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BLEND_MODE, AttrAsStr(compositeObject, "blend").data()));
composite->gravity = AttrTo<uint32_t>(compositeObject, "gravity");
composite->left = AttrTo<int32_t>(compositeObject, "left");
composite->top = AttrTo<int32_t>(compositeObject, "top");
composite->tile = AttrTo<bool>(compositeObject, "tile");
baton->composite.push_back(composite);
}
// Resize options
baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement");
@@ -1199,6 +1216,9 @@ NAN_METHOD(pipeline) {
baton->flattenBackground = AttrAsRgba(options, "flattenBackground");
baton->negate = AttrTo<bool>(options, "negate");
baton->blurSigma = AttrTo<double>(options, "blurSigma");
baton->brightness = AttrTo<double>(options, "brightness");
baton->saturation = AttrTo<double>(options, "saturation");
baton->hue = AttrTo<int32_t>(options, "hue");
baton->medianSize = AttrTo<uint32_t>(options, "medianSize");
baton->sharpenSigma = AttrTo<double>(options, "sharpenSigma");
baton->sharpenFlat = AttrTo<double>(options, "sharpenFlat");
@@ -1227,6 +1247,7 @@ NAN_METHOD(pipeline) {
baton->extractChannel = AttrTo<int32_t>(options, "extractChannel");
baton->removeAlpha = AttrTo<bool>(options, "removeAlpha");
baton->ensureAlpha = AttrTo<bool>(options, "ensureAlpha");
if (HasAttr(options, "boolean")) {
baton->boolean = CreateInputDescriptor(AttrAs<v8::Object>(options, "boolean"), buffersToPersist);
baton->booleanOp = sharp::GetBooleanOperation(AttrAsStr(options, "booleanOp"));
@@ -1275,6 +1296,10 @@ NAN_METHOD(pipeline) {
baton->pngProgressive = AttrTo<bool>(options, "pngProgressive");
baton->pngCompressionLevel = AttrTo<uint32_t>(options, "pngCompressionLevel");
baton->pngAdaptiveFiltering = AttrTo<bool>(options, "pngAdaptiveFiltering");
baton->pngPalette = AttrTo<bool>(options, "pngPalette");
baton->pngQuality = AttrTo<uint32_t>(options, "pngQuality");
baton->pngColours = AttrTo<uint32_t>(options, "pngColours");
baton->pngDither = AttrTo<double>(options, "pngDither");
baton->webpQuality = AttrTo<uint32_t>(options, "webpQuality");
baton->webpAlphaQuality = AttrTo<uint32_t>(options, "webpAlphaQuality");
baton->webpLossless = AttrTo<bool>(options, "webpLossless");

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -34,6 +34,23 @@ enum class Canvas {
IGNORE_ASPECT
};
struct Composite {
sharp::InputDescriptor *input;
VipsBlendMode mode;
int gravity;
int left;
int top;
bool tile;
Composite():
input(nullptr),
mode(VIPS_BLEND_MODE_OVER),
gravity(0),
left(-1),
top(-1),
tile(false) {}
};
struct PipelineBaton {
sharp::InputDescriptor *input;
std::string iccProfilePath;
@@ -42,12 +59,7 @@ struct PipelineBaton {
std::string fileOut;
void *bufferOut;
size_t bufferOutLength;
sharp::InputDescriptor *overlay;
int overlayGravity;
int overlayXOffset;
int overlayYOffset;
bool overlayTile;
bool overlayCutout;
std::vector<Composite *> composite;
std::vector<sharp::InputDescriptor *> joinChannelIn;
int topOffsetPre;
int leftOffsetPre;
@@ -75,6 +87,9 @@ struct PipelineBaton {
std::vector<double> flattenBackground;
bool negate;
double blurSigma;
double brightness;
double saturation;
int hue;
int medianSize;
double sharpenSigma;
double sharpenFlat;
@@ -115,6 +130,10 @@ struct PipelineBaton {
bool pngProgressive;
int pngCompressionLevel;
bool pngAdaptiveFiltering;
bool pngPalette;
int pngQuality;
int pngColours;
double pngDither;
int webpQuality;
int webpAlphaQuality;
bool webpNearLossless;
@@ -142,6 +161,7 @@ struct PipelineBaton {
VipsOperationBoolean bandBoolOp;
int extractChannel;
bool removeAlpha;
bool ensureAlpha;
VipsInterpretation colourspace;
int tileSize;
int tileOverlap;
@@ -156,12 +176,6 @@ struct PipelineBaton {
input(nullptr),
limitInputPixels(0),
bufferOutLength(0),
overlay(nullptr),
overlayGravity(0),
overlayXOffset(-1),
overlayYOffset(-1),
overlayTile(false),
overlayCutout(false),
topOffsetPre(-1),
topOffsetPost(-1),
channels(0),
@@ -178,6 +192,9 @@ struct PipelineBaton {
flattenBackground{ 0.0, 0.0, 0.0 },
negate(false),
blurSigma(0.0),
brightness(1.0),
saturation(1.0),
hue(0),
medianSize(0),
sharpenSigma(0.0),
sharpenFlat(1.0),
@@ -215,6 +232,10 @@ struct PipelineBaton {
pngProgressive(false),
pngCompressionLevel(9),
pngAdaptiveFiltering(false),
pngPalette(false),
pngQuality(100),
pngColours(256),
pngDither(1.0),
webpQuality(80),
tiffQuality(80),
tiffCompression(VIPS_FOREIGN_TIFF_COMPRESSION_JPEG),
@@ -237,6 +258,7 @@ struct PipelineBaton {
bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST),
extractChannel(-1),
removeAlpha(false),
ensureAlpha(false),
colourspace(VIPS_INTERPRETATION_LAST),
tileSize(256),
tileOverlap(0),

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

Binary image fixtures added (contents not shown): test/fixtures/expected/modulate-all.jpg (664 KiB) plus 23 further image files ranging in size from 192 B to 692 KiB.

View File

@@ -70,6 +70,7 @@ module.exports = {
inputJpgCenteredImage: getPath('centered_image.jpeg'),
inputJpgRandom: getPath('random.jpg'), // convert -size 200x200 xc: +noise Random random.jpg
inputJpgThRandom: getPath('thRandom.jpg'), // convert random.jpg -channel G -threshold 5% -separate +channel -negate thRandom.jpg
inputJpgLossless: getPath('testimgl.jpg'), // Lossless JPEG from ftp://ftp.fu-berlin.de/unix/X11/graphics/ImageMagick/delegates/ljpeg-6b.tar.gz
inputPng: getPath('50020484-00001.png'), // http://c.searspartsdirect.com/lis_png/PLDM/50020484-00001.png
inputPngWithTransparency: getPath('blackbug.png'), // public domain
@@ -99,6 +100,7 @@ module.exports = {
inputTiff8BitDepth: getPath('8bit_depth.tiff'),
inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
inputGifGreyPlusAlpha: getPath('grey-plus-alpha.gif'), // http://i.imgur.com/gZ5jlmE.gif
inputGifAnimated: getPath('rotating-squares.gif'), // CC0 https://loading.io/spinner/blocks/-rotating-squares-preloader-gif
inputSvg: getPath('check.svg'), // http://dev.w3.org/SVG/tools/svgweb/samples/svg-files/check.svg
inputSvgWithEmbeddedImages: getPath('struct-image-04-t.svg'), // https://dev.w3.org/SVG/profiles/1.2T/test/svg/struct-image-04-t.svg
@@ -118,6 +120,8 @@ module.exports = {
outputTiff: getPath('output.tiff'),
outputZoinks: getPath('output.zoinks'), // an 'unknown' file extension
testPattern: getPath('test-pattern.png'),
// Path for tests requiring human inspection
path: getPath,
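For orientation, a hedged sketch of how the suite consumes these helpers from a file under test/unit/; 'output.example.png' is an illustrative name rather than an existing fixture:

const sharp = require('../../');
const fixtures = require('../fixtures');

sharp(fixtures.inputGifAnimated)                  // resolves test/fixtures/rotating-squares.gif
  .toFile(fixtures.path('output.example.png'))    // fixtures.path === getPath, so output lands in test/fixtures/
  .then(info => console.log(info.width, info.height));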

test/fixtures/rotating-squares.gif (new binary file, 41 KiB)
test/fixtures/test-pattern.png (new binary file, 79 KiB)
test/fixtures/testimgl.jpg (new binary file, 38 KiB)


@@ -147,6 +147,47 @@
...
fun:WebPDecode
}
{
cond_libwebp_generic
Memcheck:Cond
obj:/usr/lib/x86_64-linux-gnu/libwebp.so.6.0.2
}
# tiff
{
param_tiff_write_encoded_tile
Memcheck:Param
write(buf)
fun:write
...
fun:TIFFWriteEncodedTile
}
# gsf
{
param_gsf_output_write
Memcheck:Param
write(buf)
fun:write
...
fun:gsf_output_write
}
{
value_gsf_output_write_crc32_little
Memcheck:Value8
fun:crc32_little
...
fun:gsf_output_write
}
# fontconfig
{
leak_fontconfig_FcConfigSubstituteWithPat
Memcheck:Leak
match-leak-kinds: definite,indirect
...
fun:FcConfigSubstituteWithPat
}
# libvips
{
@@ -197,6 +238,11 @@
...
fun:vips_region_prepare_to
}
{
cond_libvips_vips_stats_scan
Memcheck:Cond
fun:vips_stats_scan
}
{
value_libvips_vips_region_fill
Memcheck:Value8
@@ -204,6 +250,17 @@
fun:vips_region_fill
fun:vips_region_prepare
}
{
value_libvips_vips_hist_find_uchar_scan
Memcheck:Value8
fun:vips_hist_find_uchar_scan
}
{
value_libvips_write_webp_image
Memcheck:Value8
...
fun:write_webp_image
}
{
leak_libvips_init
Memcheck:Leak
@@ -377,6 +434,70 @@
...
fun:_ZN4node12NodePlatformC1EiPN2v817TracingControllerE
}
{
param_nodejs_delayed_task_scheduler
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZZN4node20BackgroundTaskRunner20DelayedTaskScheduler5StartEvENUlPvE_4_FUNES2_
}
{
param_nodejs_isolate_data
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node5StartEPN2v87IsolateEPNS_11IsolateDataERKSt6vectorISsSaISsEES9_
}
{
param_nodejs_try_init_and_run_loop
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node17SyncProcessRunner23TryInitializeAndRunLoopEN2v85LocalINS1_5ValueEEE
}
{
param_nodejs_run_exit_handlers
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node7tracing5AgentD1Ev
fun:_ZN4node5._215D1Ev
fun:__run_exit_handlers
}
{
leak_nodejs_crypto_entropy_source
Memcheck:Leak
...
fun:_ZN4node6crypto13EntropySourceEPhm
}
{
leak_nodejs_debug_options
Memcheck:Leak
...
fun:_ZN4node9inspector5Agent5StartERKSsSt10shared_ptrINS_12DebugOptionsEEb
}
{
leak_nodejs_start
Memcheck:Leak
match-leak-kinds: definite
fun:_Znwm
fun:_ZN4node5StartEiPPc
}
{
leak_nodejs_start_background_task_runner
Memcheck:Leak
match-leak-kinds: possible
...
fun:_ZN4node20BackgroundTaskRunnerC1Ei
}
{
leak_nan_FunctionCallbackInfo
Memcheck:Leak


@@ -115,6 +115,7 @@ describe('Alpha transparency', function () {
fixtures.inputWebP
].map(function (input) {
return sharp(input)
.resize(10)
.removeAlpha()
.toBuffer({ resolveWithObject: true })
.then(function (result) {
@@ -122,4 +123,24 @@ describe('Alpha transparency', function () {
});
}));
});
it('Ensures alpha from fixtures without transparency, ignores those with', function () {
return Promise.all([
fixtures.inputPngWithTransparency,
fixtures.inputPngWithTransparency16bit,
fixtures.inputWebPWithTransparency,
fixtures.inputJpg,
fixtures.inputPng,
fixtures.inputWebP
].map(function (input) {
return sharp(input)
.resize(10)
.ensureAlpha()
.png()
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(4, result.info.channels);
});
}));
});
});
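As a rough usage sketch of the two alpha helpers exercised above; 'photo.jpg' is a hypothetical input path:

const sharp = require('sharp');

sharp('photo.jpg')                       // an opaque 3-channel image
  .ensureAlpha()                         // appends a fully-opaque alpha channel if none exists
  .png()
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.channels)); // 4; removeAlpha() would drop it again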


@@ -5,8 +5,10 @@ const sharp = require('../../');
const usingCache = detectLibc.family !== detectLibc.MUSL;
const usingSimd = !process.env.G_DEBUG;
const concurrency = detectLibc.family === detectLibc.MUSL ? 1 : undefined;
beforeEach(function () {
sharp.cache(usingCache);
sharp.simd(usingSimd);
sharp.concurrency(concurrency);
});
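For context, a minimal sketch of the three global settings this hook adjusts; the literal values below are illustrative, since the suite derives them from detect-libc and G_DEBUG as shown above:

const sharp = require('sharp');

sharp.cache(false);        // disable the libvips operation cache
sharp.simd(true);          // enable SIMD acceleration where available
sharp.concurrency(1);      // restrict libvips to a single worker thread
console.log(sharp.concurrency()); // with no argument, returns the current setting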

test/unit/composite.js (new file, 298 lines)

@@ -0,0 +1,298 @@
'use strict';
const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');
const red = { r: 255, g: 0, b: 0, alpha: 0.5 };
const green = { r: 0, g: 255, b: 0, alpha: 0.5 };
const blue = { r: 0, g: 0, b: 255, alpha: 0.5 };
const redRect = {
create: {
width: 80,
height: 60,
channels: 4,
background: red
}
};
const greenRect = {
create: {
width: 40,
height: 40,
channels: 4,
background: green
}
};
const blueRect = {
create: {
width: 60,
height: 40,
channels: 4,
background: blue
}
};
const blends = [
'over',
'xor',
'saturate',
'dest-over'
];
// Test
describe('composite', () => {
it('blend', () => Promise.all(
blends.map(blend => {
const filename = `composite.blend.${blend}.png`;
const actual = fixtures.path(`output.${filename}`);
const expected = fixtures.expected(filename);
return sharp(redRect)
.composite([{
input: blueRect,
blend
}])
.toFile(actual)
.then(() => {
fixtures.assertMaxColourDistance(actual, expected);
});
})
));
it('multiple', () => {
const filename = 'composite-multiple.png';
const actual = fixtures.path(`output.${filename}`);
const expected = fixtures.expected(filename);
return sharp(redRect)
.composite([{
input: blueRect,
gravity: 'northeast'
}, {
input: greenRect,
gravity: 'southwest'
}])
.toFile(actual)
.then(() => {
fixtures.assertMaxColourDistance(actual, expected);
});
});
it('zero offset', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
top: 0,
left: 0
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-0.jpg'), data, done);
});
});
it('offset and gravity', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
gravity: 4
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity.jpg'), data, done);
});
});
it('offset, gravity and tile', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
gravity: 4,
tile: true
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity-tile.jpg'), data, done);
});
});
it('offset and tile', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
tile: true
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-tile.jpg'), data, done);
});
});
it('cutout via dest-in', done => {
sharp(fixtures.inputJpg)
.resize(300, 300)
.composite([{
input: Buffer.from('<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'),
density: 96,
blend: 'dest-in',
cutout: true
}])
.png()
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(300, info.width);
assert.strictEqual(300, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('composite-cutout.png'), data, done);
});
});
describe('numeric gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
gravity: sharp.gravity[gravity]
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected(`overlay-gravity-${gravity}.jpg`), data, done);
});
});
});
});
describe('string gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
const expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
gravity: gravity
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
describe('tile and gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
const expected = fixtures.expected('overlay-tile-gravity-' + gravity + '.jpg');
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
tile: true,
gravity: gravity
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
describe('validation', () => {
it('missing images', () => {
assert.throws(() => {
sharp().composite();
}, /Expected array for images to composite but received undefined of type undefined/);
});
it('invalid images', () => {
assert.throws(() => {
sharp().composite(['invalid']);
}, /Expected object for image to composite but received invalid of type string/);
});
it('missing input', () => {
assert.throws(() => {
sharp().composite([{}]);
}, /Unsupported input/);
});
it('invalid blend', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', blend: 'invalid' }]);
}, /Expected valid blend name for blend but received invalid of type string/);
});
it('invalid tile', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', tile: 'invalid' }]);
}, /Expected boolean for tile but received invalid of type string/);
});
it('invalid left', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', left: 0.5 }]);
}, /Expected positive integer for left but received 0.5 of type number/);
});
it('invalid top', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', top: -1 }]);
}, /Expected positive integer for top but received -1 of type number/);
});
it('left but no top', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', left: 1 }]);
}, /Expected both left and top to be set/);
});
it('top but no left', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', top: 1 }]);
}, /Expected both left and top to be set/);
});
it('invalid gravity', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', gravity: 'invalid' }]);
}, /Expected valid gravity for gravity but received invalid of type string/);
});
});
});
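For readers skimming this new suite, a hedged sketch of the composite API it exercises; the overlay file names are hypothetical and only the option shapes (input, blend, gravity, top/left, tile) are taken from the tests above:

const sharp = require('sharp');

sharp({ create: { width: 80, height: 60, channels: 4, background: { r: 255, g: 0, b: 0, alpha: 0.5 } } })
  .composite([
    { input: 'badge.png', gravity: 'northeast' },             // placed by gravity
    { input: 'watermark.png', top: 10, left: 10, tile: true } // or by integral offsets, optionally tiled
  ])
  .png()
  .toFile('out.png');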


@@ -1,73 +0,0 @@
'use strict';
const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');
describe('Deprecated background', function () {
it('Flatten to RGB orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background({ r: 255, g: 102, b: 0 })
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten to CSS/hex orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background('#ff6600')
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten 16-bit PNG with transparency to orange', function (done) {
const output = fixtures.path('output.flatten-rgb16-orange.jpg');
sharp(fixtures.inputPngWithTransparency16bit)
.flatten()
.background({ r: 255, g: 102, b: 0 })
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual(32, info.width);
assert.strictEqual(32, info.height);
fixtures.assertMaxColourDistance(output, fixtures.expected('flatten-rgb16-orange.jpg'), 25);
done();
});
});
it('Ignored for JPEG', function (done) {
sharp(fixtures.inputJpg)
.background('#ff0000')
.flatten()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
done();
});
});
it('extend all sides equally with RGB', function (done) {
sharp(fixtures.inputJpg)
.resize(120)
.background({ r: 255, g: 0, b: 0 })
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(118, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal.jpg'), data, done);
});
});
});


@@ -1,279 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated crop', function () {
[
{
name: 'North',
width: 320,
height: 80,
gravity: sharp.gravity.north,
fixture: 'gravity-north.jpg'
},
{
name: 'East',
width: 80,
height: 320,
gravity: sharp.gravity.east,
fixture: 'gravity-east.jpg'
},
{
name: 'South',
width: 320,
height: 80,
gravity: sharp.gravity.south,
fixture: 'gravity-south.jpg'
},
{
name: 'West',
width: 80,
height: 320,
gravity: sharp.gravity.west,
fixture: 'gravity-west.jpg'
},
{
name: 'Center',
width: 320,
height: 80,
gravity: sharp.gravity.center,
fixture: 'gravity-center.jpg'
},
{
name: 'Centre',
width: 80,
height: 320,
gravity: sharp.gravity.centre,
fixture: 'gravity-centre.jpg'
},
{
name: 'Default (centre)',
width: 80,
height: 320,
gravity: undefined,
fixture: 'gravity-centre.jpg'
},
{
name: 'Northeast',
width: 320,
height: 80,
gravity: sharp.gravity.northeast,
fixture: 'gravity-north.jpg'
},
{
name: 'Northeast',
width: 80,
height: 320,
gravity: sharp.gravity.northeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Southeast',
width: 320,
height: 80,
gravity: sharp.gravity.southeast,
fixture: 'gravity-south.jpg'
},
{
name: 'Southeast',
width: 80,
height: 320,
gravity: sharp.gravity.southeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Southwest',
width: 320,
height: 80,
gravity: sharp.gravity.southwest,
fixture: 'gravity-south.jpg'
},
{
name: 'Southwest',
width: 80,
height: 320,
gravity: sharp.gravity.southwest,
fixture: 'gravity-west.jpg'
},
{
name: 'Northwest',
width: 320,
height: 80,
gravity: sharp.gravity.northwest,
fixture: 'gravity-north.jpg'
},
{
name: 'Northwest',
width: 80,
height: 320,
gravity: sharp.gravity.northwest,
fixture: 'gravity-west.jpg'
}
].forEach(function (settings) {
it(settings.name + ' gravity', function (done) {
sharp(fixtures.inputJpg)
.resize(settings.width, settings.height)
.crop(settings.gravity)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(settings.width, info.width);
assert.strictEqual(settings.height, info.height);
fixtures.assertSimilar(fixtures.expected(settings.fixture), data, done);
});
});
});
it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop('east')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-east.jpg'), data, done);
});
});
it('Invalid values fail', function () {
assert.throws(function () {
sharp().crop(9);
}, /Expected valid crop id\/name\/strategy for crop but received 9 of type number/);
assert.throws(function () {
sharp().crop(1.1);
}, /Expected valid crop id\/name\/strategy for crop but received 1.1 of type number/);
assert.throws(function () {
sharp().crop(-1);
}, /Expected valid crop id\/name\/strategy for crop but received -1 of type number/);
assert.throws(function () {
sharp().crop('zoinks');
}, /Expected valid crop id\/name\/strategy for crop but received zoinks of type string/);
});
it('Uses default value when none specified', function () {
assert.doesNotThrow(function () {
sharp().crop();
});
});
it('Skip crop when post-resize dimensions are at target', function () {
return sharp(fixtures.inputJpg)
.resize(1600, 1200)
.toBuffer()
.then(function (input) {
return sharp(input)
.resize(1110)
.crop(sharp.strategy.attention)
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(1110, result.info.width);
assert.strictEqual(832, result.info.height);
assert.strictEqual(undefined, result.info.cropOffsetLeft);
assert.strictEqual(undefined, result.info.cropOffsetTop);
});
});
});
describe('Entropy-based strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop(sharp.strategy.entropy)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-117, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-entropy.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop(sharp.strategy.entropy)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop('entropy')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
describe('Attention strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop(sharp.strategy.attention)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-143, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop(sharp.strategy.attention)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop('attention')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
});


@@ -1,440 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated embed', function () {
it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.embed('center')
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), data, done);
});
});
it('JPEG within PNG, no alpha channel', function (done) {
sharp(fixtures.inputJpg)
.embed()
.resize(320, 240)
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), data, done);
});
});
it('JPEG within WebP, to include alpha channel', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed()
.webp()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-3-into-4.webp'), data, done);
});
});
it('PNG with alpha channel', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(50, 50)
.embed()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(50, info.width);
assert.strictEqual(50, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-4-into-4.png'), data, done);
});
});
it('16-bit PNG with alpha channel', function (done) {
sharp(fixtures.inputPngWithTransparency16bit)
.resize(32, 16)
.embed()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-16bit.png'), data, done);
});
});
it('16-bit PNG with alpha channel onto RGBA', function (done) {
sharp(fixtures.inputPngWithTransparency16bit)
.resize(32, 16)
.embed()
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-16bit-rgba.png'), data, done);
});
});
it('PNG with 2 channels', function (done) {
sharp(fixtures.inputPngWithGreyAlpha)
.resize(32, 16)
.embed()
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-2channel.png'), data, done);
});
});
it('Enlarge and embed', function (done) {
sharp(fixtures.inputPngWithOneColor)
.embed()
.resize(320, 240)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-enlarge.png'), data, done);
});
});
it('Embed invalid param values should fail', function () {
assert.throws(function () {
sharp().embed(-1);
});
assert.throws(function () {
sharp().embed(8.1);
});
assert.throws(function () {
sharp().embed(9);
});
assert.throws(function () {
sharp().embed(1000000);
});
assert.throws(function () {
sharp().embed(false);
});
assert.throws(function () {
sharp().embed('vallejo');
});
});
it('Embed gravity horizontal northwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.northwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a1-nw.png'), data, done);
});
});
it('Embed gravity horizontal north', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.north)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a2-n.png'), data, done);
});
});
it('Embed gravity horizontal northeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.northeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a3-ne.png'), data, done);
});
});
it('Embed gravity horizontal east', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.east)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a4-e.png'), data, done);
});
});
it('Embed gravity horizontal southeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.southeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a5-se.png'), data, done);
});
});
it('Embed gravity horizontal south', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.south)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a6-s.png'), data, done);
});
});
it('Embed gravity horizontal southwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.southwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a7-sw.png'), data, done);
});
});
it('Embed gravity horizontal west', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.west)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a8-w.png'), data, done);
});
});
it('Embed gravity horizontal center', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.center)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a9-c.png'), data, done);
});
});
it('Embed gravity vertical northwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.northwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/1-nw.png'), data, done);
});
});
it('Embed gravity vertical north', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.north)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/2-n.png'), data, done);
});
});
it('Embed gravity vertical northeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.northeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/3-ne.png'), data, done);
});
});
it('Embed gravity vertical east', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.east)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/4-e.png'), data, done);
});
});
it('Embed gravity vertical southeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.southeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/5-se.png'), data, done);
});
});
it('Embed gravity vertical south', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.south)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/6-s.png'), data, done);
});
});
it('Embed gravity vertical southwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.southwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/7-sw.png'), data, done);
});
});
it('Embed gravity vertical west', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.west)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/8-w.png'), data, done);
});
});
it('Embed gravity vertical center', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({ r: 0, g: 0, b: 0, alpha: 0 })
.embed(sharp.gravity.center)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/9-c.png'), data, done);
});
});
});


@@ -1,261 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated resize-related functions', function () {
it('Max width or height considering ratio (portrait)', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 320)
.max()
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(243, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Min width or height considering ratio (portrait)', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 320)
.min()
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(422, info.height);
done();
});
});
it('Max width or height considering ratio (landscape)', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.max()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Provide only one dimension with max, should default to crop', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.max()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Min width or height considering ratio (landscape)', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.min()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(392, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Provide only one dimension with min, should default to crop', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.min()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Do not enlarge when input width is already less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize(2800)
.withoutEnlargement()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Do not enlarge when input height is already less than output height', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 2300)
.withoutEnlargement()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Do enlarge when input width is less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize(2800)
.withoutEnlargement(false)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2800, info.width);
assert.strictEqual(2286, info.height);
done();
});
});
it('Downscale width and height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Downscale width, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Downscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Upscale width and height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Upscale width, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Upscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Downscale width, upscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Upscale width, downscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Identity transform, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
});


@@ -6,10 +6,9 @@ const sharp = require('../../');
const fixtures = require('../fixtures');
describe('failOnError', function () {
it('handles truncated JPEG by default', function (done) {
sharp(fixtures.inputJpgTruncated)
it('handles truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: false })
.resize(320, 240)
// .toFile(fixtures.expected('truncated.jpg'), done);
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
@@ -19,10 +18,9 @@ describe('failOnError', function () {
});
});
it('handles truncated PNG by default', function (done) {
sharp(fixtures.inputPngTruncated)
it('handles truncated PNG', function (done) {
sharp(fixtures.inputPngTruncated, { failOnError: false })
.resize(320, 240)
// .toFile(fixtures.expected('truncated.png'), done);
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
@@ -46,8 +44,8 @@ describe('failOnError', function () {
});
});
it('returns errors to callback for truncated JPEG when failOnError is set', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: true }).toBuffer(function (err, data, info) {
it('returns errors to callback for truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated).toBuffer(function (err, data, info) {
assert.ok(err.message.includes('VipsJpeg: Premature end of JPEG file'), err);
assert.strictEqual(data, null);
assert.strictEqual(info, null);
@@ -55,8 +53,8 @@ describe('failOnError', function () {
});
});
it('returns errors to callback for truncated PNG when failOnError is set', function (done) {
sharp(fixtures.inputPngTruncated, { failOnError: true }).toBuffer(function (err, data, info) {
it('returns errors to callback for truncated PNG', function (done) {
sharp(fixtures.inputPngTruncated).toBuffer(function (err, data, info) {
assert.ok(err.message.includes('vipspng: libpng read error'), err);
assert.strictEqual(data, null);
assert.strictEqual(info, null);
@@ -64,8 +62,8 @@ describe('failOnError', function () {
});
});
it('rejects promises for truncated JPEG when failOnError is set', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: true })
it('rejects promises for truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated)
.toBuffer()
.then(() => {
throw new Error('Expected rejection');

test/unit/gif.js (new file, 64 lines)

@@ -0,0 +1,64 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('GIF input', () => {
it('GIF Buffer to JPEG Buffer', () =>
sharp(fs.readFileSync(fixtures.inputGif))
.resize(8, 4)
.jpeg()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(4, info.height);
})
);
it('2 channel GIF file to PNG Buffer', () =>
sharp(fixtures.inputGifGreyPlusAlpha)
.resize(8, 4)
.png()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(4, info.height);
assert.strictEqual(4, info.channels);
})
);
it('Animated GIF first page to PNG', () =>
sharp(fixtures.inputGifAnimated)
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(4, info.channels);
})
);
it('Animated GIF all pages to PNG "toilet roll"', () =>
sharp(fixtures.inputGifAnimated, { pages: -1 })
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(2400, info.height);
assert.strictEqual(4, info.channels);
})
);
});
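A short sketch of the multi-page behaviour these GIF tests cover; 'animation.gif' is a hypothetical path:

const sharp = require('sharp');

// By default only the first frame is decoded; pages: -1 decodes every frame and
// stacks them vertically into one tall "toilet roll" image, as asserted above.
sharp('animation.gif', { pages: -1 })
  .png()
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.width, info.height));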

(File diff suppressed because it is too large.)

test/unit/jpeg.js (new file, 262 lines)

@@ -0,0 +1,262 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('JPEG', function () {
it('JPEG quality', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ quality: 70 })
.toBuffer(function (err, buffer70) {
if (err) throw err;
sharp(fixtures.inputJpg)
.resize(320, 240)
.toBuffer(function (err, buffer80) {
if (err) throw err;
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ quality: 90 })
.toBuffer(function (err, buffer90) {
if (err) throw err;
assert(buffer70.length < buffer80.length);
assert(buffer80.length < buffer90.length);
done();
});
});
});
});
describe('Invalid JPEG quality', function () {
[-1, 88.2, 'test'].forEach(function (quality) {
it(quality.toString(), function () {
assert.throws(function () {
sharp().jpeg({ quality: quality });
});
});
});
});
describe('Invalid JPEG quantisation table', function () {
[-1, 88.2, 'test'].forEach(function (table) {
it(table.toString(), function () {
assert.throws(function () {
sharp().jpeg({ quantisationTable: table });
});
});
});
});
it('Progressive JPEG image', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ progressive: false })
.toBuffer(function (err, nonProgressiveData, nonProgressiveInfo) {
if (err) throw err;
assert.strictEqual(true, nonProgressiveData.length > 0);
assert.strictEqual(nonProgressiveData.length, nonProgressiveInfo.size);
assert.strictEqual('jpeg', nonProgressiveInfo.format);
assert.strictEqual(320, nonProgressiveInfo.width);
assert.strictEqual(240, nonProgressiveInfo.height);
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ progressive: true })
.toBuffer(function (err, progressiveData, progressiveInfo) {
if (err) throw err;
assert.strictEqual(true, progressiveData.length > 0);
assert.strictEqual(progressiveData.length, progressiveInfo.size);
assert.strictEqual(false, progressiveData.length === nonProgressiveData.length);
assert.strictEqual('jpeg', progressiveInfo.format);
assert.strictEqual(320, progressiveInfo.width);
assert.strictEqual(240, progressiveInfo.height);
done();
});
});
});
it('Without chroma subsampling generates larger file', function (done) {
// First generate with chroma subsampling (default)
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ chromaSubsampling: '4:2:0' })
.toBuffer(function (err, withChromaSubsamplingData, withChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withChromaSubsamplingData.length > 0);
assert.strictEqual(withChromaSubsamplingData.length, withChromaSubsamplingInfo.size);
assert.strictEqual('jpeg', withChromaSubsamplingInfo.format);
assert.strictEqual(320, withChromaSubsamplingInfo.width);
assert.strictEqual(240, withChromaSubsamplingInfo.height);
// Then generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, withoutChromaSubsamplingData, withoutChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withoutChromaSubsamplingData.length > 0);
assert.strictEqual(withoutChromaSubsamplingData.length, withoutChromaSubsamplingInfo.size);
assert.strictEqual('jpeg', withoutChromaSubsamplingInfo.format);
assert.strictEqual(320, withoutChromaSubsamplingInfo.width);
assert.strictEqual(240, withoutChromaSubsamplingInfo.height);
assert.strictEqual(true, withChromaSubsamplingData.length < withoutChromaSubsamplingData.length);
done();
});
});
});
it('Invalid JPEG chromaSubsampling value throws error', function () {
assert.throws(function () {
sharp().jpeg({ chromaSubsampling: '4:2:2' });
});
});
it('Trellis quantisation', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ trellisQuantisation: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ trellisQuantization: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Verify image is same (as mozjpeg may not be present) size or less
assert.strictEqual(true, withData.length <= withoutData.length);
done();
});
});
});
it('Overshoot deringing', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ overshootDeringing: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ overshootDeringing: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
done();
});
});
});
it('Optimise scans generates different output length', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseScans: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimizeScans: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Verify image is of a different size (progressive output even without mozjpeg)
assert.notStrictEqual(withData.length, withoutData.length);
done();
});
});
});
it('Optimise coding generates smaller output length', function (done) {
// First generate with optimize coding enabled (default)
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg()
.toBuffer(function (err, withOptimiseCoding, withInfo) {
if (err) throw err;
assert.strictEqual(true, withOptimiseCoding.length > 0);
assert.strictEqual(withOptimiseCoding.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Then generate with coding disabled
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimizeCoding: false })
.toBuffer(function (err, withoutOptimiseCoding, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutOptimiseCoding.length > 0);
assert.strictEqual(withoutOptimiseCoding.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Verify optimised image is of a smaller size
assert.strictEqual(true, withOptimiseCoding.length < withoutOptimiseCoding.length);
done();
});
});
});
it('Specifying quantisation table provides different JPEG', function (done) {
// First generate with default quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false })
.toBuffer(function (err, withDefaultQuantisationTable, withInfo) {
if (err) throw err;
assert.strictEqual(true, withDefaultQuantisationTable.length > 0);
assert.strictEqual(withDefaultQuantisationTable.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Then generate with different quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false, quantisationTable: 3 })
.toBuffer(function (err, withQuantTable3, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withQuantTable3.length > 0);
assert.strictEqual(withQuantTable3.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Verify image is same (as mozjpeg may not be present) size or less
assert.strictEqual(true, withQuantTable3.length <= withDefaultQuantisationTable.length);
done();
});
});
});
});
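To summarise the output options this suite exercises, a hedged sketch; the input path is hypothetical, and the mozjpeg-only options (trellis quantisation, overshoot deringing, scan optimisation) silently fall back to no-ops when mozjpeg is absent, which is why several assertions above only check for "less than or equal" or "different":

const sharp = require('sharp');

sharp('input.jpg')
  .resize(320, 240)
  .jpeg({
    quality: 80,                 // 1-100
    progressive: true,
    chromaSubsampling: '4:4:4',  // '4:2:0' is the default; other values are rejected
    trellisQuantisation: true,   // also accepted as trellisQuantization
    overshootDeringing: true,
    optimiseScans: true,         // also accepted as optimizeScans
    quantisationTable: 3
  })
  .toBuffer();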


@@ -103,6 +103,29 @@ describe('Image metadata', function () {
});
});
it('Multipage TIFF', function (done) {
sharp(fixtures.inputTiffMultipage).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual('tiff', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2464, metadata.width);
assert.strictEqual(3248, metadata.height);
assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels);
assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(300, metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(2, metadata.pages);
assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual(1, metadata.orientation);
assert.strictEqual('undefined', typeof metadata.exif);
assert.strictEqual('undefined', typeof metadata.icc);
done();
});
});
it('PNG', function (done) {
sharp(fixtures.inputPng).metadata(function (err, metadata) {
if (err) throw err;
@@ -434,6 +457,7 @@ describe('Image metadata', function () {
sharp(fixtures.inputJpgWithCorruptHeader)
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input file has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
done();
});
});
@@ -442,6 +466,16 @@ describe('Image metadata', function () {
sharp(fs.readFileSync(fixtures.inputJpgWithCorruptHeader))
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input buffer has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
done();
});
});
it('Unsupported lossless JPEG passes underlying error message', function (done) {
sharp(fixtures.inputJpgLossless)
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input file has corrupt header: VipsJpeg: Unsupported JPEG process: SOF type 0xc3/.test(err.message));
done();
});
});
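As a sketch of the metadata behaviour these hunks add tests for (a page count for multi-page input and clearer libvips error messages); 'multi-page.tiff' is a hypothetical path:

const sharp = require('sharp');

sharp('multi-page.tiff')
  .metadata()
  .then(metadata => {
    // `pages` reports the number of pages/frames in multi-page input
    console.log(metadata.format, metadata.width, metadata.height, metadata.pages);
  })
  .catch(err => console.error(err.message)); // corrupt/unsupported headers surface the libvips message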

test/unit/modulate.js (new file, 125 lines)

@@ -0,0 +1,125 @@
'use strict';
const sharp = require('../../');
const assert = require('assert');
const fixtures = require('../fixtures');
describe('Modulate', function () {
describe('Invalid options', function () {
[
null,
undefined,
10,
{ brightness: -1 },
{ brightness: '50%' },
{ brightness: null },
{ saturation: -1 },
{ saturation: '50%' },
{ saturation: null },
{ hue: '50deg' },
{ hue: 1.5 },
{ hue: null }
].forEach(function (options) {
it('should throw', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).modulate(options);
});
});
});
});
it('should be able to hue-rotate', function () {
const base = 'modulate-hue-120.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ hue: 120 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
it('should be able to brighten', function () {
const base = 'modulate-brightness-2.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ brightness: 2 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
it('should be able to unbrighten', function () {
const base = 'modulate-brightness-0-5.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ brightness: 0.5 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
it('should be able to saturate', function () {
const base = 'modulate-saturation-2.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ saturation: 2 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 30);
});
});
it('should be able to desaturate', function () {
const base = 'modulate-saturation-0.5.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ saturation: 0.5 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
it('should be able to modulate all channels', function () {
const base = 'modulate-all.jpg';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.inputJpg)
.modulate({ brightness: 2, saturation: 0.5, hue: 180 })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
describe('hue-rotate', function () {
[30, 60, 90, 120, 150, 180, 210, 240, 270, 300, 330, 360].forEach(function (angle) {
it('should properly hue rotate by ' + angle + 'deg', function () {
const base = 'modulate-hue-angle-' + angle + '.png';
const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base);
return sharp(fixtures.testPattern)
.modulate({ hue: angle })
.toFile(actual)
.then(function () {
fixtures.assertMaxColourDistance(actual, expected, 25);
});
});
});
});
});
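A minimal sketch of the modulate operation these tests cover: brightness and saturation are multipliers, hue is a rotation in degrees; the paths are placeholders:

const sharp = require('sharp');

// Double the brightness, halve the saturation and rotate the hue by 180 degrees.
sharp('input.jpg') // placeholder path
  .modulate({ brightness: 2, saturation: 0.5, hue: 180 })
  .toFile('output.jpg') // placeholder path
  .then(info => console.log('wrote', info.size, 'bytes'))
  .catch(err => console.error(err));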


@@ -140,37 +140,35 @@ describe('Overlays', function () {
});
});
if (sharp.format.webp.input.file) {
it('Composite WebP onto JPEG', function (done) {
const paths = getPaths('overlay-jpeg-with-webp', 'jpg');
it('Composite WebP onto JPEG', function (done) {
const paths = getPaths('overlay-jpeg-with-webp', 'jpg');
sharp(fixtures.inputJpg)
.resize(300, 300)
.overlayWith(fixtures.inputWebPWithTransparency)
.toFile(paths.actual, function (error, info) {
if (error) return done(error);
fixtures.assertMaxColourDistance(paths.actual, paths.expected, 102);
done();
});
});
}
it('Composite JPEG onto PNG, no premultiply', function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(false, info.premultiplied);
sharp(fixtures.inputJpg)
.resize(300, 300)
.overlayWith(fixtures.inputWebPWithTransparency)
.toFile(paths.actual, function (error, info) {
if (error) return done(error);
fixtures.assertMaxColourDistance(paths.actual, paths.expected, 102);
done();
});
});
it('Composite opaque JPEG onto JPEG, no premultiply', function (done) {
it('Composite JPEG onto PNG, ensure premultiply', function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, info.premultiplied);
done();
});
});
it('Composite opaque JPEG onto JPEG, ensure premultiply', function (done) {
sharp(fixtures.inputJpg)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(false, info.premultiplied);
assert.strictEqual(true, info.premultiplied);
done();
});
});
@@ -409,12 +407,6 @@ describe('Overlays', function () {
});
});
it('Overlay with invalid cutout option', function () {
assert.throws(function () {
sharp().overlayWith('ignore', { cutout: 1 });
});
});
it('Overlay with invalid tile option', function () {
assert.throws(function () {
sharp().overlayWith('ignore', { tile: 1 });
@@ -580,18 +572,17 @@ describe('Overlays', function () {
});
});
it('Composite JPEG onto JPEG, no premultiply', function (done) {
it('Composite JPEG onto JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(480, 320)
.overlayWith(fixtures.inputJpgBooleanTest)
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(480, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual(false, info.premultiplied);
assert.strictEqual(true, info.premultiplied);
fixtures.assertSimilar(fixtures.expected('overlay-jpeg-with-jpeg.jpg'), data, done);
});
});
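A sketch of the overlayWith behaviour asserted above, where the tests now expect info.premultiplied to be true after compositing; the paths are placeholders:

const sharp = require('sharp');

// Overlay one image onto another; the info object reports whether
// premultiplication was applied while compositing.
sharp('background.png') // placeholder paths throughout
  .overlayWith('overlay.jpg')
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.format, info.premultiplied))
  .catch(err => console.error(err));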


@@ -16,16 +16,24 @@ describe('Platform-detection', function () {
delete process.env.npm_config_platform;
});
it('Can override ARM version via npm_config_armv', function () {
it('Can override ARM version via --arm-version', function () {
process.env.npm_config_arch = 'arm';
process.env.npm_config_armv = 'test';
process.env.npm_config_arm_version = 'test';
assert.strictEqual('armvtest', platform().split('-')[1]);
delete process.env.npm_config_armv;
delete process.env.npm_config_arm_version;
delete process.env.npm_config_arch;
});
it('Can override ARM64 version via --arm-version', function () {
process.env.npm_config_arch = 'arm64';
process.env.npm_config_arm_version = 'test';
assert.strictEqual('arm64vtest', platform().split('-')[1]);
delete process.env.npm_config_arm_version;
delete process.env.npm_config_arch;
});
it('Can detect ARM version via process.config', function () {
process.env.npm_config_arch = 'armhf';
process.env.npm_config_arch = 'arm';
const armVersion = process.config.variables.arm_version;
process.config.variables.arm_version = 'test';
assert.strictEqual('armvtest', platform().split('-')[1]);
@@ -41,7 +49,7 @@ describe('Platform-detection', function () {
it('Defaults to ARMv8 for 64-bit', function () {
process.env.npm_config_arch = 'arm64';
assert.strictEqual('armv8', platform().split('-')[1]);
assert.strictEqual('arm64v8', platform().split('-')[1]);
delete process.env.npm_config_arch;
});
});
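A sketch of the override mechanism these tests exercise: npm maps --arch and --arm-version to the npm_config_arch and npm_config_arm_version environment variables, which the platform helper combines into an id such as linux-arm64v8. The require path below is an assumption about sharp's internal layout:

// Assumed internal path to sharp's platform helper.
const platform = require('sharp/lib/platform');

process.env.npm_config_arch = 'arm64';
process.env.npm_config_arm_version = '8';

console.log(platform()); // e.g. 'linux-arm64v8' on a Linux host

delete process.env.npm_config_arch;
delete process.env.npm_config_arm_version;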

test/unit/png.js (new file, 171 lines)

@@ -0,0 +1,171 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('PNG', function () {
it('compression level is valid', function () {
assert.doesNotThrow(function () {
sharp().png({ compressionLevel: 0 });
});
});
it('compression level is invalid', function () {
assert.throws(function () {
sharp().png({ compressionLevel: -1 });
});
});
it('default compressionLevel generates smaller file than compressionLevel=6', function (done) {
// First generate with default compressionLevel
sharp(fixtures.inputPng)
.resize(320, 240)
.png()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual('png', defaultInfo.format);
// Then generate with compressionLevel=6
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ compressionLevel: 6 })
.toBuffer(function (err, largerData, largerInfo) {
if (err) throw err;
assert.strictEqual(true, largerData.length > 0);
assert.strictEqual('png', largerInfo.format);
assert.strictEqual(true, defaultData.length < largerData.length);
done();
});
});
});
it('without adaptiveFiltering generates smaller file', function (done) {
// First generate with adaptive filtering
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ adaptiveFiltering: true })
.toBuffer(function (err, adaptiveData, adaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, adaptiveData.length > 0);
assert.strictEqual(adaptiveData.length, adaptiveInfo.size);
assert.strictEqual('png', adaptiveInfo.format);
assert.strictEqual(320, adaptiveInfo.width);
assert.strictEqual(240, adaptiveInfo.height);
// Then generate without
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ adaptiveFiltering: false })
.toBuffer(function (err, withoutAdaptiveData, withoutAdaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, withoutAdaptiveData.length > 0);
assert.strictEqual(withoutAdaptiveData.length, withoutAdaptiveInfo.size);
assert.strictEqual('png', withoutAdaptiveInfo.format);
assert.strictEqual(320, withoutAdaptiveInfo.width);
assert.strictEqual(240, withoutAdaptiveInfo.height);
assert.strictEqual(true, withoutAdaptiveData.length < adaptiveData.length);
done();
});
});
});
it('Invalid PNG adaptiveFiltering value throws error', function () {
assert.throws(function () {
sharp().png({ adaptiveFiltering: 1 });
});
});
it('Progressive PNG image', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.png({ progressive: false })
.toBuffer(function (err, nonProgressiveData, nonProgressiveInfo) {
if (err) throw err;
assert.strictEqual(true, nonProgressiveData.length > 0);
assert.strictEqual(nonProgressiveData.length, nonProgressiveInfo.size);
assert.strictEqual('png', nonProgressiveInfo.format);
assert.strictEqual(320, nonProgressiveInfo.width);
assert.strictEqual(240, nonProgressiveInfo.height);
sharp(nonProgressiveData)
.png({ progressive: true })
.toBuffer(function (err, progressiveData, progressiveInfo) {
if (err) throw err;
assert.strictEqual(true, progressiveData.length > 0);
assert.strictEqual(progressiveData.length, progressiveInfo.size);
assert.strictEqual(true, progressiveData.length > nonProgressiveData.length);
assert.strictEqual('png', progressiveInfo.format);
assert.strictEqual(320, progressiveInfo.width);
assert.strictEqual(240, progressiveInfo.height);
done();
});
});
});
it('Valid PNG libimagequant palette value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().png({ palette: false });
});
});
it('Invalid PNG libimagequant palette value throws error', function () {
assert.throws(function () {
sharp().png({ palette: 'fail' });
});
});
it('Valid PNG libimagequant quality value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, quality: 80 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, quality: 100 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant quality value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, quality: 101 });
});
});
it('Valid PNG libimagequant colours value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, colours: 100 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, colours: 200 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant colours value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, colours: -1 });
});
});
it('Invalid PNG libimagequant colors value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, colors: 0.1 });
});
});
it('Valid PNG libimagequant dither value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, dither: 0.1 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, dither: 0.9 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant dither value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, dither: 'fail' });
});
});
});
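A sketch of the palette-related PNG options checked above; these depend on libvips having libimagequant support, and the paths are placeholders:

const sharp = require('sharp');

// Produce a palette-based PNG with at most 128 colours, quality 80 and
// moderate dithering.
sharp('input.png') // placeholder path
  .resize(320)
  .png({ palette: true, colours: 128, quality: 80, dither: 0.5 })
  .toBuffer({ resolveWithObject: true })
  .then(({ data, info }) => console.log(info.format, data.length, 'bytes'))
  .catch(err => console.error(err));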

test/unit/raw.js (new file, 170 lines)

@@ -0,0 +1,170 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Raw pixel data', function () {
describe('Raw pixel input', function () {
it('Missing options', function () {
assert.throws(function () {
sharp({ raw: {} });
});
});
it('Incomplete options', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 1 } });
});
});
it('Invalid channels', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 1, channels: 5 } });
});
});
it('Invalid height', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 0, channels: 4 } });
});
});
it('Invalid width', function () {
assert.throws(function () {
sharp({ raw: { width: 'zoinks', height: 1, channels: 4 } });
});
});
it('RGB', function (done) {
// Convert to raw pixel data
sharp(fixtures.inputJpg)
.resize(256)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(209, info.height);
assert.strictEqual(3, info.channels);
// Convert back to JPEG
sharp(data, {
raw: {
width: info.width,
height: info.height,
channels: info.channels
} })
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(209, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.inputJpg, data, done);
});
});
});
it('RGBA', function (done) {
// Convert to raw pixel data
sharp(fixtures.inputPngOverlayLayer1)
.resize(256)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(192, info.height);
assert.strictEqual(4, info.channels);
// Convert back to PNG
sharp(data, {
raw: {
width: info.width,
height: info.height,
channels: info.channels
} })
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(192, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.inputPngOverlayLayer1, data, { threshold: 7 }, done);
});
});
});
it('JPEG to raw Stream and back again', function (done) {
const width = 32;
const height = 24;
const writable = sharp({
raw: {
width,
height,
channels: 3
}
});
writable
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
sharp(fixtures.inputJpg)
.resize(width, height)
.raw()
.pipe(writable);
});
});
describe('Output raw, uncompressed image data', function () {
it('1 channel greyscale image', function (done) {
sharp(fixtures.inputJpg)
.greyscale()
.resize(32, 24)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 1, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
assert.strictEqual(1, info.channels);
done();
});
});
it('3 channel colour image without transparency', function (done) {
sharp(fixtures.inputJpg)
.resize(32, 24)
.toFormat('raw')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 3, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
});
it('4 channel colour image with transparency', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(32, 24)
.toFormat(sharp.format.raw)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 4, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
});
});
});
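A condensed promise-based sketch of the raw round trip tested above, with a placeholder input path:

const sharp = require('sharp');

// Decode to raw pixels, then rebuild an image from that buffer by
// describing its layout with the raw input option.
sharp('input.jpg') // placeholder path
  .resize(256)
  .raw()
  .toBuffer({ resolveWithObject: true })
  .then(({ data, info }) =>
    sharp(data, { raw: { width: info.width, height: info.height, channels: info.channels } })
      .png()
      .toBuffer()
  )
  .then(png => console.log('round-tripped PNG of', png.length, 'bytes'))
  .catch(err => console.error(err));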


@@ -116,7 +116,7 @@ describe('Resize fit=contain', function () {
});
});
it.skip('TIFF in LAB colourspace onto RGBA background', function (done) {
it('TIFF in LAB colourspace onto RGBA background', function (done) {
sharp(fixtures.inputTiffCielab)
.resize(64, 128, {
fit: 'contain',


@@ -525,6 +525,40 @@ describe('Resize dimensions', function () {
});
});
it('Ensure shortest edge (height) is at least 1 pixel', function () {
return sharp({
create: {
width: 10,
height: 2,
channels: 3,
background: 'red'
}
})
.resize(2)
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(2, output.info.width);
assert.strictEqual(1, output.info.height);
});
});
it('Ensure shortest edge (width) is at least 1 pixel', function () {
return sharp({
create: {
width: 2,
height: 10,
channels: 3,
background: 'red'
}
})
.resize(null, 2)
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(1, output.info.width);
assert.strictEqual(2, output.info.height);
});
});
it('unknown kernel throws', function () {
assert.throws(function () {
sharp().resize(null, null, { kernel: 'unknown' });

test/unit/svg.js (new file, 77 lines)

@@ -0,0 +1,77 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('SVG input', function () {
it('Convert SVG to PNG at default 72DPI', function (done) {
sharp(fixtures.inputSvg)
.resize(1024)
.extract({ left: 290, top: 760, width: 40, height: 40 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(40, info.width);
assert.strictEqual(40, info.height);
fixtures.assertSimilar(fixtures.expected('svg72.png'), data, function (err) {
if (err) throw err;
sharp(data).metadata(function (err, info) {
if (err) throw err;
assert.strictEqual(72, info.density);
done();
});
});
});
});
it('Convert SVG to PNG at 1200DPI', function (done) {
sharp(fixtures.inputSvg, { density: 1200 })
.resize(1024)
.extract({ left: 290, top: 760, width: 40, height: 40 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(40, info.width);
assert.strictEqual(40, info.height);
fixtures.assertSimilar(fixtures.expected('svg1200.png'), data, function (err) {
if (err) throw err;
sharp(data).metadata(function (err, info) {
if (err) throw err;
assert.strictEqual(1200, info.density);
done();
});
});
});
});
it('Convert SVG to PNG at 14.4DPI', function (done) {
sharp(fixtures.inputSvg, { density: 14.4 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(20, info.width);
assert.strictEqual(20, info.height);
fixtures.assertSimilar(fixtures.expected('svg14.4.png'), data, function (err) {
if (err) throw err;
done();
});
});
});
it('Convert SVG with embedded images to PNG, respecting dimensions, autoconvert to PNG', function (done) {
sharp(fixtures.inputSvgWithEmbeddedImages)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(480, info.width);
assert.strictEqual(360, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('svg-embedded.png'), data, done);
});
});
});
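A sketch of rasterising an SVG at a chosen density rather than the 72 DPI default, as the tests above do; the paths and density value are placeholders:

const sharp = require('sharp');

// Rasterise the SVG at 300 DPI before resizing, then write a PNG.
sharp('diagram.svg', { density: 300 }) // placeholder path
  .resize(1024)
  .png()
  .toFile('diagram.png') // placeholder path
  .then(info => console.log(info.width, 'x', info.height))
  .catch(err => console.error(err));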

test/unit/tiff.js (new file, 424 lines)

@@ -0,0 +1,424 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const rimraf = require('rimraf');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('TIFF', function () {
it('Load TIFF from Buffer', function (done) {
const inputTiffBuffer = fs.readFileSync(fixtures.inputTiff);
sharp(inputTiffBuffer)
.resize(320, 240)
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Load multi-page TIFF from file', function (done) {
sharp(fixtures.inputTiffMultipage) // defaults to page 0
.jpeg()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual(defaultData.length, defaultInfo.size);
assert.strictEqual('jpeg', defaultInfo.format);
sharp(fixtures.inputTiffMultipage, { page: 1 }) // 50%-scale copy of page 0
.jpeg()
.toBuffer(function (err, scaledData, scaledInfo) {
if (err) throw err;
assert.strictEqual(true, scaledData.length > 0);
assert.strictEqual(scaledData.length, scaledInfo.size);
assert.strictEqual('jpeg', scaledInfo.format);
assert.strictEqual(defaultInfo.width, scaledInfo.width * 2);
assert.strictEqual(defaultInfo.height, scaledInfo.height * 2);
done();
});
});
});
it('Load multi-page TIFF from Buffer', function (done) {
const inputTiffBuffer = fs.readFileSync(fixtures.inputTiffMultipage);
sharp(inputTiffBuffer) // defaults to page 0
.jpeg()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual(defaultData.length, defaultInfo.size);
assert.strictEqual('jpeg', defaultInfo.format);
sharp(inputTiffBuffer, { page: 1 }) // 50%-scale copy of page 0
.jpeg()
.toBuffer(function (err, scaledData, scaledInfo) {
if (err) throw err;
assert.strictEqual(true, scaledData.length > 0);
assert.strictEqual(scaledData.length, scaledInfo.size);
assert.strictEqual('jpeg', scaledInfo.format);
assert.strictEqual(defaultInfo.width, scaledInfo.width * 2);
assert.strictEqual(defaultInfo.height, scaledInfo.height * 2);
done();
});
});
});
it('Save TIFF to Buffer', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 240)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Invalid TIFF quality throws error', function () {
assert.throws(function () {
sharp().tiff({ quality: 101 });
});
});
it('Missing TIFF quality does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff();
});
});
it('Not squashing TIFF to a bit depth of 1 should not change the file size', function (done) {
const startSize = fs.statSync(fixtures.inputTiff8BitDepth).size;
sharp(fixtures.inputTiff8BitDepth)
.toColourspace('b-w') // can only squash 1 band uchar images
.tiff({
squash: false,
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size === startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('Squashing TIFF to a bit depth of 1 should significantly reduce file size', function (done) {
const startSize = fs.statSync(fixtures.inputTiff8BitDepth).size;
sharp(fixtures.inputTiff8BitDepth)
.toColourspace('b-w') // can only squash 1 band uchar images
.tiff({
squash: true,
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < (startSize / 2));
rimraf(fixtures.outputTiff, done);
});
});
it('Invalid TIFF squash value throws error', function () {
assert.throws(function () {
sharp().tiff({ squash: 'true' });
});
});
it('TIFF setting xres and yres on file', function (done) {
const res = 1000.0; // inputTiff has a dpi of 300 (res*2.54)
sharp(fixtures.inputTiff)
.tiff({
xres: (res),
yres: (res)
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
sharp(fixtures.outputTiff).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(metadata.density, res * 2.54); // convert to dpi
rimraf(fixtures.outputTiff, done);
});
});
});
it('TIFF setting xres and yres on buffer', function (done) {
const res = 1000.0; // inputTiff has a dpi of 300 (res*2.54)
sharp(fixtures.inputTiff)
.tiff({
xres: (res),
yres: (res)
})
.toBuffer(function (err, data, info) {
if (err) throw err;
sharp(data).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(metadata.density, res * 2.54); // convert to dpi
done();
});
});
});
it('TIFF invalid xres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ xres: '1000.0' });
});
});
it('TIFF invalid yres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ yres: '1000.0' });
});
});
it('TIFF lzw compression with horizontal predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'lzw',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF ccittfax4 compression shrinks b-w test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiff).size;
sharp(fixtures.inputTiff)
.toColourspace('b-w')
.tiff({
squash: true,
compression: 'ccittfax4'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression with horizontal predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression with float predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'float'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression without predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF jpeg compression shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'jpeg'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF none compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'none' });
});
});
it('TIFF lzw compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'lzw' });
});
});
it('TIFF deflate compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'deflate' });
});
});
it('TIFF invalid compression option throws', function () {
assert.throws(function () {
sharp().tiff({ compression: 0 });
});
});
it('TIFF invalid compression option throws', function () {
assert.throws(function () {
sharp().tiff({ compression: 'a' });
});
});
it('TIFF invalid predictor option throws', function () {
assert.throws(function () {
sharp().tiff({ predictor: 'a' });
});
});
it('TIFF horizontal predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'horizontal' });
});
});
it('TIFF float predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'float' });
});
});
it('TIFF none predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'none' });
});
});
it('TIFF tiled pyramid image without compression enlarges test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'none',
pyramid: true,
tile: true,
tileHeight: 256,
tileWidth: 256
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size > startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF pyramid true value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ pyramid: true });
});
});
it('Invalid TIFF pyramid value throws error', function () {
assert.throws(function () {
sharp().tiff({ pyramid: 'true' });
});
});
it('Invalid TIFF tile value throws error', function () {
assert.throws(function () {
sharp().tiff({ tile: 'true' });
});
});
it('TIFF tile true value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tile: true });
});
});
it('Valid TIFF tileHeight value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tileHeight: 512 });
});
});
it('Valid TIFF tileWidth value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tileWidth: 512 });
});
});
it('Invalid TIFF tileHeight value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileHeight: '256' });
});
});
it('Invalid TIFF tileWidth value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileWidth: '256' });
});
});
it('Invalid TIFF tileHeight value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileHeight: 0 });
});
});
it('Invalid TIFF tileWidth value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileWidth: 0 });
});
});
it('TIFF file input with invalid page fails gracefully', function (done) {
sharp(fixtures.inputTiffMultipage, { page: 2 })
.toBuffer(function (err) {
assert.strictEqual(true, !!err);
done();
});
});
it('TIFF buffer input with invalid page fails gracefully', function (done) {
sharp(fs.readFileSync(fixtures.inputTiffMultipage), { page: 2 })
.toBuffer(function (err) {
assert.strictEqual(true, !!err);
done();
});
});
});
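A sketch combining several of the TIFF output options exercised above into one call; the paths are placeholders:

const sharp = require('sharp');

// Write a tiled, pyramidal TIFF using LZW compression with the
// horizontal predictor.
sharp('input.tiff') // placeholder path
  .tiff({
    compression: 'lzw',
    predictor: 'horizontal',
    pyramid: true,
    tile: true,
    tileWidth: 256,
    tileHeight: 256
  })
  .toFile('output.tiff') // placeholder path
  .then(info => console.log(info.format, info.size, 'bytes'))
  .catch(err => console.error(err));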

test/unit/webp.js (new file, 78 lines)

@@ -0,0 +1,78 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('WebP', function () {
it('WebP output', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.toFormat(sharp.format.webp)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Invalid WebP quality throws error', function () {
assert.throws(function () {
sharp().webp({ quality: 101 });
});
});
it('Invalid WebP alpha quality throws error', function () {
assert.throws(function () {
sharp().webp({ alphaQuality: 101 });
});
});
it('should work for webp alpha quality', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ alphaQuality: 80 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-alpha-80.webp'), data, done);
});
});
it('should work for webp lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ lossless: true })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-lossless.webp'), data, done);
});
});
it('should work for webp near-lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50 })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
it('should use near-lossless when both lossless and nearLossless are specified', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50, lossless: true })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
});
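Finally, a sketch of the WebP options covered above; as the last test asserts, nearLossless takes precedence when lossless is also set. The input path is a placeholder:

const sharp = require('sharp');

// Near-lossless WebP encoding (the tests above pair nearLossless with quality).
sharp('input.png') // placeholder path
  .webp({ nearLossless: true, quality: 60 })
  .toBuffer({ resolveWithObject: true })
  .then(({ data, info }) => console.log(info.format, data.length, 'bytes'))
  .catch(err => console.error(err));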