Compare commits

...

80 Commits

Author SHA1 Message Date
Lovell Fuller
18afcf5f90 Release v0.22.0 2019-03-18 23:26:39 +00:00
Lovell Fuller
87a422942d Pin prebuild due to breaking change in 8.2.0 2019-03-18 23:10:33 +00:00
Lovell Fuller
ac515121e5 Release v0.22.0 2019-03-18 21:31:46 +00:00
Lovell Fuller
2bfea0ad76 Docs: refresh usage examples 2019-03-18 21:29:17 +00:00
Lovell Fuller
83cdb558f6 Allow Stream-based input of raw pixel data #1579 2019-03-18 20:15:18 +00:00
Lovell Fuller
9ee377963e Improve error message if libvips tarball is corrupt 2019-03-17 23:07:58 +00:00
Lovell Fuller
9cc06c887b Add support for pages option for multi-page input #1566 2019-03-17 16:37:27 +00:00
Lovell Fuller
7457b50373 Remove unused shared library 2019-03-15 15:58:25 +00:00
Lovell Fuller
6387fb79b1 Small improvements to input and install docs, bump deps 2019-03-15 15:48:55 +00:00
Lovell Fuller
54e5514b9a Bump dependencies to latest 2019-03-10 18:14:43 +00:00
Lovell Fuller
1e4597c284 Changelog entry for #1595 (plus add GIF) 2019-03-10 17:26:26 +00:00
Lovell Fuller
7cafd4386c Add composite op, supporting multiple images and blend modes #728 2019-03-09 22:46:23 +00:00
Lovell Fuller
e3549ba28c Remove functions previously deprecated in v0.21.0
background, crop, embed, ignoreAspectRatio, max, min, withoutEnlargement
2019-03-01 23:43:35 +00:00
Lovell Fuller
d1bbe62e52 Rename armv8 as arm64v8 to match Node's process.arch 2019-03-01 23:43:35 +00:00
Lovell Fuller
36af74a09b Upgrade to libvips v8.7.4 2019-03-01 23:43:35 +00:00
Fabrizio Ruggeri
5afe02be60 Allow page input option to be set for PDF (#1595) 2019-03-01 23:29:34 +00:00
Jack Cross
2262959673 Docs: add missing comma to extend example (#1588) 2019-02-27 11:03:02 +00:00
Lovell Fuller
ba3f914445 Document support for animated WebP in metadata pages 2019-01-27 21:01:49 +00:00
Lovell Fuller
770be35c44 Tests: add a couple of extra leak suppressions for Node 2019-01-27 20:48:17 +00:00
Lovell Fuller
cc9f2b90fd Docs: use absolute URL for logo 2019-01-19 15:15:22 +00:00
Lovell Fuller
4aff57b071 Release v0.21.3 2019-01-19 14:25:37 +00:00
Maxime BACONNAIS
1df8d82fe0 Docs: overlay parameter of overlayWith is optional (#1547) 2019-01-19 14:19:41 +00:00
Lovell Fuller
98e90784f4 Docs: overlay parameter of overlayWith is optional 2019-01-19 14:11:54 +00:00
Lovell Fuller
87ea54cc66 Bump devDependencies 2019-01-19 14:06:16 +00:00
Lovell Fuller
d5e98bc8ad Split file-based input errors into missing vs invalid #1542 2019-01-19 11:59:36 +00:00
Lovell Fuller
fa69ff773a Input image decoding fail fast by default 2019-01-18 19:25:55 +00:00
Lovell Fuller
a183bb1cac Add valgrind memory leak suppressions 2019-01-18 12:08:28 +00:00
Lovell Fuller
cf62372cab Install: log the fallback to build from source
https://github.com/lovell/sharp-libvips/issues/18
2019-01-14 19:33:01 +00:00
Lovell Fuller
56fa9c95a1 Release v0.21.2 2019-01-13 10:26:47 +00:00
Lovell Fuller
32a34a8841 Tests: separate IO suite into per-format unit files 2019-01-13 10:11:32 +00:00
Lovell Fuller
98797445de Expose PNG output options requiring libimagequant #1484 2019-01-13 09:06:05 +00:00
Lovell Fuller
bd377438b6 Ignore colour profiles in LAB images as they are already LAB 2019-01-12 18:13:43 +00:00
Lovell Fuller
9dd6510de6 Expose underlying error message for invalid input #1505 2019-01-12 16:10:25 +00:00
Lovell Fuller
93ad9d4a4a Ensure all metadata removed from PNG unless withMetadata used 2019-01-09 21:17:53 +00:00
Lovell Fuller
4c01a099ea Add ensureAlpha op, adds alpha channel if missing #1153 2019-01-05 21:12:33 +00:00
Lovell Fuller
8e70579e47 Docs: use HTTPS links where available 2019-01-04 21:25:21 +00:00
Lovell Fuller
ee8bfa3980 Add 2019 to list of years of copyright 2019-01-04 16:05:26 +00:00
Lovell Fuller
c5dfa49cae Docs: add sharp logo to readme 2019-01-04 15:56:22 +00:00
Lovell Fuller
0822404129 Docs: expand logo viewBox to prevent clipping 2019-01-04 15:54:33 +00:00
Lovell Fuller
144f39cd45 Docs: add sharp logo, CC BY-SA 4.0 2019-01-04 15:12:53 +00:00
Lovell Fuller
87f191fd05 Node 11 now supported by nodejs/nan 2019-01-03 14:01:18 +00:00
Lovell Fuller
37ed436202 Version bump of devDependencies 2019-01-03 13:26:56 +00:00
Lovell Fuller
88e490356d Test: remove stray console.log 2019-01-03 12:35:54 +00:00
Lovell Fuller
7c631c0787 Ensure shortest resized edge is >= 1px #1003 2019-01-03 12:01:55 +00:00
Lovell Fuller
f5d3721fe0 Doc refresh for #1205 2019-01-02 19:04:49 +00:00
Lovell Fuller
cc633589d9 Expose pages metadata for multi-page input images #1205 2019-01-01 22:10:27 +00:00
Lovell Fuller
cc1d4c1a6d Expose palette-bit-depth metadata, requires upcoming libvips v8.8.0 2019-01-01 21:02:00 +00:00
Lovell Fuller
30ca424942 Apply correct forced output when chaining #1528 2019-01-01 18:40:09 +00:00
Pascal Temel
813831acf0 Docs: Update deprecated overlayWith example (#1526) 2018-12-30 20:44:36 +00:00
Lovell Fuller
a54fe9f77c Prevent mutation of jpeg options #1516 2018-12-21 19:54:33 +00:00
Amila Welihinda
8c6da5548a Docs: change repo badge to SVG (#1498) 2018-12-10 11:03:03 +01:00
Lovell Fuller
a2aa7d69e7 Add error handler to download stream lovell/sharp-libvips#14 2018-12-08 13:56:05 +00:00
Lovell Fuller
34d5252242 Release v0.21.1 2018-12-07 19:23:54 +00:00
Lovell Fuller
f31e4d2869 Changelog, credit and doc refresh for #1483 2018-12-06 21:58:14 +00:00
Michael B. Klein
c695c40abc Expose libvips pyramid/tile options for TIFF output (#1483) 2018-12-06 22:33:46 +01:00
Lovell Fuller
fd1ca1dbb2 Ensure the tests for #1477 pass on OS X 2018-12-04 23:58:02 +01:00
Lovell Fuller
f25dbd5f61 Ensure the tests for #1477 pass on OS X 2018-12-04 23:45:08 +01:00
Keith
541e7104fd Expose libvips recombination matrix operation #1477 2018-12-04 23:06:34 +01:00
Lovell Fuller
94945cf6ac Changelog and credit for #1475 2018-12-04 07:49:41 +00:00
Lovell Fuller
db76e655f8 Ensure licensing checker works on Windows 2018-11-29 10:15:13 +00:00
Lovell Fuller
d43c7b581d Add licensing checker for production dependencies 2018-11-29 09:53:00 +00:00
Julian Aubourg
383b933e26 Build prototype with Object.assign to allow minification (#1475) 2018-11-26 19:40:06 +01:00
Lovell Fuller
d26ccf6294 Docs: update Alpine repository URLs 2018-11-21 22:42:58 +00:00
Lovell Fuller
6f9699f605 Ensure correct channel info for raw, greyscale output #1425 2018-11-19 20:00:30 +00:00
Quinn Pan
1e9093d781 Docs: correct code example in extend operation 2018-11-19 20:35:16 +01:00
Lovell Fuller
9dc6492e52 Docs: correct code example in extend operation 2018-11-19 19:34:16 +00:00
Lovell Fuller
d22f7cae6a Silence cast-function-type warnings from GCC 8+ 2018-11-19 18:47:35 +00:00
Lovell Fuller
473afaab45 Install: detect missing libvips on OpenBSD and SunOS
See https://github.com/lovell/sharp-libvips/issues/12
2018-11-19 18:46:05 +00:00
Lovell Fuller
dcd68303a4 Docs: add installation details for Lambda without Docker 2018-11-16 10:28:34 +00:00
Lovell Fuller
03394556b5 Update semistandard linter to latest 2018-11-11 18:05:40 +00:00
Lovell Fuller
1c4f6f75f3 Add Node 11 to CI, experimental only, no prebuild
Hide deprecation warnings - see nodejs/nan#811
2018-11-11 17:55:35 +00:00
Lovell Fuller
f00928dedb Doc refresh for #1438 #1439 2018-11-11 17:40:19 +00:00
Daiz
a48f8fbb61 Allow separate parameters for gamma encoding and decoding (#1439) 2018-11-11 10:15:38 +01:00
Daiz
1fa388370e Add support for the "mitchell" kernel for image reductions (#1438) 2018-10-28 15:11:27 +00:00
Christoph Tavan
95ef6b3f71 Docs: update Alpine libvips installation instructions (#1429)
With version 8.7.0 of the vips-dev Alpine package the documented installation instructions no longer work and result in:

ERROR: unsatisfiable constraints:
  pc:fftw3 (missing):
    required by: vips-dev-8.7.0-r0[pc:fftw3] vips-dev-8.7.0-r0[pc:fftw3] vips-dev-8.7.0-r0[pc:fftw3]

The fix was proposed in https://bugs.alpinelinux.org/issues/9561#note-2
2018-10-24 14:31:32 +01:00
Lovell Fuller
de11d36d00 Minor version bumps 2018-10-22 19:38:06 +01:00
Lovell Fuller
d77c2adabe Changelog and docs for #1422 2018-10-22 19:17:42 +01:00
SethWen
c89c055ae0 Install: add support for npm_config_sharp_dist_base_url (#1422) 2018-10-22 18:54:47 +01:00
Lovell Fuller
dac8117f32 Docs: ensure options are included for flatten op 2018-10-08 19:56:30 +01:00
Waylon Walker
937b091bab Docs: clear _libvips cache on Windows (#1403) 2018-10-07 16:49:05 +01:00
83 changed files with 3093 additions and 2811 deletions

View File

@@ -21,6 +21,12 @@ matrix:
after_success:
- npm install coveralls
- cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js
- name: "Linux (glibc) - Node 11"
os: linux
dist: trusty
sudo: false
language: node_js
node_js: "11"
- name: "Linux (musl) - Node 8"
os: linux
dist: trusty
@@ -41,6 +47,16 @@ matrix:
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "Linux (musl) - Node 11"
os: linux
dist: trusty
sudo: true
language: minimal
before_install:
- sudo docker run -dit --name sharp --env CI --env PREBUILD_TOKEN --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:11-alpine
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "OS X - Node 6"
os: osx
osx_image: xcode9.2
@@ -56,3 +72,8 @@ matrix:
osx_image: xcode9.2
language: node_js
node_js: "10"
- name: "OS X - Node 11"
os: osx
osx_image: xcode9.2
language: node_js
node_js: "11"

View File

@@ -41,8 +41,8 @@ Any change that modifies the existing public API should be added to the relevant
| Release | WIP branch |
| ------: | :--------- |
| v0.21.0 | teeth |
| v0.22.0 | uptake |
| v0.23.0 | vision |
Please squash your changes into a single commit using a command like `git rebase -i upstream/<wip-branch>`.

View File

@@ -1,13 +1,11 @@
# sharp
<img src="https://raw.githubusercontent.com/lovell/sharp/master/docs/image/sharp-logo.svg?sanitize=true" width="160" height="160" alt="sharp logo" align="right">
```sh
npm install sharp
```
```sh
yarn add sharp
```
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
@@ -22,7 +20,7 @@ As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available.
Most modern 64-bit OS X, Windows and Linux systems running
Node versions 6, 8 and 10
Node versions 6, 8, 10 and 11
do not require any additional install or runtime dependencies.
## Examples
@@ -31,22 +29,42 @@ do not require any additional install or runtime dependencies.
const sharp = require('sharp');
```
### Callback
```javascript
sharp(inputBuffer)
.resize(320, 240)
.toFile('output.webp', (err, info) => ... );
// A Promises/A+ promise is returned when callback is not provided.
.toFile('output.webp', (err, info) => { ... });
```
### Promise
```javascript
sharp('input.jpg')
.rotate()
.resize(200)
.toBuffer()
.then( data => ... )
.catch( err => ... );
.then( data => { ... })
.catch( err => { ... });
```
### Async/await
```javascript
const semiTransparentRedPng = await sharp({
create: {
width: 48,
height: 48,
channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 0.5 }
}
})
.png()
.toBuffer();
```
### Stream
```javascript
const roundedCorners = Buffer.from(
'<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'
@@ -55,7 +73,10 @@ const roundedCorners = Buffer.from(
const roundedCornerResizer =
sharp()
.resize(200, 200)
.overlayWith(roundedCorners, { cutout: true })
.composite([{
input: roundedCorners,
blend: 'dest-in'
}])
.png();
readableStream
@@ -63,15 +84,15 @@ readableStream
.pipe(writableStream);
```
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.png?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.svg?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
### Documentation
Visit [sharp.pixelplumbing.com](http://sharp.pixelplumbing.com/) for complete
[installation instructions](http://sharp.pixelplumbing.com/page/install),
[API documentation](http://sharp.pixelplumbing.com/page/api),
[benchmark tests](http://sharp.pixelplumbing.com/page/performance) and
[changelog](http://sharp.pixelplumbing.com/page/changelog).
Visit [sharp.pixelplumbing.com](https://sharp.pixelplumbing.com/) for complete
[installation instructions](https://sharp.pixelplumbing.com/page/install),
[API documentation](https://sharp.pixelplumbing.com/page/api),
[benchmark tests](https://sharp.pixelplumbing.com/page/performance) and
[changelog](https://sharp.pixelplumbing.com/page/changelog).
### Contributing
@@ -80,12 +101,12 @@ covers reporting bugs, requesting features and submitting code changes.
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,

View File

@@ -7,6 +7,7 @@ environment:
- nodejs_version: "6"
- nodejs_version: "8"
- nodejs_version: "10"
- nodejs_version: "11"
install:
- ps: Install-Product node $env:nodejs_version x64
- npm install -g npm@5

View File

@@ -140,7 +140,6 @@
'../vendor/lib/libgsf-1.so',
'../vendor/lib/libgthread-2.0.so',
'../vendor/lib/libharfbuzz.so',
'../vendor/lib/libharfbuzz-subset.so.0',
'../vendor/lib/libjpeg.so',
'../vendor/lib/liblcms2.so',
'../vendor/lib/liborc-0.4.so',
@@ -183,13 +182,23 @@
},
'configurations': {
'Release': {
'cflags_cc': [
'-Wno-cast-function-type',
'-Wno-deprecated-declarations'
],
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': [
'-Wno-deprecated-declarations'
]
},
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1
}
},
'msvs_disabled_warnings': [
4275
4275,
4996
]
}
},

View File

@@ -16,6 +16,22 @@ sharp('rgba.png')
Returns **Sharp**
## ensureAlpha
Ensure alpha channel, if missing. The added alpha channel will be fully opaque. This is a no-op if the image already has an alpha channel.
### Examples
```javascript
sharp('rgb.jpg')
.ensureAlpha()
.toFile('rgba.png', function(err, info) {
// rgba.png is a 4 channel image with a fully opaque alpha channel
});
```
Returns **Sharp**
## extractChannel
Extract a single channel from a multi-channel image.

View File

@@ -1,33 +1,41 @@
<!-- Generated by documentation.js. Update this documentation by updating the source code. -->
## overlayWith
## composite
Overlay (composite) an image over the processed (resized, extracted etc.) image.
Composite image(s) over the processed (resized, extracted etc.) image.
The overlay image must be the same size or smaller than the processed image.
The images to composite must be the same size or smaller than the processed image.
If both `top` and `left` options are provided, they take precedence over `gravity`.
If the overlay image contains an alpha channel then composition with premultiplication will occur.
The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
`dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
`xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
`colour-dodge`, `color-dodge`, `colour-burn`,`color-burn`,
`hard-light`, `soft-light`, `difference`, `exclusion`.
More information about blend modes can be found at
[https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode][1]
and [https://www.cairographics.org/operators/][2]
### Parameters
- `overlay` **([Buffer][1] \| [String][2])** Buffer containing image data or String containing the path to an image file.
- `options` **[Object][3]?**
- `options.gravity` **[String][2]** gravity at which to place the overlay. (optional, default `'centre'`)
- `options.top` **[Number][4]?** the pixel offset from the top edge.
- `options.left` **[Number][4]?** the pixel offset from the left edge.
- `options.tile` **[Boolean][5]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `options.cutout` **[Boolean][5]** set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. (optional, default `false`)
- `options.density` **[Number][4]** number representing the DPI for vector overlay image. (optional, default `72`)
- `options.raw` **[Object][3]?** describes overlay when using raw pixel data.
- `options.raw.width` **[Number][4]?**
- `options.raw.height` **[Number][4]?**
- `options.raw.channels` **[Number][4]?**
- `options.create` **[Object][3]?** describes a blank overlay to be created.
- `options.create.width` **[Number][4]?**
- `options.create.height` **[Number][4]?**
- `options.create.channels` **[Number][4]?** 3-4
- `options.create.background` **([String][2] \| [Object][3])?** parsed by the [color][6] module to extract values for red, green, blue and alpha.
- `images` **[Array][3]&lt;[Object][4]>** Ordered list of images to composite
- `images[].input` **([Buffer][5] \| [String][6])?** Buffer containing image data or String containing the path to an image file.
- `images[].blend` **[String][6]** how to blend this image with the image below. (optional, default `'over'`)
- `images[].gravity` **[String][6]** gravity at which to place the overlay. (optional, default `'centre'`)
- `images[].top` **[Number][7]?** the pixel offset from the top edge.
- `images[].left` **[Number][7]?** the pixel offset from the left edge.
- `images[].tile` **[Boolean][8]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `images[].density` **[Number][7]** number representing the DPI for vector overlay image. (optional, default `72`)
- `images[].raw` **[Object][4]?** describes overlay when using raw pixel data.
- `images[].raw.width` **[Number][7]?**
- `images[].raw.height` **[Number][7]?**
- `images[].raw.channels` **[Number][7]?**
- `images[].create` **[Object][4]?** describes a blank overlay to be created.
- `images[].create.width` **[Number][7]?**
- `images[].create.height` **[Number][7]?**
- `images[].create.channels` **[Number][7]?** 3-4
- `images[].create.background` **([String][6] \| [Object][4])?** parsed by the [color][9] module to extract values for red, green, blue and alpha.
### Examples
@@ -35,9 +43,8 @@ If the overlay image contains an alpha channel then composition with premultipli
sharp('input.png')
.rotate(180)
.resize(300)
.flatten()
.background('#ff6600')
.overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
.flatten( { background: '#ff6600' } )
.composite([{ input: 'overlay.png', gravity: 'southeast' }])
.sharpen()
.withMetadata()
.webp( { quality: 90 } )
@@ -49,20 +56,26 @@ sharp('input.png')
});
```
- Throws **[Error][7]** Invalid parameters
- Throws **[Error][10]** Invalid parameters
Returns **Sharp**
[1]: https://nodejs.org/api/buffer.html
[1]: https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[2]: https://www.cairographics.org/operators/
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[5]: https://nodejs.org/api/buffer.html
[6]: https://www.npmjs.org/package/color
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[9]: https://www.npmjs.org/package/color
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
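
To make the revised parameter list above concrete, here is a minimal sketch of the new `composite` call as these docs describe it; the file names are hypothetical and the overlays are assumed to be no larger than the processed image.

```javascript
const sharp = require('sharp');

// Composite a watermark and a texture over a resized photo.
// 'over' is the documented default blend mode; gravity places the overlay.
sharp('photo.jpg')                       // hypothetical input path
  .resize(640)
  .composite([
    { input: 'watermark.png', gravity: 'southeast' },
    { input: 'texture.png', blend: 'multiply' }
  ])
  .toFile('branded.jpg')
  .then(info => console.log(info))
  .catch(err => console.error(err));
```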

View File

@@ -9,11 +9,11 @@
a String containing the path to a JPEG, PNG, WebP, GIF, SVG or TIFF image file.
JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
- `options` **[Object][3]?** if present, is an Object with optional attributes.
- `options.failOnError` **[Boolean][4]** by default apply a "best effort"
to decode images, even if the data is corrupt or invalid. Set this flag to true
if you'd rather halt processing and raise an error when loading invalid images. (optional, default `false`)
- `options.failOnError` **[Boolean][4]** by default halt processing and raise an error when loading invalid images.
Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid. (optional, default `true`)
- `options.density` **[Number][5]** number representing the DPI for vector images. (optional, default `72`)
- `options.page` **[Number][5]** page number to extract for multi-page input (GIF, TIFF) (optional, default `0`)
- `options.pages` **[Number][5]** number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages. (optional, default `1`)
- `options.page` **[Number][5]** page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based. (optional, default `0`)
- `options.raw` **[Object][3]?** describes raw pixel input image data. See `raw()` for pixel ordering.
- `options.raw.width` **[Number][5]?**
- `options.raw.height` **[Number][5]?**
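
A short sketch of the constructor options documented above; the input files are hypothetical, and `failOnError: false` restores the previous best-effort decoding behaviour now that strict decoding is the default.

```javascript
const sharp = require('sharp');

// Decode strictly (the new default) and extract the third page
// of a multi-page TIFF (`page` is zero-based).
sharp('scan.tiff', { page: 2 })
  .png()
  .toFile('page3.png')
  .catch(err => console.error(err));

// Opt back into best-effort decoding for possibly-corrupt input.
sharp('maybe-corrupt.jpg', { failOnError: false })
  .resize(320)
  .toBuffer()
  .catch(err => console.error(err));
```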

View File

@@ -22,7 +22,7 @@ Returns **Sharp**
## metadata
Fast access to (uncached) image metadata without decoding any compressed image data.
A Promises/A+ promise is returned when `callback` is not provided.
A `Promise` is returned when `callback` is not provided.
- `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
- `size`: Total size of image in bytes, for Stream and Buffer input only
@@ -34,6 +34,8 @@ A Promises/A+ promise is returned when `callback` is not provided.
- `density`: Number of pixels per inch (DPI), if present
- `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
- `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
- `pages`: Number of pages/frames contained within the image, with support for TIFF, PDF, animated GIF and animated WebP
- `pageHeight`: Number of pixels high each page in this PDF image will be.
- `hasProfile`: Boolean indicating the presence of an embedded ICC profile
- `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
- `orientation`: Number value of the EXIF Orientation header, if present
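
A brief sketch reading the newly documented `pages` and `pageHeight` fields via `metadata()`; the input file is hypothetical.

```javascript
const sharp = require('sharp');

// metadata() returns a Promise when no callback is supplied.
sharp('animated.webp')
  .metadata()
  .then(metadata => {
    // pages: page/frame count for multi-page or animated input
    // pageHeight: per-page pixel height, documented for PDF input
    console.log(metadata.format, metadata.pages, metadata.pageHeight);
  })
  .catch(err => console.error(err));
```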
@@ -68,7 +70,7 @@ Returns **([Promise][5]&lt;[Object][6]> | Sharp)**
## stats
Access to pixel-derived image statistics for every channel in the image.
A Promise is returned when `callback` is not provided.
A `Promise` is returned when `callback` is not provided.
- `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
- `min` (minimum value in the channel)
@@ -103,9 +105,9 @@ Returns **[Promise][5]&lt;[Object][6]>**
## limitInputPixels
Do not process input images where the number of pixels (width _ height) exceeds this limit.
Do not process input images where the number of pixels (width x height) exceeds this limit.
Assumes image dimensions contained in the input metadata can be trusted.
The default limit is 268402689 (0x3FFF _ 0x3FFF) pixels.
The default limit is 268402689 (0x3FFF x 0x3FFF) pixels.
### Parameters

View File

@@ -118,7 +118,8 @@ Merge alpha transparency channel, if any, with a background.
### Parameters
- `options`
- `options` **[Object][2]?**
- `options.background` **([String][3] \| [Object][2])** background colour, parsed by the [color][4] module, defaults to black. (optional, default `{r:0,g:0,b:0}`)
Returns **Sharp**
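
A minimal sketch of the options-based `flatten` signature shown above, which replaces the removed `background()` helper; the file names are hypothetical.

```javascript
const sharp = require('sharp');

// Merge any alpha channel with an orange background,
// passing the colour via the new options object.
sharp('transparent.png')
  .flatten({ background: '#ff6600' })
  .jpeg()
  .toFile('opaque.jpg')
  .catch(err => console.error(err));
```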
@@ -130,9 +131,12 @@ This can improve the perceived brightness of a resized image in non-linear colou
JPEG and WebP input images will not take advantage of the shrink-on-load performance optimisation
when applying a gamma correction.
Supply a second argument to use a different output gamma value, otherwise the first value is used in both cases.
### Parameters
- `gamma` **[Number][1]** value between 1.0 and 3.0. (optional, default `2.2`)
- `gammaOut` **[Number][1]?** value between 1.0 and 3.0. (optional, defaults to same as `gamma`)
- Throws **[Error][5]** Invalid parameters
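
A sketch of the two-argument `gamma` form added here, using separate decode and encode values within the documented 1.0 to 3.0 range; the file names are hypothetical.

```javascript
const sharp = require('sharp');

// Remove gamma 2.2 before resizing, re-apply gamma 2.4 afterwards,
// per the gamma / gammaOut parameters above.
sharp('photo.jpg')
  .resize(400)
  .gamma(2.2, 2.4)
  .toFile('resized.jpg')
  .catch(err => console.error(err));
```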
@@ -250,6 +254,35 @@ Apply the linear formula a \* input + b to the image (levels adjustment)
- `b` **[Number][1]** offset (optional, default `0.0`)
- Throws **[Error][5]** Invalid parameters
Returns **Sharp**
## recomb
Recomb the image with the specified matrix.
### Parameters
- `inputMatrix`
- `3x3` **[Array][7]&lt;[Array][7]&lt;[Number][1]>>** Recombination matrix
### Examples
```javascript
sharp(input)
.recomb([
[0.3588, 0.7044, 0.1368],
[0.2990, 0.5870, 0.1140],
[0.2392, 0.4696, 0.0912],
])
.raw()
.toBuffer(function(err, data, info) {
// data contains the raw pixel data after applying the recomb
// With this example input, a sepia filter has been applied
});
```
- Throws **[Error][5]** Invalid parameters
Returns **Sharp**

View File

@@ -154,6 +154,11 @@ Indexed PNG input at 1, 2 or 4 bits per pixel is converted to 8 bits per pixel.
- `options.progressive` **[Boolean][6]** use progressive (interlace) scan (optional, default `false`)
- `options.compressionLevel` **[Number][8]** zlib compression level, 0-9 (optional, default `9`)
- `options.adaptiveFiltering` **[Boolean][6]** use adaptive row filtering (optional, default `false`)
- `options.palette` **[Boolean][6]** quantise to a palette-based image with alpha transparency support, requires libimagequant (optional, default `false`)
- `options.quality` **[Number][8]** use the lowest number of colours needed to achieve given quality, requires libimagequant (optional, default `100`)
- `options.colours` **[Number][8]** maximum number of palette entries, requires libimagequant (optional, default `256`)
- `options.colors` **[Number][8]** alternative spelling of `options.colours`, requires libimagequant (optional, default `256`)
- `options.dither` **[Number][8]** level of Floyd-Steinberg error diffusion, requires libimagequant (optional, default `1.0`)
- `options.force` **[Boolean][6]** force PNG output, otherwise attempt to use input format (optional, default `true`)
### Examples
@@ -206,6 +211,10 @@ Use these TIFF options for output image.
- `options.force` **[Boolean][6]** force TIFF output, otherwise attempt to use input format (optional, default `true`)
- `options.compression` **[Boolean][6]** compression options: lzw, deflate, jpeg, ccittfax4 (optional, default `'jpeg'`)
- `options.predictor` **[Boolean][6]** compression predictor options: none, horizontal, float (optional, default `'horizontal'`)
- `options.pyramid` **[Boolean][6]** write an image pyramid (optional, default `false`)
- `options.tile` **[Boolean][6]** write a tiled tiff (optional, default `false`)
- `options.tileWidth` **[Boolean][6]** horizontal tile size (optional, default `256`)
- `options.tileHeight` **[Boolean][6]** vertical tile size (optional, default `256`)
- `options.xres` **[Number][8]** horizontal resolution in pixels/mm (optional, default `1.0`)
- `options.yres` **[Number][8]** vertical resolution in pixels/mm (optional, default `1.0`)
- `options.squash` **[Boolean][6]** squash 8-bit images down to 1 bit (optional, default `false`)
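
Two short sketches of the new output options listed above: palette-based PNG (requires a libimagequant-enabled libvips) and tiled, pyramidal TIFF. The paths are hypothetical.

```javascript
const sharp = require('sharp');

// Palette-based PNG with dithering (requires libimagequant support).
sharp('input.png')
  .png({ palette: true, colours: 128, dither: 0.8 })
  .toFile('palette.png')
  .catch(err => console.error(err));

// Tiled, pyramidal TIFF, e.g. for deep-zoom style viewers.
sharp('large-input.tiff')
  .tiff({ pyramid: true, tile: true, tileWidth: 512, tileHeight: 512 })
  .toFile('pyramid.tiff')
  .catch(err => console.error(err));
```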

View File

@@ -30,22 +30,23 @@ Possible interpolation kernels are:
- `nearest`: Use [nearest neighbour interpolation][4].
- `cubic`: Use a [Catmull-Rom spline][5].
- `lanczos2`: Use a [Lanczos kernel][6] with `a=2`.
- `mitchell`: Use a [Mitchell-Netravali spline][6].
- `lanczos2`: Use a [Lanczos kernel][7] with `a=2`.
- `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
### Parameters
- `width` **[Number][7]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
- `height` **[Number][7]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
- `options` **[Object][8]?**
- `options.width` **[String][9]?** alternative means of specifying `width`. If both are present this take priority.
- `options.height` **[String][9]?** alternative means of specifying `height`. If both are present this take priority.
- `options.fit` **[String][9]** how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`. (optional, default `'cover'`)
- `options.position` **[String][9]** position, gravity or strategy to use when `fit` is `cover` or `contain`. (optional, default `'centre'`)
- `options.background` **([String][9] \| [Object][8])** background colour when using a `fit` of `contain`, parsed by the [color][10] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
- `options.kernel` **[String][9]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
- `options.withoutEnlargement` **[Boolean][11]** do not enlarge if the width _or_ height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
- `options.fastShrinkOnLoad` **[Boolean][11]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)
- `width` **[Number][8]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
- `height` **[Number][8]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
- `options` **[Object][9]?**
- `options.width` **[String][10]?** alternative means of specifying `width`. If both are present this take priority.
- `options.height` **[String][10]?** alternative means of specifying `height`. If both are present this take priority.
- `options.fit` **[String][10]** how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`. (optional, default `'cover'`)
- `options.position` **[String][10]** position, gravity or strategy to use when `fit` is `cover` or `contain`. (optional, default `'centre'`)
- `options.background` **([String][10] \| [Object][9])** background colour when using a `fit` of `contain`, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
- `options.kernel` **[String][10]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
- `options.withoutEnlargement` **[Boolean][12]** do not enlarge if the width _or_ height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
- `options.fastShrinkOnLoad` **[Boolean][12]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)
### Examples
@@ -113,7 +114,7 @@ sharp(input)
});
```
- Throws **[Error][12]** Invalid parameters
- Throws **[Error][13]** Invalid parameters
Returns **Sharp**
@@ -124,12 +125,12 @@ This operation will always occur after resizing and extraction, if any.
### Parameters
- `extend` **([Number][7] \| [Object][8])** single pixel count to add to all edges or an Object with per-edge counts
- `extend.top` **[Number][7]?**
- `extend.left` **[Number][7]?**
- `extend.bottom` **[Number][7]?**
- `extend.right` **[Number][7]?**
- `extend.background` **([String][9] \| [Object][8])** background colour, parsed by the [color][10] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
- `extend` **([Number][8] \| [Object][9])** single pixel count to add to all edges or an Object with per-edge counts
- `extend.top` **[Number][8]?**
- `extend.left` **[Number][8]?**
- `extend.bottom` **[Number][8]?**
- `extend.right` **[Number][8]?**
- `extend.background` **([String][10] \| [Object][9])** background colour, parsed by the [color][11] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
### Examples
@@ -138,18 +139,17 @@ This operation will always occur after resizing and extraction, if any.
// to the top, left and right edges and 20 to the bottom edge
sharp(input)
.resize(140)
.)
.extend({
top: 10,
bottom: 20,
left: 10,
right: 10
right: 10,
background: { r: 0, g: 0, b: 0, alpha: 0 }
})
...
```
- Throws **[Error][12]** Invalid parameters
- Throws **[Error][13]** Invalid parameters
Returns **Sharp**
@@ -163,11 +163,11 @@ Extract a region of the image.
### Parameters
- `options` **[Object][8]**
- `options.left` **[Number][7]** zero-indexed offset from left edge
- `options.top` **[Number][7]** zero-indexed offset from top edge
- `options.width` **[Number][7]** dimension of extracted image
- `options.height` **[Number][7]** dimension of extracted image
- `options` **[Object][9]**
- `options.left` **[Number][8]** zero-indexed offset from left edge
- `options.top` **[Number][8]** zero-indexed offset from top edge
- `options.width` **[Number][8]** dimension of extracted image
- `options.height` **[Number][8]** dimension of extracted image
### Examples
@@ -189,7 +189,7 @@ sharp(input)
});
```
- Throws **[Error][12]** Invalid parameters
- Throws **[Error][13]** Invalid parameters
Returns **Sharp**
@@ -200,10 +200,10 @@ The `info` response Object will contain `trimOffsetLeft` and `trimOffsetTop` pro
### Parameters
- `threshold` **[Number][7]** the allowed difference from the top-left pixel, a number greater than zero. (optional, default `10`)
- `threshold` **[Number][8]** the allowed difference from the top-left pixel, a number greater than zero. (optional, default `10`)
- Throws **[Error][12]** Invalid parameters
- Throws **[Error][13]** Invalid parameters
Returns **Sharp**
@@ -217,16 +217,18 @@ Returns **Sharp**
[5]: https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline
[6]: https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel
[6]: https://www.cs.utexas.edu/~fussell/courses/cs384g-fall2013/lectures/mitchell/Mitchell.pdf
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[7]: https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[10]: https://www.npmjs.org/package/color
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[11]: https://www.npmjs.org/package/color
[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[13]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
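
A brief sketch using the newly documented `mitchell` kernel together with a `fit` option; `input.jpg` is a hypothetical file.

```javascript
const sharp = require('sharp');

// Downscale inside a 300x300 box using the Mitchell-Netravali kernel.
sharp('input.jpg')
  .resize(300, 300, { fit: 'inside', kernel: 'mitchell' })
  .toFile('small.jpg')
  .catch(err => console.error(err));
```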

View File

@@ -1,9 +1,92 @@
# Changelog
### v0.22 - "*uptake*"
Requires libvips v8.7.4.
#### v0.22.0 - 18<sup>th</sup> March 2019
* Remove functions previously deprecated in v0.21.0:
`background`, `crop`, `embed`, `ignoreAspectRatio`, `max`, `min` and `withoutEnlargement`.
* Add `composite` operation supporting multiple images and blend modes; deprecate `overlayWith`.
[#728](https://github.com/lovell/sharp/issues/728)
* Add support for `pages` input option for multi-page input.
[#1566](https://github.com/lovell/sharp/issues/1566)
* Allow Stream-based input of raw pixel data.
[#1579](https://github.com/lovell/sharp/issues/1579)
* Add support for `page` input option to GIF and PDF.
[#1595](https://github.com/lovell/sharp/pull/1595)
[@ramiel](https://github.com/ramiel)
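
As an illustration of the Stream-based raw pixel input added in #1579, here is a hedged sketch; it assumes the raw dimensions are declared via the constructor's `raw` option, as in the input documentation above, and the source file of raw RGBA data is hypothetical.

```javascript
const fs = require('fs');
const sharp = require('sharp');

// A readable stream of raw RGBA pixel data (hypothetical file).
const rawStream = fs.createReadStream('pixels.rgba');

rawStream
  .pipe(sharp({ raw: { width: 128, height: 128, channels: 4 } }).png())
  .pipe(fs.createWriteStream('pixels.png'));
```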
### v0.21 - "*teeth*"
Requires libvips v8.7.0.
#### v0.21.3 - 19<sup>th</sup> January 2019
* Input image decoding now fails fast, set `failOnError` to change this behaviour.
* Failed filesystem-based input now separates missing file and invalid format errors.
[#1542](https://github.com/lovell/sharp/issues/1542)
#### v0.21.2 - 13<sup>th</sup> January 2019
* Ensure all metadata is removed from PNG output unless `withMetadata` used.
* Ensure shortest edge is at least one pixel after resizing.
[#1003](https://github.com/lovell/sharp/issues/1003)
* Add `ensureAlpha` operation to add an alpha channel, if missing.
[#1153](https://github.com/lovell/sharp/issues/1153)
* Expose `pages` and `pageHeight` metadata for multi-page input images.
[#1205](https://github.com/lovell/sharp/issues/1205)
* Expose PNG output options requiring libimagequant.
[#1484](https://github.com/lovell/sharp/issues/1484)
* Expose underlying error message for invalid input.
[#1505](https://github.com/lovell/sharp/issues/1505)
* Prevent mutation of options passed to `jpeg`.
[#1516](https://github.com/lovell/sharp/issues/1516)
* Ensure forced output format applied correctly when output chaining.
[#1528](https://github.com/lovell/sharp/issues/1528)
#### v0.21.1 - 7<sup>th</sup> December 2018
* Install: support `sharp_dist_base_url` npm config, like existing `SHARP_DIST_BASE_URL`.
[#1422](https://github.com/lovell/sharp/pull/1422)
[@SethWen](https://github.com/SethWen)
* Ensure `channel` metadata is correct for raw, greyscale output.
[#1425](https://github.com/lovell/sharp/issues/1425)
* Add support for the "mitchell" kernel for image reductions.
[#1438](https://github.com/lovell/sharp/pull/1438)
[@Daiz](https://github.com/Daiz)
* Allow separate parameters for gamma encoding and decoding.
[#1439](https://github.com/lovell/sharp/pull/1439)
[@Daiz](https://github.com/Daiz)
* Build prototype with `Object.assign` to allow minification.
[#1475](https://github.com/lovell/sharp/pull/1475)
[@jaubourg](https://github.com/jaubourg)
* Expose libvips' recombination matrix operation.
[#1477](https://github.com/lovell/sharp/pull/1477)
[@fromkeith](https://github.com/fromkeith)
* Expose libvips' pyramid/tile options for TIFF output.
[#1483](https://github.com/lovell/sharp/pull/1483)
[@mbklein](https://github.com/mbklein)
#### v0.21.0 - 4<sup>th</sup> October 2018
* Deprecate the following resize-related functions:

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="86 86 550 550">
<!-- Copyright 2019 Lovell Fuller. This work is licensed under the Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) License. -->
<path fill="none" stroke="#9c0" stroke-width="80" d="M258.411 285.777l200.176-26.8M244.113 466.413L451.44 438.66M451.441 438.66V238.484M451.441 88.363v171.572l178.725-23.917M270.323 255.602V477.22M272.71 634.17V462.591L93.984 486.515"/>
<path fill="none" stroke="#090" stroke-width="80" d="M451.441 610.246V438.66l178.725-23.91M269.688 112.59v171.58L90.964 308.093"/>
</svg>


View File

@@ -1,5 +1,7 @@
# sharp
<img src="image/sharp-logo.svg" width="160" height="160" alt="sharp logo" align="right">
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
@@ -119,17 +121,20 @@ the help and code contributions of the following people:
* [Aidan Hoolachan](https://github.com/ajhool)
* [Axel Eirola](https://github.com/aeirola)
* [Freezy](https://github.com/freezy)
* [Julian Aubourg](https://github.com/jaubourg)
* [Keith Belovay](https://github.com/fromkeith)
* [Michael B. Klein](https://github.com/mbklein)
Thank you!
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,

View File

@@ -15,7 +15,7 @@ yarn add sharp
### Building from source
Pre-compiled binaries for sharp are provided for use with
Node versions 6, 8 and 10 on
Node versions 6, 8, 10 and 11 on
64-bit Windows, OS X and Linux platforms.
Sharp will be built from source at install time when:
@@ -36,7 +36,7 @@ Building from source requires:
[![Ubuntu 16.04 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`.
This involves an automated HTTPS download of approximately 8MB.
This involves an automated HTTPS download of approximately 9MB.
Most Linux-based (glibc, musl) operating systems running on x64 and ARMv6+ CPUs should "just work", e.g.:
@@ -72,7 +72,9 @@ libvips is available in the
[testing repository](https://pkgs.alpinelinux.org/packages?name=vips-dev):
```sh
apk add vips-dev fftw-dev build-base --update-cache --repository https://dl-3.alpinelinux.org/alpine/edge/testing/
apk add vips-dev fftw-dev build-base --update-cache \
--repository https://alpine.global.ssl.fastly.net/alpine/edge/testing/ \
--repository https://alpine.global.ssl.fastly.net/alpine/edge/main
```
The smaller stack size of musl libc means
@@ -84,7 +86,7 @@ via `sharp.cache(false)` to avoid a stack overflow.
[![OS X 10.12 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`.
This involves an automated HTTPS download of approximately 7MB.
This involves an automated HTTPS download of approximately 8MB.
To use your own version of libvips instead of the provided binaries, make sure it is
at least the version listed under `config.libvips` in the `package.json` file and
@@ -95,7 +97,9 @@ that it can be located using `pkg-config --modversion vips-cpp`.
[![Windows x64 Build Status](https://ci.appveyor.com/api/projects/status/pgtul704nkhhg6sg)](https://ci.appveyor.com/project/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules\sharp\vendor` during `npm install`.
This involves an automated HTTPS download of approximately 13MB.
This involves an automated HTTPS download of approximately 14MB.
If you are having issues during installation consider removing the directory
`C:\Users\[user]\AppData\Roaming\npm-cache\_libvips`.
Only 64-bit (x64) `node.exe` is supported.
@@ -146,16 +150,25 @@ docker pull tailor/docker-libvips
### AWS Lambda
A [deployment package](http://docs.aws.amazon.com/lambda/latest/dg/nodejs-create-deployment-pkg.html) for the
[Lambda Execution Environment](http://docs.aws.amazon.com/lambda/latest/dg/current-supported-versions.html)
can be built using Docker.
Set the Lambda runtime to Node.js 8.10.
The binaries in the `node_modules` directory of the
[deployment package](https://docs.aws.amazon.com/lambda/latest/dg/nodejs-create-deployment-pkg.html)
must be for the Linux x64 platform/architecture.
On non-Linux machines such as OS X and Windows run the following:
```sh
rm -rf node_modules/sharp
docker run -v "$PWD":/var/task lambci/lambda:build-nodejs8.10 npm install
npm install --arch=x64 --platform=linux --target=8.10.0 sharp
```
Set the Lambda runtime to Node.js 8.10.
Alternatively a Docker container closely matching the Lambda runtime can be used:
```sh
rm -rf node_modules/sharp
docker run -v "$PWD":/var/task lambci/lambda:build-nodejs8.10 npm install sharp
```
To get the best performance select the largest memory available.
A 1536 MB function provides ~12x more CPU time than a 128 MB function.
@@ -217,7 +230,14 @@ you can do so via
[https://github.com/lovell/sharp-libvips/releases](https://github.com/lovell/sharp-libvips/releases)
Should you wish to install these from your own location,
set the `SHARP_DIST_BASE_URL` environment variable, e.g.
set the `sharp_dist_base_url` npm config option, e.g.
```sh
npm config set sharp_dist_base_url "https://hostname/path/"
npm install sharp
```
or set the `SHARP_DIST_BASE_URL` environment variable, e.g.
```sh
SHARP_DIST_BASE_URL="https://hostname/path/" npm install sharp

View File

@@ -16,11 +16,12 @@ const libvips = require('../lib/libvips');
const platform = require('../lib/platform');
const minimumLibvipsVersion = libvips.minimumLibvipsVersion;
const distBaseUrl = process.env.SHARP_DIST_BASE_URL || `https://github.com/lovell/sharp-libvips/releases/download/v${minimumLibvipsVersion}/`;
const distBaseUrl = process.env.npm_config_sharp_dist_base_url || process.env.SHARP_DIST_BASE_URL || `https://github.com/lovell/sharp-libvips/releases/download/v${minimumLibvipsVersion}/`;
const fail = function (err) {
npmLog.error('sharp', err.message);
npmLog.error('sharp', 'Please see http://sharp.pixelplumbing.com/page/install');
npmLog.info('sharp', 'Attempting to build from source via node-gyp but this may fail due to the above error');
npmLog.info('sharp', 'Please see https://sharp.pixelplumbing.com/page/install for required dependencies');
process.exit(1);
};
@@ -33,7 +34,12 @@ const extractTarball = function (tarPath) {
cwd: vendorPath,
strict: true
})
.catch(fail);
.catch(function (err) {
if (/unexpected end of file/.test(err.message)) {
npmLog.error('sharp', `Please delete ${tarPath} as it is not a valid tarball`);
}
fail(err);
});
};
try {
@@ -55,8 +61,8 @@ try {
if (arch === 'ia32') {
throw new Error(`Intel Architecture 32-bit systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
if (platformAndArch === 'freebsd-x64') {
throw new Error(`FreeBSD systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
if (platformAndArch === 'freebsd-x64' || platformAndArch === 'openbsd-x64' || platformAndArch === 'sunos-x64') {
throw new Error(`BSD/SunOS systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
}
if (detectLibc.family === detectLibc.GLIBC && detectLibc.version && semver.lt(`${detectLibc.version}.0`, '2.13.0')) {
throw new Error(`Use with glibc version ${detectLibc.version} requires manual installation of libvips >= ${minimumLibvipsVersion}`);
@@ -79,7 +85,9 @@ try {
if (response.statusCode !== 200) {
throw new Error(`Status ${response.statusCode}`);
}
response.pipe(tmpFile);
response
.on('error', fail)
.pipe(tmpFile);
});
tmpFile
.on('error', fail)

View File

@@ -29,6 +29,23 @@ function removeAlpha () {
return this;
}
/**
* Ensure alpha channel, if missing. The added alpha channel will be fully opaque. This is a no-op if the image already has an alpha channel.
*
* @example
* sharp('rgb.jpg')
* .ensureAlpha()
* .toFile('rgba.png', function(err, info) {
* // rgba.png is a 4 channel image with a fully opaque alpha channel
* });
*
* @returns {Sharp}
*/
function ensureAlpha () {
this.options.ensureAlpha = true;
return this;
}
/**
* Extract a single channel from a multi-channel image.
*
@@ -117,14 +134,13 @@ function bandbool (boolOp) {
* @private
*/
module.exports = function (Sharp) {
// Public instance functions
[
Object.assign(Sharp.prototype, {
// Public instance functions
removeAlpha,
ensureAlpha,
extractChannel,
joinChannel,
bandbool
].forEach(function (f) {
Sharp.prototype[f.name] = f;
});
// Class attributes
Sharp.bool = bool;

View File

@@ -1,7 +1,5 @@
'use strict';
const deprecate = require('util').deprecate;
const color = require('color');
const is = require('./is');
@@ -17,24 +15,6 @@ const colourspace = {
srgb: 'srgb'
};
/**
* @deprecated
* @private
*/
function background (rgba) {
const colour = color(rgba);
const background = [
colour.red(),
colour.green(),
colour.blue(),
Math.round(colour.alpha() * 255)
];
this.options.resizeBackground = background;
this.options.extendBackground = background;
this.options.flattenBackground = background.slice(0, 3);
return this;
}
/**
* Tint the image using the provided chroma while preserving the image luminance.
* An alpha channel may be present and will be unchanged by the operation.
@@ -123,7 +103,7 @@ function _setColourOption (key, val) {
* @private
*/
module.exports = function (Sharp) {
[
Object.assign(Sharp.prototype, {
// Public
tint,
greyscale,
@@ -132,12 +112,8 @@ module.exports = function (Sharp) {
toColorspace,
// Private
_setColourOption
].forEach(function (f) {
Sharp.prototype[f.name] = f;
});
// Class attributes
Sharp.colourspace = colourspace;
Sharp.colorspace = colourspace;
// Deprecated
Sharp.prototype.background = deprecate(background, 'background(background) is deprecated, use resize({ background }), extend({ background }) or flatten({ background }) instead');
};

View File

@@ -1,22 +1,66 @@
'use strict';
const deprecate = require('util').deprecate;
const is = require('./is');
/**
* Overlay (composite) an image over the processed (resized, extracted etc.) image.
* Blend modes.
* @member
* @private
*/
const blend = {
clear: 'clear',
source: 'source',
over: 'over',
in: 'in',
out: 'out',
atop: 'atop',
dest: 'dest',
'dest-over': 'dest-over',
'dest-in': 'dest-in',
'dest-out': 'dest-out',
'dest-atop': 'dest-atop',
xor: 'xor',
add: 'add',
saturate: 'saturate',
multiply: 'multiply',
screen: 'screen',
overlay: 'overlay',
darken: 'darken',
lighten: 'lighten',
'colour-dodge': 'colour-dodge',
'color-dodge': 'colour-dodge',
'colour-burn': 'colour-burn',
'color-burn': 'colour-burn',
'hard-light': 'hard-light',
'soft-light': 'soft-light',
difference: 'difference',
exclusion: 'exclusion'
};
/**
* Composite image(s) over the processed (resized, extracted etc.) image.
*
* The overlay image must be the same size or smaller than the processed image.
* The images to composite must be the same size or smaller than the processed image.
* If both `top` and `left` options are provided, they take precedence over `gravity`.
*
* If the overlay image contains an alpha channel then composition with premultiplication will occur.
* The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
* `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
* `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
* `colour-dodge`, `color-dodge`, `colour-burn`,`color-burn`,
* `hard-light`, `soft-light`, `difference`, `exclusion`.
*
* More information about blend modes can be found at
* https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
* and https://www.cairographics.org/operators/
*
* @example
* sharp('input.png')
* .rotate(180)
* .resize(300)
* .flatten()
* .background('#ff6600')
* .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
* .flatten( { background: '#ff6600' } )
* .composite([{ input: 'overlay.png', gravity: 'southeast' }])
* .sharpen()
* .withMetadata()
* .webp( { quality: 90 } )
@@ -27,70 +71,104 @@ const is = require('./is');
* // sharpened, with metadata, 90% quality WebP image data. Phew!
* });
*
* @param {(Buffer|String)} overlay - Buffer containing image data or String containing the path to an image file.
* @param {Object} [options]
* @param {String} [options.gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [options.top] - the pixel offset from the top edge.
* @param {Number} [options.left] - the pixel offset from the left edge.
* @param {Boolean} [options.tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Boolean} [options.cutout=false] - set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another.
* @param {Number} [options.density=72] - number representing the DPI for vector overlay image.
* @param {Object} [options.raw] - describes overlay when using raw pixel data.
* @param {Number} [options.raw.width]
* @param {Number} [options.raw.height]
* @param {Number} [options.raw.channels]
* @param {Object} [options.create] - describes a blank overlay to be created.
* @param {Number} [options.create.width]
* @param {Number} [options.create.height]
* @param {Number} [options.create.channels] - 3-4
* @param {String|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {Object[]} images - Ordered list of images to composite
* @param {Buffer|String} [images[].input] - Buffer containing image data or String containing the path to an image file.
* @param {String} [images[].blend='over'] - how to blend this image with the image below.
* @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [images[].top] - the pixel offset from the top edge.
* @param {Number} [images[].left] - the pixel offset from the left edge.
* @param {Boolean} [images[].tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Number} [images[].density=72] - number representing the DPI for vector overlay image.
* @param {Object} [images[].raw] - describes overlay when using raw pixel data.
* @param {Number} [images[].raw.width]
* @param {Number} [images[].raw.height]
* @param {Number} [images[].raw.channels]
* @param {Object} [images[].create] - describes a blank overlay to be created.
* @param {Number} [images[].create.width]
* @param {Number} [images[].create.height]
* @param {Number} [images[].create.channels] - 3-4
* @param {String|Object} [images[].create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function overlayWith (overlay, options) {
this.options.overlay = this._createInputDescriptor(overlay, options, {
allowStream: false
});
if (is.object(options)) {
if (is.defined(options.tile)) {
if (is.bool(options.tile)) {
this.options.overlayTile = options.tile;
} else {
throw new Error('Invalid overlay tile ' + options.tile);
}
}
if (is.defined(options.cutout)) {
if (is.bool(options.cutout)) {
this.options.overlayCutout = options.cutout;
} else {
throw new Error('Invalid overlay cutout ' + options.cutout);
}
}
if (is.defined(options.left) || is.defined(options.top)) {
if (is.integer(options.left) && options.left >= 0 && is.integer(options.top) && options.top >= 0) {
this.options.overlayXOffset = options.left;
this.options.overlayYOffset = options.top;
} else {
throw new Error('Invalid overlay left ' + options.left + ' and/or top ' + options.top);
}
}
if (is.defined(options.gravity)) {
if (is.integer(options.gravity) && is.inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (is.string(options.gravity) && is.integer(this.constructor.gravity[options.gravity])) {
this.options.overlayGravity = this.constructor.gravity[options.gravity];
} else {
throw new Error('Unsupported overlay gravity ' + options.gravity);
}
}
function composite (images) {
if (!Array.isArray(images)) {
throw is.invalidParameterError('images to composite', 'array', images);
}
this.options.composite = images.map(image => {
if (!is.object(image)) {
throw is.invalidParameterError('image to composite', 'object', image);
}
const { raw, density } = image;
const inputOptions = (raw || density) ? { raw, density } : undefined;
const composite = {
input: this._createInputDescriptor(image.input, inputOptions, { allowStream: false }),
blend: 'over',
tile: false,
left: -1,
top: -1,
gravity: 0
};
if (is.defined(image.blend)) {
if (is.string(blend[image.blend])) {
composite.blend = blend[image.blend];
} else {
throw is.invalidParameterError('blend', 'valid blend name', image.blend);
}
}
if (is.defined(image.tile)) {
if (is.bool(image.tile)) {
composite.tile = image.tile;
} else {
throw is.invalidParameterError('tile', 'boolean', image.tile);
}
}
if (is.defined(image.left)) {
if (is.integer(image.left) && image.left >= 0) {
composite.left = image.left;
} else {
throw is.invalidParameterError('left', 'positive integer', image.left);
}
}
if (is.defined(image.top)) {
if (is.integer(image.top) && image.top >= 0) {
composite.top = image.top;
} else {
throw is.invalidParameterError('top', 'positive integer', image.top);
}
}
if (composite.left !== composite.top && Math.min(composite.left, composite.top) === -1) {
throw new Error('Expected both left and top to be set');
}
if (is.defined(image.gravity)) {
if (is.integer(image.gravity) && is.inRange(image.gravity, 0, 8)) {
composite.gravity = image.gravity;
} else if (is.string(image.gravity) && is.integer(this.constructor.gravity[image.gravity])) {
composite.gravity = this.constructor.gravity[image.gravity];
} else {
throw is.invalidParameterError('gravity', 'valid gravity', image.gravity);
}
}
return composite;
});
return this;
}
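// Illustrative usage sketch of the composite API documented above; the
// file names are placeholders and 'multiply' is one of the libvips blend modes.
sharp('background.jpg')
  .composite([
    { input: 'overlay.png', gravity: 'southeast' },
    { input: 'watermark.png', top: 10, left: 10, blend: 'multiply' }
  ])
  .toFile('output.jpg')
  .then(info => console.log(info))
  .catch(err => console.error(err));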
/**
* @deprecated
* @private
*/
function overlayWith (input, options) {
const blend = (is.object(options) && options.cutout) ? 'dest-in' : 'over';
return this.composite([Object.assign({ input, blend }, options)]);
}
/**
* Decorate the Sharp prototype with composite-related functions.
* @private
*/
module.exports = function (Sharp) {
Sharp.prototype.composite = composite;
Sharp.prototype.overlayWith = deprecate(overlayWith, 'overlayWith(input, options) is deprecated, use composite([{ input, ...options }]) instead');
Sharp.blend = blend;
};

View File

@@ -7,7 +7,7 @@ const events = require('events');
const is = require('./is');
require('./libvips').hasVendoredLibvips();
const sharp = require('bindings')('sharp.node');
// Use NODE_DEBUG=sharp to enable libvips warnings
const debuglog = util.debuglog('sharp');
@@ -61,11 +61,11 @@ const debuglog = util.debuglog('sharp');
 * a String containing the path to a JPEG, PNG, WebP, GIF, SVG or TIFF image file.
* JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {Boolean} [options.failOnError=true] - by default halt processing and raise an error when loading invalid images.
* Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid.
* @param {Number} [options.density=72] - number representing the DPI for vector images.
* @param {Number} [options.pages=1] - number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages.
* @param {Number} [options.page=0] - page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based.
* @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
* @param {Number} [options.raw.width]
* @param {Number} [options.raw.height]
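// Illustrative sketch of the revised constructor options documented above;
// the file name and page numbers are placeholders. failOnError now defaults
// to true, so "best effort" decoding must be requested explicitly, and
// pages selects how many pages to extract from a multi-page input.
const image = sharp('multi-page.tiff', { failOnError: false, page: 1, pages: 2 });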
@@ -136,6 +136,7 @@ const Sharp = function (input, options) {
thresholdGrayscale: true,
trimThreshold: 0,
gamma: 0,
gammaOut: 0,
greyscale: false,
normalise: 0,
booleanBufferIn: null,
@@ -143,13 +144,9 @@ const Sharp = function (input, options) {
joinChannelIn: [],
extractChannel: -1,
removeAlpha: false,
ensureAlpha: false,
colourspace: 'srgb',
// overlay
overlayGravity: 0,
overlayXOffset: -1,
overlayYOffset: -1,
overlayTile: false,
overlayCutout: false,
composite: [],
// output
fileOut: '',
formatOut: 'input',
@@ -169,6 +166,10 @@ const Sharp = function (input, options) {
pngProgressive: false,
pngCompressionLevel: 9,
pngAdaptiveFiltering: false,
pngPalette: false,
pngQuality: 100,
pngColours: 256,
pngDither: 1,
webpQuality: 80,
webpAlphaQuality: 100,
webpLossless: false,
@@ -176,7 +177,11 @@ const Sharp = function (input, options) {
tiffQuality: 80,
tiffCompression: 'jpeg',
tiffPredictor: 'horizontal',
tiffPyramid: false,
tiffSquash: false,
tiffTile: false,
tiffTileHeight: 256,
tiffTileWidth: 256,
tiffXres: 1.0,
tiffYres: 1.0,
tileSize: 256,

View File

@@ -9,7 +9,7 @@ const sharp = require('../build/Release/sharp.node');
* @private
*/
function _createInputDescriptor (input, inputOptions, containerOptions) {
const inputDescriptor = { failOnError: true };
if (is.string(input)) {
// filesystem
inputDescriptor.file = input;
@@ -19,6 +19,10 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
} else if (is.plainObject(input) && !is.defined(inputOptions)) {
// Plain Object descriptor, e.g. create
inputOptions = input;
if (is.plainObject(inputOptions.raw)) {
// Raw Stream
inputDescriptor.buffer = [];
}
} else if (!is.defined(input) && is.object(containerOptions) && containerOptions.allowStream) {
// Stream
inputDescriptor.buffer = [];
@@ -57,7 +61,12 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
throw new Error('Expected width, height and channels for raw pixel input');
}
}
// Multi-page input (GIF, TIFF, PDF)
if (is.defined(inputOptions.pages)) {
if (is.integer(inputOptions.pages) && is.inRange(inputOptions.pages, -1, 100000)) {
inputDescriptor.pages = inputOptions.pages;
}
}
if (is.defined(inputOptions.page)) {
if (is.integer(inputOptions.page) && is.inRange(inputOptions.page, 0, 100000)) {
inputDescriptor.page = inputOptions.page;
@@ -174,7 +183,7 @@ function clone () {
/**
* Fast access to (uncached) image metadata without decoding any compressed image data.
* A `Promise` is returned when `callback` is not provided.
*
* - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
* - `size`: Total size of image in bytes, for Stream and Buffer input only
@@ -186,6 +195,8 @@ function clone () {
* - `density`: Number of pixels per inch (DPI), if present
* - `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
* - `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
* - `pages`: Number of pages/frames contained within the image, with support for TIFF, PDF, animated GIF and animated WebP
 * - `pageHeight`: Number of pixels high each page in a multi-page image will be.
* - `hasProfile`: Boolean indicating the presence of an embedded ICC profile
* - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* - `orientation`: Number value of the EXIF Orientation header, if present
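// Illustrative sketch of reading the metadata fields listed above, including
// the new pages and pageHeight values; the file name is a placeholder.
sharp('animated.webp')
  .metadata()
  .then(metadata => {
    console.log(metadata.format, metadata.width, metadata.height, metadata.pages);
  });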
@@ -253,7 +264,7 @@ function metadata (callback) {
/**
* Access to pixel-derived image statistics for every channel in the image.
* A `Promise` is returned when `callback` is not provided.
*
* - `channels`: Array of channel statistics for each channel in the image. Each channel statistic contains
* - `min` (minimum value in the channel)
@@ -321,9 +332,9 @@ function stats (callback) {
}
/**
* Do not process input images where the number of pixels (width x height) exceeds this limit.
* Assumes image dimensions contained in the input metadata can be trusted.
* The default limit is 268402689 (0x3FFF x 0x3FFF) pixels.
* @param {(Number|Boolean)} limit - an integral Number of pixels, zero or false to remove limit, true to use default limit.
* @returns {Sharp}
* @throws {Error} Invalid limit
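// Illustrative sketch of raising or removing the pixel limit described above;
// values and file name are placeholders.
sharp('very-large.jpg')
  .limitInputPixels(false) // false removes the limit entirely
  .resize(1024)
  .toBuffer();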
@@ -362,7 +373,7 @@ function sequentialRead (sequentialRead) {
* @private
*/
module.exports = function (Sharp) {
Object.assign(Sharp.prototype, {
// Private
_createInputDescriptor,
_write,
@@ -374,7 +385,5 @@ module.exports = function (Sharp) {
stats,
limitInputPixels,
sequentialRead
});
};

View File

@@ -172,6 +172,7 @@ function blur (sigma) {
/**
* Merge alpha transparency channel, if any, with a background.
* @param {Object} [options]
* @param {String|Object} [options.background={r: 0, g: 0, b: 0}] - background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black.
* @returns {Sharp}
*/
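// Illustrative sketch of flattening transparency onto a white background
// using the options object documented above; the colour string is parsed by
// the color module and the file name is a placeholder.
sharp('transparent.png')
  .flatten({ background: '#ffffff' })
  .jpeg()
  .toBuffer();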
@@ -189,11 +190,15 @@ function flatten (options) {
* This can improve the perceived brightness of a resized image in non-linear colour spaces.
* JPEG and WebP input images will not take advantage of the shrink-on-load performance optimisation
* when applying a gamma correction.
*
* Supply a second argument to use a different output gamma value, otherwise the first value is used in both cases.
*
* @param {Number} [gamma=2.2] value between 1.0 and 3.0.
* @param {Number} [gammaOut] value between 1.0 and 3.0. (optional, defaults to same as `gamma`)
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function gamma (gamma) {
function gamma (gamma, gammaOut) {
if (!is.defined(gamma)) {
// Default gamma correction of 2.2 (sRGB)
this.options.gamma = 2.2;
@@ -202,6 +207,14 @@ function gamma (gamma) {
} else {
throw new Error('Invalid gamma correction (1.0 to 3.0) ' + gamma);
}
if (!is.defined(gammaOut)) {
// Default gamma correction for output is same as input
this.options.gammaOut = this.options.gamma;
} else if (is.number(gammaOut) && is.inRange(gammaOut, 1, 3)) {
this.options.gammaOut = gammaOut;
} else {
throw new Error('Invalid output gamma correction (1.0 to 3.0) ' + gammaOut);
}
return this;
}
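// Illustrative sketch of the new optional second argument documented above:
// decode with gamma 2.2 and re-encode with gamma 3.0 (values are placeholders).
sharp('input.jpg')
  .resize(400)
  .gamma(2.2, 3.0)
  .toBuffer();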
@@ -365,12 +378,49 @@ function linear (a, b) {
return this;
}
/**
* Recomb the image with the specified matrix.
*
* @example
* sharp(input)
* .recomb([
* [0.3588, 0.7044, 0.1368],
* [0.2990, 0.5870, 0.1140],
* [0.2392, 0.4696, 0.0912],
* ])
* .raw()
* .toBuffer(function(err, data, info) {
* // data contains the raw pixel data after applying the recomb
* // With this example input, a sepia filter has been applied
* });
*
 * @param {Array<Array<Number>>} inputMatrix - 3x3 Recombination matrix
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function recomb (inputMatrix) {
if (!Array.isArray(inputMatrix) || inputMatrix.length !== 3 ||
inputMatrix[0].length !== 3 ||
inputMatrix[1].length !== 3 ||
inputMatrix[2].length !== 3
) {
// must pass in a kernel
throw new Error('Invalid Recomb Matrix');
}
this.options.recombMatrix = [
inputMatrix[0][0], inputMatrix[0][1], inputMatrix[0][2],
inputMatrix[1][0], inputMatrix[1][1], inputMatrix[1][2],
inputMatrix[2][0], inputMatrix[2][1], inputMatrix[2][2]
].map(Number);
return this;
}
/**
* Decorate the Sharp prototype with operation-related functions.
* @private
*/
module.exports = function (Sharp) {
Object.assign(Sharp.prototype, {
rotate,
flip,
flop,
@@ -385,8 +435,7 @@ module.exports = function (Sharp) {
convolve,
threshold,
boolean,
linear,
recomb
});
};

View File

@@ -32,7 +32,7 @@ const sharp = require('../build/Release/sharp.node');
*/
function toFile (fileOut, callback) {
if (!fileOut || fileOut.length === 0) {
const errOutputInvalid = new Error('Missing output file path');
if (is.fn(callback)) {
callback(errOutputInvalid);
} else {
@@ -175,30 +175,30 @@ function jpeg (options) {
throw new Error('Invalid chromaSubsampling (4:2:0, 4:4:4) ' + options.chromaSubsampling);
}
}
const trellisQuantisation = is.bool(options.trellisQuantization) ? options.trellisQuantization : options.trellisQuantisation;
if (is.defined(trellisQuantisation)) {
this._setBooleanOption('jpegTrellisQuantisation', trellisQuantisation);
}
if (is.defined(options.overshootDeringing)) {
this._setBooleanOption('jpegOvershootDeringing', options.overshootDeringing);
}
const optimiseScans = is.bool(options.optimizeScans) ? options.optimizeScans : options.optimiseScans;
if (is.defined(optimiseScans)) {
this._setBooleanOption('jpegOptimiseScans', optimiseScans);
if (optimiseScans) {
this.options.jpegProgressive = true;
}
}
const optimiseCoding = is.bool(options.optimizeCoding) ? options.optimizeCoding : options.optimiseCoding;
if (is.defined(optimiseCoding)) {
this._setBooleanOption('jpegOptimiseCoding', optimiseCoding);
}
const quantisationTable = is.number(options.quantizationTable) ? options.quantizationTable : options.quantisationTable;
if (is.defined(quantisationTable)) {
if (is.integer(quantisationTable) && is.inRange(quantisationTable, 0, 8)) {
this.options.jpegQuantisationTable = quantisationTable;
} else {
throw new Error('Invalid quantisation table (integer, 0-8) ' + quantisationTable);
}
}
}
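// Illustrative sketch: after the normalisation above, either spelling reaches
// the same libvips setting; values and file name are placeholders.
sharp('input.jpg')
  .jpeg({ quality: 80, optimiseScans: true, trellisQuantisation: true })
  .toBuffer();
// equivalent: .jpeg({ quality: 80, optimizeScans: true, trellisQuantization: true })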
@@ -221,6 +221,11 @@ function jpeg (options) {
* @param {Boolean} [options.progressive=false] - use progressive (interlace) scan
* @param {Number} [options.compressionLevel=9] - zlib compression level, 0-9
* @param {Boolean} [options.adaptiveFiltering=false] - use adaptive row filtering
* @param {Boolean} [options.palette=false] - quantise to a palette-based image with alpha transparency support, requires libimagequant
* @param {Number} [options.quality=100] - use the lowest number of colours needed to achieve given quality, requires libimagequant
* @param {Number} [options.colours=256] - maximum number of palette entries, requires libimagequant
* @param {Number} [options.colors=256] - alternative spelling of `options.colours`, requires libimagequant
* @param {Number} [options.dither=1.0] - level of Floyd-Steinberg error diffusion, requires libimagequant
* @param {Boolean} [options.force=true] - force PNG output, otherwise attempt to use input format
* @returns {Sharp}
* @throws {Error} Invalid options
@@ -240,6 +245,33 @@ function png (options) {
if (is.defined(options.adaptiveFiltering)) {
this._setBooleanOption('pngAdaptiveFiltering', options.adaptiveFiltering);
}
if (is.defined(options.palette)) {
this._setBooleanOption('pngPalette', options.palette);
if (this.options.pngPalette) {
if (is.defined(options.quality)) {
if (is.integer(options.quality) && is.inRange(options.quality, 0, 100)) {
this.options.pngQuality = options.quality;
} else {
throw is.invalidParameterError('quality', 'integer between 0 and 100', options.quality);
}
}
const colours = options.colours || options.colors;
if (is.defined(colours)) {
if (is.integer(colours) && is.inRange(colours, 2, 256)) {
this.options.pngColours = colours;
} else {
throw is.invalidParameterError('colours', 'integer between 2 and 256', colours);
}
}
if (is.defined(options.dither)) {
if (is.number(options.dither) && is.inRange(options.dither, 0, 1)) {
this.options.pngDither = options.dither;
} else {
throw is.invalidParameterError('dither', 'number between 0.0 and 1.0', options.dither);
}
}
}
}
}
return this._updateFormatOut('png', options);
}
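// Illustrative sketch of palette-based PNG output via the libimagequant-backed
// options documented above; assumes libvips was built with libimagequant
// support, and file names are placeholders.
sharp('input.png')
  .png({ palette: true, quality: 80, colours: 128, dither: 0.5 })
  .toFile('output.png');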
@@ -304,6 +336,10 @@ function webp (options) {
* @param {Boolean} [options.force=true] - force TIFF output, otherwise attempt to use input format
 * @param {String} [options.compression='jpeg'] - compression options: lzw, deflate, jpeg, ccittfax4, none
 * @param {String} [options.predictor='horizontal'] - compression predictor options: none, horizontal, float
 * @param {Boolean} [options.pyramid=false] - write an image pyramid
 * @param {Boolean} [options.tile=false] - write a tiled TIFF
 * @param {Number} [options.tileWidth=256] - horizontal tile size
 * @param {Number} [options.tileHeight=256] - vertical tile size
* @param {Number} [options.xres=1.0] - horizontal resolution in pixels/mm
* @param {Number} [options.yres=1.0] - vertical resolution in pixels/mm
* @param {Boolean} [options.squash=false] - squash 8-bit images down to 1 bit
@@ -311,51 +347,83 @@ function webp (options) {
* @throws {Error} Invalid options
*/
function tiff (options) {
  if (is.object(options)) {
    if (is.defined(options.quality)) {
      if (is.integer(options.quality) && is.inRange(options.quality, 1, 100)) {
        this.options.tiffQuality = options.quality;
      } else {
        throw new Error('Invalid quality (integer, 1-100) ' + options.quality);
      }
    }
    if (is.defined(options.squash)) {
      if (is.bool(options.squash)) {
        this.options.tiffSquash = options.squash;
      } else {
        throw new Error('Invalid Value for squash ' + options.squash + ' Only Boolean Values allowed for options.squash.');
      }
    }
    // tiling
    if (is.defined(options.tile)) {
      if (is.bool(options.tile)) {
        this.options.tiffTile = options.tile;
      } else {
        throw new Error('Invalid Value for tile ' + options.tile + ' Only Boolean values allowed for options.tile');
      }
    }
    if (is.defined(options.tileWidth)) {
      if (is.number(options.tileWidth) && options.tileWidth > 0) {
        this.options.tiffTileWidth = options.tileWidth;
      } else {
        throw new Error('Invalid Value for tileWidth ' + options.tileWidth + ' Only positive numeric values allowed for options.tileWidth');
      }
    }
    if (is.defined(options.tileHeight)) {
      if (is.number(options.tileHeight) && options.tileHeight > 0) {
        this.options.tiffTileHeight = options.tileHeight;
      } else {
        throw new Error('Invalid Value for tileHeight ' + options.tileHeight + ' Only positive numeric values allowed for options.tileHeight');
      }
    }
    // pyramid
    if (is.defined(options.pyramid)) {
      if (is.bool(options.pyramid)) {
        this.options.tiffPyramid = options.pyramid;
      } else {
        throw new Error('Invalid Value for pyramid ' + options.pyramid + ' Only Boolean values allowed for options.pyramid');
      }
    }
    // resolution
    if (is.defined(options.xres)) {
      if (is.number(options.xres)) {
        this.options.tiffXres = options.xres;
      } else {
        throw new Error('Invalid Value for xres ' + options.xres + ' Only numeric values allowed for options.xres');
      }
    }
    if (is.defined(options.yres)) {
      if (is.number(options.yres)) {
        this.options.tiffYres = options.yres;
      } else {
        throw new Error('Invalid Value for yres ' + options.yres + ' Only numeric values allowed for options.yres');
      }
    }
    // compression
    if (is.defined(options.compression)) {
      if (is.string(options.compression) && is.inArray(options.compression, ['lzw', 'deflate', 'jpeg', 'ccittfax4', 'none'])) {
        this.options.tiffCompression = options.compression;
      } else {
        const message = `Invalid compression option "${options.compression}". Should be one of: lzw, deflate, jpeg, ccittfax4, none`;
        throw new Error(message);
      }
    }
    // predictor
    if (is.defined(options.predictor)) {
      if (is.string(options.predictor) && is.inArray(options.predictor, ['none', 'horizontal', 'float'])) {
        this.options.tiffPredictor = options.predictor;
      } else {
        const message = `Invalid predictor option "${options.predictor}". Should be one of: none, horizontal, float`;
        throw new Error(message);
      }
    }
  }
return this._updateFormatOut('tiff', options);
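// Illustrative sketch of a tiled, pyramidal TIFF using the output options
// handled above; sizes and file names are placeholders.
sharp('input.jpg')
  .tiff({ compression: 'deflate', pyramid: true, tile: true, tileWidth: 512, tileHeight: 512 })
  .toFile('output.tiff');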
@@ -505,7 +573,9 @@ function tile (tile) {
* @returns {Sharp}
*/
function _updateFormatOut (formatOut, options) {
if (!(is.object(options) && options.force === false)) {
this.options.formatOut = formatOut;
}
return this;
}
@@ -641,7 +711,7 @@ function _pipeline (callback) {
* @private
*/
module.exports = function (Sharp) {
Object.assign(Sharp.prototype, {
// Public
toFile,
toBuffer,
@@ -658,7 +728,5 @@ module.exports = function (Sharp) {
_setBooleanOption,
_read,
_pipeline
});
};

View File

@@ -2,17 +2,22 @@
const detectLibc = require('detect-libc');
const env = process.env;
module.exports = function () {
const arch = env.npm_config_arch || process.arch;
const platform = env.npm_config_platform || process.platform;
const libc = (platform === 'linux' && detectLibc.isNonGlibcLinux) ? detectLibc.family : '';
const platformId = [`${platform}${libc}`];
if (arch === 'arm') {
platformId.push(`armv${env.npm_config_arm_version || process.config.variables.arm_version || '6'}`);
} else if (arch === 'arm64') {
platformId.push(`arm64v${env.npm_config_arm_version || '8'}`);
} else {
platformId.push(arch);
}
return platformId.join('-');
};
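// Illustrative sketch: the identifier this module produces drives prebuilt
// binary selection; the require path and example outputs are assumptions.
const platformArch = require('./lib/platform')();
// e.g. 'linux-x64', 'linuxmusl-arm64v8', 'darwin-x64', 'win32-ia32'
console.log(platformArch);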

View File

@@ -1,6 +1,5 @@
'use strict';
const deprecate = require('util').deprecate;
const is = require('./is');
/**
@@ -55,6 +54,7 @@ const strategy = {
const kernel = {
nearest: 'nearest',
cubic: 'cubic',
mitchell: 'mitchell',
lanczos2: 'lanczos2',
lanczos3: 'lanczos3'
};
@@ -110,6 +110,7 @@ const mapFitToCanvas = {
* Possible interpolation kernels are:
* - `nearest`: Use [nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation).
* - `cubic`: Use a [Catmull-Rom spline](https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline).
* - `mitchell`: Use a [Mitchell-Netravali spline](https://www.cs.utexas.edu/~fussell/courses/cs384g-fall2013/lectures/mitchell/Mitchell.pdf).
* - `lanczos2`: Use a [Lanczos kernel](https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel) with `a=2`.
* - `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
*
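// Illustrative sketch of selecting the newly added mitchell kernel when
// reducing; the file name is a placeholder.
sharp('input.jpg')
  .resize(200, 200, { kernel: 'mitchell' })
  .toBuffer();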
@@ -269,12 +270,11 @@ function resize (width, height, options) {
* // to the top, left and right edges and 20 to the bottom edge
* sharp(input)
* .resize(140)
* .extend({
* top: 10,
* bottom: 20,
* left: 10,
 *     right: 10,
* background: { r: 0, g: 0, b: 0, alpha: 0 }
* })
* ...
@@ -377,104 +377,16 @@ function trim (threshold) {
return this;
}
// Deprecated functions
/**
* @deprecated
* @private
*/
function crop (crop) {
this.options.canvas = 'crop';
if (!is.defined(crop)) {
// Default
this.options.position = gravity.center;
} else if (is.integer(crop) && is.inRange(crop, 0, 8)) {
// Gravity (numeric)
this.options.position = crop;
} else if (is.string(crop) && is.integer(gravity[crop])) {
// Gravity (string)
this.options.position = gravity[crop];
} else if (is.integer(crop) && crop >= strategy.entropy) {
// Strategy
this.options.position = crop;
} else if (is.string(crop) && is.integer(strategy[crop])) {
// Strategy (string)
this.options.position = strategy[crop];
} else {
throw is.invalidParameterError('crop', 'valid crop id/name/strategy', crop);
}
return this;
}
/**
* @deprecated
* @private
*/
function embed (embed) {
this.options.canvas = 'embed';
if (!is.defined(embed)) {
// Default
this.options.position = gravity.center;
} else if (is.integer(embed) && is.inRange(embed, 0, 8)) {
// Gravity (numeric)
this.options.position = embed;
} else if (is.string(embed) && is.integer(gravity[embed])) {
// Gravity (string)
this.options.position = gravity[embed];
} else {
throw is.invalidParameterError('embed', 'valid embed id/name', embed);
}
return this;
}
/**
* @deprecated
* @private
*/
function max () {
this.options.canvas = 'max';
return this;
}
/**
* @deprecated
* @private
*/
function min () {
this.options.canvas = 'min';
return this;
}
/**
* @deprecated
* @private
*/
function ignoreAspectRatio () {
this.options.canvas = 'ignore_aspect';
return this;
}
/**
* @deprecated
* @private
*/
function withoutEnlargement (withoutEnlargement) {
this.options.withoutEnlargement = is.bool(withoutEnlargement) ? withoutEnlargement : true;
return this;
}
/**
* Decorate the Sharp prototype with resize-related functions.
* @private
*/
module.exports = function (Sharp) {
Object.assign(Sharp.prototype, {
resize,
extend,
extract,
trim
});
// Class attributes
Sharp.gravity = gravity;
@@ -482,11 +394,4 @@ module.exports = function (Sharp) {
Sharp.kernel = kernel;
Sharp.fit = fit;
Sharp.position = position;
// Deprecated functions, to be removed in v0.22.0
Sharp.prototype.crop = deprecate(crop, 'crop(position) is deprecated, use resize({ fit: "cover", position }) instead');
Sharp.prototype.embed = deprecate(embed, 'embed(position) is deprecated, use resize({ fit: "contain", position }) instead');
Sharp.prototype.max = deprecate(max, 'max() is deprecated, use resize({ fit: "inside" }) instead');
Sharp.prototype.min = deprecate(min, 'min() is deprecated, use resize({ fit: "outside" }) instead');
Sharp.prototype.ignoreAspectRatio = deprecate(ignoreAspectRatio, 'ignoreAspectRatio() is deprecated, use resize({ fit: "fill" }) instead');
Sharp.prototype.withoutEnlargement = deprecate(withoutEnlargement, 'withoutEnlargement() is deprecated, use resize({ withoutEnlargement: true }) instead');
};

View File

@@ -1,5 +1,5 @@
site_name: sharp
site_url: https://sharp.pixelplumbing.com/
repo_url: https://github.com/lovell/sharp
site_description: High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images
copyright: <a href="https://pixelplumbing.com/">pixelplumbing.com</a>

View File

@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"version": "0.21.0",
"version": "0.22.0",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -55,13 +55,19 @@
"Alun Davies <alun.owain.davies@googlemail.com>",
"Aidan Hoolachan <ajhoolachan21@gmail.com>",
"Axel Eirola <axel.eirola@iki.fi>",
"Freezy <freezy@xbmc.org>"
"Freezy <freezy@xbmc.org>",
"Daiz <taneli.vatanen@gmail.com>",
"Julian Aubourg <j@ubourg.net>",
"Keith Belovay <keith@picthrive.com>",
"Michael B. Klein <mbklein@gmail.com>"
],
"scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node-gyp rebuild && node install/dll-copy)",
"clean": "rm -rf node_modules/ build/ vendor/ .nyc_output/ coverage/ test/fixtures/output.*",
"test": "semistandard && cc && nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js && prebuild-ci",
"coverage": "./test/coverage/report.sh",
"test": "semistandard && cc && npm run test-unit && npm run test-licensing && prebuild-ci",
"test-unit": "nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js",
"test-licensing": "license-checker --production --summary --onlyAllow=\"Apache-2.0;BSD;ISC;MIT\"",
"test-coverage": "./test/coverage/report.sh",
"test-leak": "./test/leak/leak.sh",
"docs": "for m in constructor input resize composite operation colour channel output utility; do documentation build --shallow --format=md --markdown-toc=false lib/$m.js >docs/api-$m.md; done"
},
@@ -87,35 +93,37 @@
"vips"
],
"dependencies": {
"color": "^3.0.0",
"bindings": "^1.5.0",
"color": "^3.1.0",
"detect-libc": "^1.0.3",
"nan": "^2.11.1",
"fs-copy-file-sync": "^1.1.1",
"nan": "^2.13.1",
"npmlog": "^4.1.2",
"prebuild-install": "^5.2.0",
"semver": "^5.5.1",
"prebuild-install": "^5.2.5",
"semver": "^5.6.0",
"simple-get": "^3.0.3",
"tar": "^4.4.6",
"tar": "^4.4.8",
"tunnel-agent": "^0.6.0"
},
"devDependencies": {
"async": "^2.6.1",
"async": "^2.6.2",
"cc": "^1.0.2",
"decompress-zip": "^0.3.1",
"documentation": "^8.1.2",
"decompress-zip": "^0.3.2",
"documentation": "^9.3.1",
"exif-reader": "^1.0.2",
"icc": "^1.0.0",
"mocha": "^5.2.0",
"mock-fs": "^4.7.0",
"nyc": "^13.1.0",
"prebuild": "^8.1.0",
"prebuild-ci": "^2.2.3",
"rimraf": "^2.6.2",
"semistandard": "^12.0.1"
"license-checker": "^25.0.1",
"mocha": "^6.0.2",
"mock-fs": "^4.8.0",
"nyc": "^13.3.0",
"prebuild": "8.1.0",
"prebuild-ci": "^2.3.0",
"rimraf": "^2.6.3",
"semistandard": "^13.0.1"
},
"license": "Apache-2.0",
"config": {
"libvips": "8.7.0"
"libvips": "8.7.4"
},
"engines": {
"node": ">=6"

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -71,7 +71,10 @@ namespace sharp {
descriptor->rawWidth = AttrTo<uint32_t>(input, "rawWidth");
descriptor->rawHeight = AttrTo<uint32_t>(input, "rawHeight");
}
// Page input for multi-page TIFF
// Multi-page input (GIF, TIFF, PDF)
if (HasAttr(input, "pages")) {
descriptor->pages = AttrTo<int32_t>(input, "pages");
}
if (HasAttr(input, "page")) {
descriptor->page = AttrTo<uint32_t>(input, "page");
}
@@ -137,6 +140,7 @@ namespace sharp {
case ImageType::VIPS: id = "v"; break;
case ImageType::RAW: id = "raw"; break;
case ImageType::UNKNOWN: id = "unknown"; break;
case ImageType::MISSING: id = "missing"; break;
}
return id;
}
@@ -203,10 +207,24 @@ namespace sharp {
} else if (EndsWith(loader, "Magick") || EndsWith(loader, "MagickFile")) {
imageType = ImageType::MAGICK;
}
} else {
if (EndsWith(vips::VError().what(), " not found\n")) {
imageType = ImageType::MISSING;
}
}
return imageType;
}
/*
Does this image type support multiple pages?
*/
bool ImageTypeSupportsPage(ImageType imageType) {
return
imageType == ImageType::GIF ||
imageType == ImageType::TIFF ||
imageType == ImageType::PDF;
}
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/
@@ -238,15 +256,16 @@ namespace sharp {
if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data());
}
if (imageType == ImageType::TIFF) {
option->set("page", descriptor->page);
if (ImageTypeSupportsPage(imageType)) {
option->set("n", descriptor->pages);
option->set("page", descriptor->page);
}
image = VImage::new_from_buffer(descriptor->buffer, descriptor->bufferLength, nullptr, option);
if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
SetDensity(image, descriptor->density);
}
} catch (...) {
throw vips::VError("Input buffer has corrupt header");
} catch (vips::VError const &err) {
throw vips::VError(std::string("Input buffer has corrupt header: ") + err.what());
}
} else {
throw vips::VError("Input buffer contains unsupported image format");
@@ -269,6 +288,9 @@ namespace sharp {
} else {
// From filesystem
imageType = DetermineImageType(descriptor->file.data());
if (imageType == ImageType::MISSING) {
throw vips::VError("Input file is missing");
}
if (imageType != ImageType::UNKNOWN) {
try {
vips::VOption *option = VImage::option()
@@ -280,18 +302,19 @@ namespace sharp {
if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data());
}
if (imageType == ImageType::TIFF) {
option->set("page", descriptor->page);
if (ImageTypeSupportsPage(imageType)) {
option->set("n", descriptor->pages);
option->set("page", descriptor->page);
}
image = VImage::new_from_file(descriptor->file.data(), option);
if (imageType == ImageType::SVG || imageType == ImageType::PDF || imageType == ImageType::MAGICK) {
SetDensity(image, descriptor->density);
}
} catch (...) {
throw vips::VError("Input file has corrupt header");
} catch (vips::VError const &err) {
throw vips::VError(std::string("Input file has corrupt header: ") + err.what());
}
} else {
throw vips::VError("Input file is missing or of an unsupported image format");
throw vips::VError("Input file contains unsupported image format");
}
}
}

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -25,8 +25,8 @@
// Verify platform and compiler compatibility
#if (VIPS_MAJOR_VERSION < 8 || (VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 6))
#error libvips version 8.6.1+ is required - see sharp.pixelplumbing.com/page/install
#if (VIPS_MAJOR_VERSION < 8 || (VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 7))
#error libvips version 8.7.0+ is required - see sharp.pixelplumbing.com/page/install
#endif
#if ((!defined(__clang__)) && defined(__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 6)))
@@ -53,6 +53,7 @@ namespace sharp {
int rawChannels;
int rawWidth;
int rawHeight;
int pages;
int page;
int createChannels;
int createWidth;
@@ -61,12 +62,13 @@ namespace sharp {
InputDescriptor():
buffer(nullptr),
failOnError(FALSE),
failOnError(TRUE),
bufferLength(0),
density(72.0),
rawChannels(0),
rawWidth(0),
rawHeight(0),
pages(1),
page(0),
createChannels(0),
createWidth(0),
@@ -106,7 +108,8 @@ namespace sharp {
FITS,
VIPS,
RAW,
UNKNOWN
UNKNOWN,
MISSING
};
// How many tasks are in the queue?
@@ -139,6 +142,11 @@ namespace sharp {
*/
ImageType DetermineImageType(char const *file);
/*
Does this image type support multiple pages?
*/
bool ImageTypeSupportsPage(ImageType imageType);
/*
Open an image from the given InputDescriptor (filesystem, compressed buffer, raw pixel data)
*/

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -68,6 +68,15 @@ class MetadataWorker : public Nan::AsyncWorker {
if (image.get_typeof("interlaced") == G_TYPE_INT) {
baton->isProgressive = image.get_int("interlaced") == 1;
}
if (image.get_typeof("palette-bit-depth") == G_TYPE_INT) {
baton->paletteBitDepth = image.get_int("palette-bit-depth");
}
if (image.get_typeof(VIPS_META_N_PAGES) == G_TYPE_INT) {
baton->pages = image.get_int(VIPS_META_N_PAGES);
}
if (image.get_typeof(VIPS_META_PAGE_HEIGHT) == G_TYPE_INT) {
baton->pageHeight = image.get_int(VIPS_META_PAGE_HEIGHT);
}
baton->hasProfile = sharp::HasProfile(image);
// Derived attributes
baton->hasAlpha = sharp::HasAlpha(image);
@@ -140,6 +149,15 @@ class MetadataWorker : public Nan::AsyncWorker {
New<v8::String>(baton->chromaSubsampling).ToLocalChecked());
}
Set(info, New("isProgressive").ToLocalChecked(), New<v8::Boolean>(baton->isProgressive));
if (baton->paletteBitDepth > 0) {
Set(info, New("paletteBitDepth").ToLocalChecked(), New<v8::Uint32>(baton->paletteBitDepth));
}
if (baton->pages > 0) {
Set(info, New("pages").ToLocalChecked(), New<v8::Uint32>(baton->pages));
}
if (baton->pageHeight > 0) {
Set(info, New("pageHeight").ToLocalChecked(), New<v8::Uint32>(baton->pageHeight));
}
Set(info, New("hasProfile").ToLocalChecked(), New<v8::Boolean>(baton->hasProfile));
Set(info, New("hasAlpha").ToLocalChecked(), New<v8::Boolean>(baton->hasAlpha));
if (baton->orientation > 0) {

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -33,6 +33,9 @@ struct MetadataBaton {
int density;
std::string chromaSubsampling;
bool isProgressive;
int paletteBitDepth;
int pages;
int pageHeight;
bool hasProfile;
bool hasAlpha;
int orientation;
@@ -53,6 +56,9 @@ struct MetadataBaton {
channels(0),
density(0),
isProgressive(false),
paletteBitDepth(0),
pages(0),
pageHeight(0),
hasProfile(false),
hasAlpha(false),
orientation(0),

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -39,127 +39,15 @@ namespace sharp {
}
/*
Composite overlayImage over image at given position
Assumes alpha channels are already premultiplied and will be unpremultiplied after
*/
VImage Composite(VImage image, VImage overlayImage, int const left, int const top) {
if (HasAlpha(overlayImage)) {
// Alpha composite
if (overlayImage.width() < image.width() || overlayImage.height() < image.height()) {
// Enlarge overlay
std::vector<double> const background { 0.0, 0.0, 0.0, 0.0 };
overlayImage = overlayImage.embed(left, top, image.width(), image.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
return AlphaComposite(image, overlayImage);
} else {
if (HasAlpha(image)) {
// Add alpha channel to overlayImage so channels match
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
}
return image.insert(overlayImage, left, top);
Ensures alpha channel, if missing.
*/
VImage EnsureAlpha(VImage image) {
if (!HasAlpha(image)) {
std::vector<double> alpha;
alpha.push_back(sharp::MaximumImageAlpha(image.interpretation()));
image = image.bandjoin_const(alpha);
}
}
VImage AlphaComposite(VImage dst, VImage src) {
// Split src into non-alpha and alpha channels
VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1));
VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0);
// Split dst into non-alpha and alpha channels
VImage dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
VImage dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0);
//
// Compute normalized output alpha channel:
//
// References:
// - http://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending
// - https://github.com/libvips/ruby-vips/issues/28#issuecomment-9014826
//
// out_a = src_a + dst_a * (1 - src_a)
// ^^^^^^^^^^^
// t0
VImage t0 = srcAlpha.linear(-1.0, 1.0);
VImage outAlphaNormalized = srcAlpha + dstAlpha * t0;
//
// Compute output RGB channels:
//
// Wikipedia:
// out_rgb = (src_rgb * src_a + dst_rgb * dst_a * (1 - src_a)) / out_a
// ^^^^^^^^^^^
// t0
//
// Omit division by `out_a` since `Compose` is supposed to output a
// premultiplied RGBA image as reversal of premultiplication is handled
// externally.
//
VImage outRGBPremultiplied = srcWithoutAlpha + dstWithoutAlpha * t0;
// Combine RGB and alpha channel into output image:
return outRGBPremultiplied.bandjoin(outAlphaNormalized * 255.0);
}
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage mask, VImage dst, const int gravity) {
using sharp::CalculateCrop;
using sharp::HasAlpha;
using sharp::MaximumImageAlpha;
bool maskHasAlpha = HasAlpha(mask);
if (!maskHasAlpha && mask.bands() > 1) {
throw VError("Overlay image must have an alpha channel or one band");
}
if (!HasAlpha(dst)) {
throw VError("Image to be overlaid must have an alpha channel");
}
if (mask.width() > dst.width() || mask.height() > dst.height()) {
throw VError("Overlay image must have same dimensions or smaller");
}
// Enlarge overlay mask, if required
if (mask.width() < dst.width() || mask.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), mask.width(), mask.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
// we use the mask alpha if it has alpha
if (maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));;
}
// Split dst into an optional alpha
VImage dstAlpha = dst.extract_band(dst.bands() - 1, VImage::option()->set("n", 1));
// we use the dst non-alpha
dst = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
// the range of the mask and the image need to match .. one could be
// 16-bit, one 8-bit
double const dstMax = MaximumImageAlpha(dst.interpretation());
double const maskMax = MaximumImageAlpha(mask.interpretation());
// combine the new mask and the existing alpha ... there are
// many ways of doing this, mult is the simplest
mask = dstMax * ((mask / maskMax) * (dstAlpha / dstMax));
// append the mask to the image data ... the mask might be float now,
// we must cast the format down to match the image data
return dst.bandjoin(mask.cast(dst.format()));
return image;
}
/*
@@ -278,6 +166,25 @@ namespace sharp {
return image.conv(kernel);
}
/*
* Recomb with a Matrix of the given bands/channel size.
* Eg. RGB will be a 3x3 matrix.
*/
VImage Recomb(VImage image, std::unique_ptr<double[]> const &matrix) {
double *m = matrix.get();
return image
.colourspace(VIPS_INTERPRETATION_sRGB)
.recomb(image.bands() == 3
? VImage::new_from_memory(
m, 9 * sizeof(double), 3, 3, 1, VIPS_FORMAT_DOUBLE
)
: VImage::new_matrixv(4, 4,
m[0], m[1], m[2], 0.0,
m[3], m[4], m[5], 0.0,
m[6], m[7], m[8], 0.0,
0.0, 0.0, 0.0, 1.0));
}
/*
* Sharpen flat and jagged areas. Use sigma of -1.0 for fast sharpen.
*/

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -31,25 +31,9 @@ namespace sharp {
VImage RemoveAlpha(VImage image);
/*
Alpha composite src over dst with given gravity.
Assumes alpha channels are already premultiplied and will be unpremultiplied after.
*/
VImage Composite(VImage src, VImage dst, const int gravity);
/*
Composite overlayImage over image at given position
*/
VImage Composite(VImage image, VImage overlayImage, int const x, int const y);
/*
Alpha composite overlayImage over image, assumes matching dimensions
Ensures alpha channel, if missing.
*/
VImage AlphaComposite(VImage image, VImage overlayImage);
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage src, VImage dst, const int gravity);
VImage EnsureAlpha(VImage image);
/*
* Tint an image using the specified chroma, preserving the original image luminance
@@ -107,6 +91,12 @@ namespace sharp {
*/
VImage Linear(VImage image, double const a, double const b);
/*
* Recomb with a Matrix of the given bands/channel size.
* Eg. RGB will be a 3x3 matrix.
*/
VImage Recomb(VImage image, std::unique_ptr<double[]> const &matrix);
} // namespace sharp
#endif // SRC_OPERATIONS_H_

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -297,7 +297,7 @@ class PipelineWorker : public Nan::AsyncWorker {
}
// Ensure we're using a device-independent colour space
if (sharp::HasProfile(image)) {
if (sharp::HasProfile(image) && image.interpretation() != VIPS_INTERPRETATION_LABS) {
// Convert to sRGB using embedded profile
try {
image = image.icc_transform(
@@ -343,30 +343,19 @@ class PipelineWorker : public Nan::AsyncWorker {
image = image.colourspace(VIPS_INTERPRETATION_B_W);
}
// Ensure image has an alpha channel when there is an overlay with an alpha channel
VImage overlayImage;
ImageType overlayImageType = ImageType::UNKNOWN;
bool shouldOverlayWithAlpha = FALSE;
if (baton->overlay != nullptr) {
std::tie(overlayImage, overlayImageType) = OpenInput(baton->overlay, baton->accessMethod);
if (HasAlpha(overlayImage)) {
shouldOverlayWithAlpha = !baton->overlayCutout;
if (!HasAlpha(image)) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
}
}
bool const shouldResize = xfactor != 1.0 || yfactor != 1.0;
bool const shouldBlur = baton->blurSigma != 0.0;
bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
if (shouldComposite && !HasAlpha(image)) {
image = sharp::EnsureAlpha(image);
}
bool const shouldPremultiplyAlpha = HasAlpha(image) &&
(shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldOverlayWithAlpha);
(shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldComposite);
// Premultiply image alpha channel before all transformations to avoid
// dark fringing around bright pixels
@@ -381,10 +370,19 @@ class PipelineWorker : public Nan::AsyncWorker {
vips_enum_from_nick(nullptr, VIPS_TYPE_KERNEL, baton->kernel.data()));
if (
kernel != VIPS_KERNEL_NEAREST && kernel != VIPS_KERNEL_CUBIC && kernel != VIPS_KERNEL_LANCZOS2 &&
kernel != VIPS_KERNEL_LANCZOS3
kernel != VIPS_KERNEL_LANCZOS3 && kernel != VIPS_KERNEL_MITCHELL
) {
throw vips::VError("Unknown kernel");
}
// Ensure shortest edge is at least 1 pixel
if (image.width() / xfactor < 0.5) {
xfactor = 2 * image.width();
baton->width = 1;
}
if (image.height() / yfactor < 0.5) {
yfactor = 2 * image.height();
baton->height = 1;
}
image = image.resize(1.0 / xfactor, VImage::option()
->set("vscale", 1.0 / yfactor)
->set("kernel", kernel));
@@ -525,77 +523,77 @@ class PipelineWorker : public Nan::AsyncWorker {
baton->convKernel);
}
// Recomb
if (baton->recombMatrix != NULL) {
image = sharp::Recomb(image, baton->recombMatrix);
}
// Sharpen
if (shouldSharpen) {
image = sharp::Sharpen(image, baton->sharpenSigma, baton->sharpenFlat, baton->sharpenJagged);
}
// Composite with overlay, if present
if (baton->overlay != nullptr) {
// Verify overlay image is within current dimensions
if (overlayImage.width() > image.width() || overlayImage.height() > image.height()) {
throw vips::VError("Overlay image must have same dimensions or smaller");
}
// Check if overlay is tiled
if (baton->overlayTile) {
int const overlayImageWidth = overlayImage.width();
int const overlayImageHeight = overlayImage.height();
int across = 0;
int down = 0;
// Use gravity in overlay
if (overlayImageWidth <= baton->width) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / overlayImageWidth));
// Composite
if (shouldComposite) {
for (Composite *composite : baton->composite) {
VImage compositeImage;
ImageType compositeImageType = ImageType::UNKNOWN;
std::tie(compositeImage, compositeImageType) = OpenInput(composite->input, baton->accessMethod);
// Verify within current dimensions
if (compositeImage.width() > image.width() || compositeImage.height() > image.height()) {
throw vips::VError("Image to composite must have same dimensions or smaller");
}
if (overlayImageHeight <= baton->height) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / overlayImageHeight));
}
if (across != 0 || down != 0) {
int left;
int top;
overlayImage = overlayImage.replicate(across, down);
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
// the overlayX/YOffsets will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(),
baton->overlayXOffset, baton->overlayYOffset);
} else {
// the overlayGravity will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity);
// Check if overlay is tiled
if (composite->tile) {
int across = 0;
int down = 0;
// Use gravity in overlay
if (compositeImage.width() <= baton->width) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / compositeImage.width()));
}
overlayImage = overlayImage.extract_area(left, top, image.width(), image.height());
}
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0
baton->overlayGravity = 0;
}
if (baton->overlayCutout) {
// 'cut out' the image, premultiplication is not required
image = sharp::Cutout(overlayImage, image, baton->overlayGravity);
} else {
// Ensure overlay is sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB);
// Ensure overlay matches premultiplication state
if (shouldPremultiplyAlpha) {
// Ensure overlay has alpha channel
if (!HasAlpha(overlayImage)) {
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
if (compositeImage.height() <= baton->height) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / compositeImage.height()));
}
overlayImage = overlayImage.premultiply();
if (across != 0 || down != 0) {
int left;
int top;
compositeImage = compositeImage.replicate(across, down);
if (composite->left >= 0 && composite->top >= 0) {
std::tie(left, top) = sharp::CalculateCrop(
compositeImage.width(), compositeImage.height(), image.width(), image.height(),
composite->left, composite->top);
} else {
std::tie(left, top) = sharp::CalculateCrop(
compositeImage.width(), compositeImage.height(), image.width(), image.height(), composite->gravity);
}
compositeImage = compositeImage.extract_area(left, top, image.width(), image.height());
}
// gravity was used for extract_area, set it back to its default value of 0
composite->gravity = 0;
}
// Ensure image to composite is sRGB with premultiplied alpha
compositeImage = compositeImage.colourspace(VIPS_INTERPRETATION_sRGB);
if (!HasAlpha(compositeImage)) {
compositeImage = sharp::EnsureAlpha(compositeImage);
}
compositeImage = compositeImage.premultiply();
// Calculate position
int left;
int top;
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
// Composite images at given offsets
if (composite->left >= 0 && composite->top >= 0) {
// Composite image at given offsets
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayXOffset, baton->overlayYOffset);
compositeImage.width(), compositeImage.height(), composite->left, composite->top);
} else {
// Composite images with given gravity
// Composite image with given gravity
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayGravity);
compositeImage.width(), compositeImage.height(), composite->gravity);
}
image = sharp::Composite(image, overlayImage, left, top);
// Composite
image = image.composite2(compositeImage, composite->mode, VImage::option()
->set("premultiplied", TRUE)
->set("x", left)
->set("y", top));
}
}
@@ -612,8 +610,8 @@ class PipelineWorker : public Nan::AsyncWorker {
baton->premultiplied = shouldPremultiplyAlpha;
// Gamma decoding (brighten)
if (baton->gamma >= 1 && baton->gamma <= 3) {
image = sharp::Gamma(image, baton->gamma);
if (baton->gammaOut >= 1 && baton->gammaOut <= 3) {
image = sharp::Gamma(image, baton->gammaOut);
}
// Linear adjustment (a * in + b)
@@ -663,6 +661,11 @@ class PipelineWorker : public Nan::AsyncWorker {
image = sharp::RemoveAlpha(image);
}
// Ensure alpha channel, if missing
if (baton->ensureAlpha) {
image = sharp::EnsureAlpha(image);
}
// Convert image to sRGB, if not already
if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT);
@@ -716,14 +719,15 @@ class PipelineWorker : public Nan::AsyncWorker {
(inputImageType == ImageType::PNG || inputImageType == ImageType::GIF || inputImageType == ImageType::SVG))) {
// Write PNG to buffer
sharp::AssertImageTypeDimensions(image, ImageType::PNG);
// Strip profile
if (!baton->withMetadata) {
vips_image_remove(image.get_image(), VIPS_META_ICC_NAME);
}
VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)));
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("dither", baton->pngDither)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -758,6 +762,10 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("squash", baton->tiffSquash)
->set("compression", baton->tiffCompression)
->set("predictor", baton->tiffPredictor)
->set("pyramid", baton->tiffPyramid)
->set("tile", baton->tiffTile)
->set("tile_height", baton->tiffTileHeight)
->set("tile_width", baton->tiffTileWidth)
->set("xres", baton->tiffXres)
->set("yres", baton->tiffYres)));
baton->bufferOut = static_cast<char*>(area->data);
@@ -771,6 +779,7 @@ class PipelineWorker : public Nan::AsyncWorker {
if (baton->greyscale || image.interpretation() == VIPS_INTERPRETATION_B_W) {
// Extract first band for greyscale image
image = image[0];
baton->channels = 1;
}
if (image.format() != VIPS_FORMAT_UCHAR) {
// Cast pixels to uint8 (unsigned char)
@@ -824,14 +833,15 @@ class PipelineWorker : public Nan::AsyncWorker {
(inputImageType == ImageType::PNG || inputImageType == ImageType::GIF || inputImageType == ImageType::SVG))) {
// Write PNG to file
sharp::AssertImageTypeDimensions(image, ImageType::PNG);
// Strip profile
if (!baton->withMetadata) {
vips_image_remove(image.get_image(), VIPS_META_ICC_NAME);
}
image.pngsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE));
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)
->set("palette", baton->pngPalette)
->set("Q", baton->pngQuality)
->set("colours", baton->pngColours)
->set("dither", baton->pngDither));
baton->formatOut = "png";
} else if (baton->formatOut == "webp" || (mightMatchInput && isWebp) ||
(willMatchInput && inputImageType == ImageType::WEBP)) {
@@ -856,6 +866,10 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("squash", baton->tiffSquash)
->set("compression", baton->tiffCompression)
->set("predictor", baton->tiffPredictor)
->set("pyramid", baton->tiffPyramid)
->set("tile", baton->tiffTile)
->set("tile_height", baton->tiffTileHeight)
->set("tile_width", baton->tiffTileWidth)
->set("xres", baton->tiffXres)
->set("yres", baton->tiffYres));
baton->formatOut = "tiff";
@@ -999,13 +1013,17 @@ class PipelineWorker : public Nan::AsyncWorker {
GetFromPersistent(index);
return index + 1;
});
// Delete baton
delete baton->input;
delete baton->overlay;
delete baton->boolean;
for_each(baton->joinChannelIn.begin(), baton->joinChannelIn.end(),
[this](sharp::InputDescriptor *joinChannelIn) {
delete joinChannelIn;
});
for (Composite *composite : baton->composite) {
delete composite->input;
delete composite;
}
for (sharp::InputDescriptor *input : baton->joinChannelIn) {
delete input;
}
delete baton;
// Handle warnings
@@ -1152,14 +1170,21 @@ NAN_METHOD(pipeline) {
// Tint chroma
baton->tintA = AttrTo<double>(options, "tintA");
baton->tintB = AttrTo<double>(options, "tintB");
// Overlay options
if (HasAttr(options, "overlay")) {
baton->overlay = CreateInputDescriptor(AttrAs<v8::Object>(options, "overlay"), buffersToPersist);
baton->overlayGravity = AttrTo<int32_t>(options, "overlayGravity");
baton->overlayXOffset = AttrTo<int32_t>(options, "overlayXOffset");
baton->overlayYOffset = AttrTo<int32_t>(options, "overlayYOffset");
baton->overlayTile = AttrTo<bool>(options, "overlayTile");
baton->overlayCutout = AttrTo<bool>(options, "overlayCutout");
// Composite
v8::Local<v8::Array> compositeArray = Nan::Get(options, Nan::New("composite").ToLocalChecked())
.ToLocalChecked().As<v8::Array>();
int const compositeArrayLength = AttrTo<uint32_t>(compositeArray, "length");
for (int i = 0; i < compositeArrayLength; i++) {
v8::Local<v8::Object> compositeObject = Nan::Get(compositeArray, i).ToLocalChecked().As<v8::Object>();
Composite *composite = new Composite;
composite->input = CreateInputDescriptor(AttrAs<v8::Object>(compositeObject, "input"), buffersToPersist);
composite->mode = static_cast<VipsBlendMode>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BLEND_MODE, AttrAsStr(compositeObject, "blend").data()));
composite->gravity = AttrTo<uint32_t>(compositeObject, "gravity");
composite->left = AttrTo<int32_t>(compositeObject, "left");
composite->top = AttrTo<int32_t>(compositeObject, "top");
composite->tile = AttrTo<bool>(compositeObject, "tile");
baton->composite.push_back(composite);
}
// Resize options
baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement");
@@ -1193,6 +1218,7 @@ NAN_METHOD(pipeline) {
baton->thresholdGrayscale = AttrTo<bool>(options, "thresholdGrayscale");
baton->trimThreshold = AttrTo<double>(options, "trimThreshold");
baton->gamma = AttrTo<double>(options, "gamma");
baton->gammaOut = AttrTo<double>(options, "gammaOut");
baton->linearA = AttrTo<double>(options, "linearA");
baton->linearB = AttrTo<double>(options, "linearB");
baton->greyscale = AttrTo<bool>(options, "greyscale");
@@ -1212,6 +1238,7 @@ NAN_METHOD(pipeline) {
baton->extractChannel = AttrTo<int32_t>(options, "extractChannel");
baton->removeAlpha = AttrTo<bool>(options, "removeAlpha");
baton->ensureAlpha = AttrTo<bool>(options, "ensureAlpha");
if (HasAttr(options, "boolean")) {
baton->boolean = CreateInputDescriptor(AttrAs<v8::Object>(options, "boolean"), buffersToPersist);
baton->booleanOp = sharp::GetBooleanOperation(AttrAsStr(options, "booleanOp"));
@@ -1232,6 +1259,13 @@ NAN_METHOD(pipeline) {
baton->convKernel[i] = AttrTo<double>(kdata, i);
}
}
if (HasAttr(options, "recombMatrix")) {
baton->recombMatrix = std::unique_ptr<double[]>(new double[9]);
v8::Local<v8::Array> recombMatrix = AttrAs<v8::Array>(options, "recombMatrix");
for (unsigned int i = 0; i < 9; i++) {
baton->recombMatrix[i] = AttrTo<double>(recombMatrix, i);
}
}
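The 3x3 matrix parsed here originates from the recomb() operation in the JavaScript API. A sketch using a sepia-style matrix; the values are illustrative only:

// Recombine RGB channels with a 3x3 matrix
sharp('input.jpg')
  .recomb([
    [0.3588, 0.7044, 0.1368],
    [0.2990, 0.5870, 0.1140],
    [0.2392, 0.4696, 0.0912]
  ])
  .toBuffer();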
baton->colourspace = sharp::GetInterpretation(AttrAsStr(options, "colourspace"));
if (baton->colourspace == VIPS_INTERPRETATION_ERROR) {
baton->colourspace = VIPS_INTERPRETATION_sRGB;
@@ -1253,12 +1287,20 @@ NAN_METHOD(pipeline) {
baton->pngProgressive = AttrTo<bool>(options, "pngProgressive");
baton->pngCompressionLevel = AttrTo<uint32_t>(options, "pngCompressionLevel");
baton->pngAdaptiveFiltering = AttrTo<bool>(options, "pngAdaptiveFiltering");
baton->pngPalette = AttrTo<bool>(options, "pngPalette");
baton->pngQuality = AttrTo<uint32_t>(options, "pngQuality");
baton->pngColours = AttrTo<uint32_t>(options, "pngColours");
baton->pngDither = AttrTo<double>(options, "pngDither");
baton->webpQuality = AttrTo<uint32_t>(options, "webpQuality");
baton->webpAlphaQuality = AttrTo<uint32_t>(options, "webpAlphaQuality");
baton->webpLossless = AttrTo<bool>(options, "webpLossless");
baton->webpNearLossless = AttrTo<bool>(options, "webpNearLossless");
baton->tiffQuality = AttrTo<uint32_t>(options, "tiffQuality");
baton->tiffPyramid = AttrTo<bool>(options, "tiffPyramid");
baton->tiffSquash = AttrTo<bool>(options, "tiffSquash");
baton->tiffTile = AttrTo<bool>(options, "tiffTile");
baton->tiffTileWidth = AttrTo<uint32_t>(options, "tiffTileWidth");
baton->tiffTileHeight = AttrTo<uint32_t>(options, "tiffTileHeight");
baton->tiffXres = AttrTo<double>(options, "tiffXres");
baton->tiffYres = AttrTo<double>(options, "tiffYres");
// tiff compression options

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -34,6 +34,23 @@ enum class Canvas {
IGNORE_ASPECT
};
struct Composite {
sharp::InputDescriptor *input;
VipsBlendMode mode;
int gravity;
int left;
int top;
bool tile;
Composite():
input(nullptr),
mode(VIPS_BLEND_MODE_OVER),
gravity(0),
left(-1),
top(-1),
tile(false) {}
};
struct PipelineBaton {
sharp::InputDescriptor *input;
std::string iccProfilePath;
@@ -42,12 +59,7 @@ struct PipelineBaton {
std::string fileOut;
void *bufferOut;
size_t bufferOutLength;
sharp::InputDescriptor *overlay;
int overlayGravity;
int overlayXOffset;
int overlayYOffset;
bool overlayTile;
bool overlayCutout;
std::vector<Composite *> composite;
std::vector<sharp::InputDescriptor *> joinChannelIn;
int topOffsetPre;
int leftOffsetPre;
@@ -87,6 +99,7 @@ struct PipelineBaton {
double linearA;
double linearB;
double gamma;
double gammaOut;
bool greyscale;
bool normalise;
bool useExifOrientation;
@@ -114,6 +127,10 @@ struct PipelineBaton {
bool pngProgressive;
int pngCompressionLevel;
bool pngAdaptiveFiltering;
bool pngPalette;
int pngQuality;
int pngColours;
double pngDither;
int webpQuality;
int webpAlphaQuality;
bool webpNearLossless;
@@ -121,7 +138,11 @@ struct PipelineBaton {
int tiffQuality;
VipsForeignTiffCompression tiffCompression;
VipsForeignTiffPredictor tiffPredictor;
bool tiffPyramid;
bool tiffSquash;
bool tiffTile;
int tiffTileHeight;
int tiffTileWidth;
double tiffXres;
double tiffYres;
std::string err;
@@ -137,6 +158,7 @@ struct PipelineBaton {
VipsOperationBoolean bandBoolOp;
int extractChannel;
bool removeAlpha;
bool ensureAlpha;
VipsInterpretation colourspace;
int tileSize;
int tileOverlap;
@@ -145,17 +167,12 @@ struct PipelineBaton {
std::string tileFormat;
int tileAngle;
VipsForeignDzDepth tileDepth;
std::unique_ptr<double[]> recombMatrix;
PipelineBaton():
input(nullptr),
limitInputPixels(0),
bufferOutLength(0),
overlay(nullptr),
overlayGravity(0),
overlayXOffset(-1),
overlayYOffset(-1),
overlayTile(false),
overlayCutout(false),
topOffsetPre(-1),
topOffsetPost(-1),
channels(0),
@@ -209,11 +226,19 @@ struct PipelineBaton {
pngProgressive(false),
pngCompressionLevel(9),
pngAdaptiveFiltering(false),
pngPalette(false),
pngQuality(100),
pngColours(256),
pngDither(1.0),
webpQuality(80),
tiffQuality(80),
tiffCompression(VIPS_FOREIGN_TIFF_COMPRESSION_JPEG),
tiffPredictor(VIPS_FOREIGN_TIFF_PREDICTOR_HORIZONTAL),
tiffPyramid(false),
tiffSquash(false),
tiffTile(false),
tiffTileHeight(256),
tiffTileWidth(256),
tiffXres(1.0),
tiffYres(1.0),
withMetadata(false),
@@ -227,6 +252,7 @@ struct PipelineBaton {
bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST),
extractChannel(-1),
removeAlpha(false),
ensureAlpha(false),
colourspace(VIPS_INTERPRETATION_LAST),
tileSize(256),
tileOverlap(0),

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

View File

@@ -1,4 +1,4 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
// Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

[11 binary image files added; previews not shown. Sizes: 82 KiB, 77 KiB, 85 KiB, 209 KiB, 175 KiB, 222 B, 197 B, 197 B, 194 B, 192 B, 1.8 KiB]

View File

@@ -70,6 +70,7 @@ module.exports = {
inputJpgCenteredImage: getPath('centered_image.jpeg'),
inputJpgRandom: getPath('random.jpg'), // convert -size 200x200 xc: +noise Random random.jpg
inputJpgThRandom: getPath('thRandom.jpg'), // convert random.jpg -channel G -threshold 5% -separate +channel -negate thRandom.jpg
inputJpgLossless: getPath('testimgl.jpg'), // Lossless JPEG from ftp://ftp.fu-berlin.de/unix/X11/graphics/ImageMagick/delegates/ljpeg-6b.tar.gz
inputPng: getPath('50020484-00001.png'), // http://c.searspartsdirect.com/lis_png/PLDM/50020484-00001.png
inputPngWithTransparency: getPath('blackbug.png'), // public domain
@@ -99,6 +100,7 @@ module.exports = {
inputTiff8BitDepth: getPath('8bit_depth.tiff'),
inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
inputGifGreyPlusAlpha: getPath('grey-plus-alpha.gif'), // http://i.imgur.com/gZ5jlmE.gif
inputGifAnimated: getPath('rotating-squares.gif'), // CC0 https://loading.io/spinner/blocks/-rotating-squares-preloader-gif
inputSvg: getPath('check.svg'), // http://dev.w3.org/SVG/tools/svgweb/samples/svg-files/check.svg
inputSvgWithEmbeddedImages: getPath('struct-image-04-t.svg'), // https://dev.w3.org/SVG/profiles/1.2T/test/svg/struct-image-04-t.svg

BIN test/fixtures/rotating-squares.gif vendored Normal file (binary, 41 KiB; preview not shown)

BIN test/fixtures/testimgl.jpg vendored Normal file (binary, 38 KiB; preview not shown)

View File

@@ -147,6 +147,47 @@
...
fun:WebPDecode
}
{
cond_libwebp_generic
Memcheck:Cond
obj:/usr/lib/x86_64-linux-gnu/libwebp.so.6.0.2
}
# tiff
{
param_tiff_write_encoded_tile
Memcheck:Param
write(buf)
fun:write
...
fun:TIFFWriteEncodedTile
}
# gsf
{
param_gsf_output_write
Memcheck:Param
write(buf)
fun:write
...
fun:gsf_output_write
}
{
value_gsf_output_write_crc32_little
Memcheck:Value8
fun:crc32_little
...
fun:gsf_output_write
}
# fontconfig
{
leak_fontconfig_FcConfigSubstituteWithPat
Memcheck:Leak
match-leak-kinds: definite,indirect
...
fun:FcConfigSubstituteWithPat
}
# libvips
{
@@ -197,6 +238,11 @@
...
fun:vips_region_prepare_to
}
{
cond_libvips_vips_stats_scan
Memcheck:Cond
fun:vips_stats_scan
}
{
value_libvips_vips_region_fill
Memcheck:Value8
@@ -204,6 +250,17 @@
fun:vips_region_fill
fun:vips_region_prepare
}
{
value_libvips_vips_hist_find_uchar_scan
Memcheck:Value8
fun:vips_hist_find_uchar_scan
}
{
value_libvips_write_webp_image
Memcheck:Value8
...
fun:write_webp_image
}
{
leak_libvips_init
Memcheck:Leak
@@ -377,6 +434,70 @@
...
fun:_ZN4node12NodePlatformC1EiPN2v817TracingControllerE
}
{
param_nodejs_delayed_task_scheduler
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZZN4node20BackgroundTaskRunner20DelayedTaskScheduler5StartEvENUlPvE_4_FUNES2_
}
{
param_nodejs_isolate_data
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node5StartEPN2v87IsolateEPNS_11IsolateDataERKSt6vectorISsSaISsEES9_
}
{
param_nodejs_try_init_and_run_loop
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node17SyncProcessRunner23TryInitializeAndRunLoopEN2v85LocalINS1_5ValueEEE
}
{
param_nodejs_run_exit_handlers
Memcheck:Param
epoll_ctl(event)
fun:epoll_ctl
fun:uv__io_poll
fun:uv_run
fun:_ZN4node7tracing5AgentD1Ev
fun:_ZN4node5._215D1Ev
fun:__run_exit_handlers
}
{
leak_nodejs_crypto_entropy_source
Memcheck:Leak
...
fun:_ZN4node6crypto13EntropySourceEPhm
}
{
leak_nodejs_debug_options
Memcheck:Leak
...
fun:_ZN4node9inspector5Agent5StartERKSsSt10shared_ptrINS_12DebugOptionsEEb
}
{
leak_nodejs_start
Memcheck:Leak
match-leak-kinds: definite
fun:_Znwm
fun:_ZN4node5StartEiPPc
}
{
leak_nodejs_start_background_task_runner
Memcheck:Leak
match-leak-kinds: possible
...
fun:_ZN4node20BackgroundTaskRunnerC1Ei
}
{
leak_nan_FunctionCallbackInfo
Memcheck:Leak

View File

@@ -22,7 +22,7 @@ const median = function (values) {
// List of files
fs.readdirSync(userDataDir).forEach(function (file) {
// Contents of file
const lines = fs.readFileSync(path.join(userDataDir, file), {encoding: 'utf-8'}).split(/\r\n/);
const lines = fs.readFileSync(path.join(userDataDir, file), { encoding: 'utf-8' }).split(/\r\n/);
// First line = number of entries
const entries = parseInt(lines[0], 10);
// Verify number of entries

View File

@@ -115,6 +115,7 @@ describe('Alpha transparency', function () {
fixtures.inputWebP
].map(function (input) {
return sharp(input)
.resize(10)
.removeAlpha()
.toBuffer({ resolveWithObject: true })
.then(function (result) {
@@ -122,4 +123,24 @@ describe('Alpha transparency', function () {
});
}));
});
it('Ensures alpha from fixtures without transparency, ignores those with', function () {
return Promise.all([
fixtures.inputPngWithTransparency,
fixtures.inputPngWithTransparency16bit,
fixtures.inputWebPWithTransparency,
fixtures.inputJpg,
fixtures.inputPng,
fixtures.inputWebP
].map(function (input) {
return sharp(input)
.resize(10)
.ensureAlpha()
.png()
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(4, result.info.channels);
});
}));
});
});

test/unit/composite.js Normal file (298 lines)
View File

@@ -0,0 +1,298 @@
'use strict';
const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');
const red = { r: 255, g: 0, b: 0, alpha: 0.5 };
const green = { r: 0, g: 255, b: 0, alpha: 0.5 };
const blue = { r: 0, g: 0, b: 255, alpha: 0.5 };
const redRect = {
create: {
width: 80,
height: 60,
channels: 4,
background: red
}
};
const greenRect = {
create: {
width: 40,
height: 40,
channels: 4,
background: green
}
};
const blueRect = {
create: {
width: 60,
height: 40,
channels: 4,
background: blue
}
};
const blends = [
'over',
'xor',
'saturate',
'dest-over'
];
// Test
describe('composite', () => {
it('blend', () => Promise.all(
blends.map(blend => {
const filename = `composite.blend.${blend}.png`;
const actual = fixtures.path(`output.${filename}`);
const expected = fixtures.expected(filename);
return sharp(redRect)
.composite([{
input: blueRect,
blend
}])
.toFile(actual)
.then(() => {
fixtures.assertMaxColourDistance(actual, expected);
});
})
));
it('multiple', () => {
const filename = 'composite-multiple.png';
const actual = fixtures.path(`output.${filename}`);
const expected = fixtures.expected(filename);
return sharp(redRect)
.composite([{
input: blueRect,
gravity: 'northeast'
}, {
input: greenRect,
gravity: 'southwest'
}])
.toFile(actual)
.then(() => {
fixtures.assertMaxColourDistance(actual, expected);
});
});
it('zero offset', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
top: 0,
left: 0
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-0.jpg'), data, done);
});
});
it('offset and gravity', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
gravity: 4
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity.jpg'), data, done);
});
});
it('offset, gravity and tile', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
gravity: 4,
tile: true
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity-tile.jpg'), data, done);
});
});
it('offset and tile', done => {
sharp(fixtures.inputJpg)
.resize(400)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
left: 10,
top: 10,
tile: true
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('overlay-offset-with-tile.jpg'), data, done);
});
});
it('cutout via dest-in', done => {
sharp(fixtures.inputJpg)
.resize(300, 300)
.composite([{
input: Buffer.from('<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'),
density: 96,
blend: 'dest-in',
cutout: true
}])
.png()
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(300, info.width);
assert.strictEqual(300, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('composite-cutout.png'), data, done);
});
});
describe('numeric gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
gravity: gravity
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected(`overlay-gravity-${gravity}.jpg`), data, done);
});
});
});
});
describe('string gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
const expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
gravity: sharp.gravity[gravity]
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
describe('tile and gravity', () => {
Object.keys(sharp.gravity).forEach(gravity => {
it(gravity, done => {
const expected = fixtures.expected('overlay-tile-gravity-' + gravity + '.jpg');
sharp(fixtures.inputJpg)
.resize(80)
.composite([{
input: fixtures.inputPngWithTransparency16bit,
tile: true,
gravity: gravity
}])
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
describe('validation', () => {
it('missing images', () => {
assert.throws(() => {
sharp().composite();
}, /Expected array for images to composite but received undefined of type undefined/);
});
it('invalid images', () => {
assert.throws(() => {
sharp().composite(['invalid']);
}, /Expected object for image to composite but received invalid of type string/);
});
it('missing input', () => {
assert.throws(() => {
sharp().composite([{}]);
}, /Unsupported input/);
});
it('invalid blend', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', blend: 'invalid' }]);
}, /Expected valid blend name for blend but received invalid of type string/);
});
it('invalid tile', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', tile: 'invalid' }]);
}, /Expected boolean for tile but received invalid of type string/);
});
it('invalid left', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', left: 0.5 }]);
}, /Expected positive integer for left but received 0.5 of type number/);
});
it('invalid top', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', top: -1 }]);
}, /Expected positive integer for top but received -1 of type number/);
});
it('left but no top', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', left: 1 }]);
}, /Expected both left and top to be set/);
});
it('top but no left', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', top: 1 }]);
}, /Expected both left and top to be set/);
});
it('invalid gravity', () => {
assert.throws(() => {
sharp().composite([{ input: 'test', gravity: 'invalid' }]);
}, /Expected valid gravity for gravity but received invalid of type string/);
});
});
});

View File

@@ -1,73 +0,0 @@
'use strict';
const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');
describe('Deprecated background', function () {
it('Flatten to RGB orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background({r: 255, g: 102, b: 0})
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten to CSS/hex orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background('#ff6600')
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten 16-bit PNG with transparency to orange', function (done) {
const output = fixtures.path('output.flatten-rgb16-orange.jpg');
sharp(fixtures.inputPngWithTransparency16bit)
.flatten()
.background({r: 255, g: 102, b: 0})
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual(32, info.width);
assert.strictEqual(32, info.height);
fixtures.assertMaxColourDistance(output, fixtures.expected('flatten-rgb16-orange.jpg'), 25);
done();
});
});
it('Ignored for JPEG', function (done) {
sharp(fixtures.inputJpg)
.background('#ff0000')
.flatten()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
done();
});
});
it('extend all sides equally with RGB', function (done) {
sharp(fixtures.inputJpg)
.resize(120)
.background({r: 255, g: 0, b: 0})
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(118, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal.jpg'), data, done);
});
});
});

View File

@@ -1,279 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated crop', function () {
[
{
name: 'North',
width: 320,
height: 80,
gravity: sharp.gravity.north,
fixture: 'gravity-north.jpg'
},
{
name: 'East',
width: 80,
height: 320,
gravity: sharp.gravity.east,
fixture: 'gravity-east.jpg'
},
{
name: 'South',
width: 320,
height: 80,
gravity: sharp.gravity.south,
fixture: 'gravity-south.jpg'
},
{
name: 'West',
width: 80,
height: 320,
gravity: sharp.gravity.west,
fixture: 'gravity-west.jpg'
},
{
name: 'Center',
width: 320,
height: 80,
gravity: sharp.gravity.center,
fixture: 'gravity-center.jpg'
},
{
name: 'Centre',
width: 80,
height: 320,
gravity: sharp.gravity.centre,
fixture: 'gravity-centre.jpg'
},
{
name: 'Default (centre)',
width: 80,
height: 320,
gravity: undefined,
fixture: 'gravity-centre.jpg'
},
{
name: 'Northeast',
width: 320,
height: 80,
gravity: sharp.gravity.northeast,
fixture: 'gravity-north.jpg'
},
{
name: 'Northeast',
width: 80,
height: 320,
gravity: sharp.gravity.northeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Southeast',
width: 320,
height: 80,
gravity: sharp.gravity.southeast,
fixture: 'gravity-south.jpg'
},
{
name: 'Southeast',
width: 80,
height: 320,
gravity: sharp.gravity.southeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Southwest',
width: 320,
height: 80,
gravity: sharp.gravity.southwest,
fixture: 'gravity-south.jpg'
},
{
name: 'Southwest',
width: 80,
height: 320,
gravity: sharp.gravity.southwest,
fixture: 'gravity-west.jpg'
},
{
name: 'Northwest',
width: 320,
height: 80,
gravity: sharp.gravity.northwest,
fixture: 'gravity-north.jpg'
},
{
name: 'Northwest',
width: 80,
height: 320,
gravity: sharp.gravity.northwest,
fixture: 'gravity-west.jpg'
}
].forEach(function (settings) {
it(settings.name + ' gravity', function (done) {
sharp(fixtures.inputJpg)
.resize(settings.width, settings.height)
.crop(settings.gravity)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(settings.width, info.width);
assert.strictEqual(settings.height, info.height);
fixtures.assertSimilar(fixtures.expected(settings.fixture), data, done);
});
});
});
it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop('east')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-east.jpg'), data, done);
});
});
it('Invalid values fail', function () {
assert.throws(function () {
sharp().crop(9);
}, /Expected valid crop id\/name\/strategy for crop but received 9 of type number/);
assert.throws(function () {
sharp().crop(1.1);
}, /Expected valid crop id\/name\/strategy for crop but received 1.1 of type number/);
assert.throws(function () {
sharp().crop(-1);
}, /Expected valid crop id\/name\/strategy for crop but received -1 of type number/);
assert.throws(function () {
sharp().crop('zoinks');
}, /Expected valid crop id\/name\/strategy for crop but received zoinks of type string/);
});
it('Uses default value when none specified', function () {
assert.doesNotThrow(function () {
sharp().crop();
});
});
it('Skip crop when post-resize dimensions are at target', function () {
return sharp(fixtures.inputJpg)
.resize(1600, 1200)
.toBuffer()
.then(function (input) {
return sharp(input)
.resize(1110)
.crop(sharp.strategy.attention)
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(1110, result.info.width);
assert.strictEqual(832, result.info.height);
assert.strictEqual(undefined, result.info.cropOffsetLeft);
assert.strictEqual(undefined, result.info.cropOffsetTop);
});
});
});
describe('Entropy-based strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop(sharp.strategy.entropy)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-117, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-entropy.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop(sharp.strategy.entropy)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop('entropy')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
describe('Attention strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320)
.crop(sharp.strategy.attention)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-143, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop(sharp.strategy.attention)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80)
.crop('attention')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
});

View File

@@ -1,440 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated embed', function () {
it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.embed('center')
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), data, done);
});
});
it('JPEG within PNG, no alpha channel', function (done) {
sharp(fixtures.inputJpg)
.embed()
.resize(320, 240)
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), data, done);
});
});
it('JPEG within WebP, to include alpha channel', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed()
.webp()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-3-into-4.webp'), data, done);
});
});
it('PNG with alpha channel', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(50, 50)
.embed()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(50, info.width);
assert.strictEqual(50, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-4-into-4.png'), data, done);
});
});
it('16-bit PNG with alpha channel', function (done) {
sharp(fixtures.inputPngWithTransparency16bit)
.resize(32, 16)
.embed()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-16bit.png'), data, done);
});
});
it('16-bit PNG with alpha channel onto RGBA', function (done) {
sharp(fixtures.inputPngWithTransparency16bit)
.resize(32, 16)
.embed()
.background({r: 0, g: 0, b: 0, alpha: 0})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-16bit-rgba.png'), data, done);
});
});
it('PNG with 2 channels', function (done) {
sharp(fixtures.inputPngWithGreyAlpha)
.resize(32, 16)
.embed()
.background({r: 0, g: 0, b: 0, alpha: 0})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(16, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-2channel.png'), data, done);
});
});
it('Enlarge and embed', function (done) {
sharp(fixtures.inputPngWithOneColor)
.embed()
.resize(320, 240)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-enlarge.png'), data, done);
});
});
it('Embed invalid param values should fail', function () {
assert.throws(function () {
sharp().embed(-1);
});
assert.throws(function () {
sharp().embed(8.1);
});
assert.throws(function () {
sharp().embed(9);
});
assert.throws(function () {
sharp().embed(1000000);
});
assert.throws(function () {
sharp().embed(false);
});
assert.throws(function () {
sharp().embed('vallejo');
});
});
it('Embed gravity horizontal northwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.northwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a1-nw.png'), data, done);
});
});
it('Embed gravity horizontal north', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.north)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a2-n.png'), data, done);
});
});
it('Embed gravity horizontal northeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.northeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a3-ne.png'), data, done);
});
});
it('Embed gravity horizontal east', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.east)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a4-e.png'), data, done);
});
});
it('Embed gravity horizontal southeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.southeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a5-se.png'), data, done);
});
});
it('Embed gravity horizontal south', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.south)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a6-s.png'), data, done);
});
});
it('Embed gravity horizontal southwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.southwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a7-sw.png'), data, done);
});
});
it('Embed gravity horizontal west', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.west)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a8-w.png'), data, done);
});
});
it('Embed gravity horizontal center', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 100)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.center)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(100, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/a9-c.png'), data, done);
});
});
it('Embed gravity vertical northwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.northwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/1-nw.png'), data, done);
});
});
it('Embed gravity vertical north', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.north)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/2-n.png'), data, done);
});
});
it('Embed gravity vertical northeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.northeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/3-ne.png'), data, done);
});
});
it('Embed gravity vertical east', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.east)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/4-e.png'), data, done);
});
});
it('Embed gravity vertical southeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.southeast)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/5-se.png'), data, done);
});
});
it('Embed gravity vertical south', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.south)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/6-s.png'), data, done);
});
});
it('Embed gravity vertical southwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.southwest)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/7-sw.png'), data, done);
});
});
it('Embed gravity vertical west', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.west)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/8-w.png'), data, done);
});
});
it('Embed gravity vertical center', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200)
.background({r: 0, g: 0, b: 0, alpha: 0})
.embed(sharp.gravity.center)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/9-c.png'), data, done);
});
});
});

View File

@@ -1,261 +0,0 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated resize-related functions', function () {
it('Max width or height considering ratio (portrait)', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 320)
.max()
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(243, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Min width or height considering ratio (portrait)', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 320)
.min()
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(422, info.height);
done();
});
});
it('Max width or height considering ratio (landscape)', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.max()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Provide only one dimension with max, should default to crop', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.max()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Min width or height considering ratio (landscape)', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.min()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(392, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Provide only one dimension with min, should default to crop', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.min()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Do not enlarge when input width is already less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize(2800)
.withoutEnlargement()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Do not enlarge when input height is already less than output height', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 2300)
.withoutEnlargement()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Do enlarge when input width is less than output width', function (done) {
sharp(fixtures.inputJpg)
.resize(2800)
.withoutEnlargement(false)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2800, info.width);
assert.strictEqual(2286, info.height);
done();
});
});
it('Downscale width and height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Downscale width, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Downscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Upscale width and height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Upscale width, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
it('Upscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(null, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Downscale width, upscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 3000)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(3000, info.height);
done();
});
});
it('Upscale width, downscale height, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(3000, 320)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3000, info.width);
assert.strictEqual(320, info.height);
done();
});
});
it('Identity transform, ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.ignoreAspectRatio()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
done();
});
});
});

View File

@@ -54,7 +54,7 @@ describe('Extend', function () {
});
it('partial object fails', function () {
assert.throws(function () {
sharp().extend({top: 1});
sharp().extend({ top: 1 });
});
});

View File

@@ -6,10 +6,9 @@ const sharp = require('../../');
const fixtures = require('../fixtures');
describe('failOnError', function () {
it('handles truncated JPEG by default', function (done) {
sharp(fixtures.inputJpgTruncated)
it('handles truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: false })
.resize(320, 240)
// .toFile(fixtures.expected('truncated.jpg'), done);
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
@@ -19,10 +18,9 @@ describe('failOnError', function () {
});
});
it('handles truncated PNG by default', function (done) {
sharp(fixtures.inputPngTruncated)
it('handles truncated PNG', function (done) {
sharp(fixtures.inputPngTruncated, { failOnError: false })
.resize(320, 240)
// .toFile(fixtures.expected('truncated.png'), done);
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
@@ -46,26 +44,26 @@ describe('failOnError', function () {
});
});
it('returns errors to callback for truncated JPEG when failOnError is set', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: true }).toBuffer(function (err, data, info) {
it('returns errors to callback for truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated).toBuffer(function (err, data, info) {
assert.ok(err.message.includes('VipsJpeg: Premature end of JPEG file'), err);
assert.equal(data, null);
assert.equal(info, null);
assert.strictEqual(data, null);
assert.strictEqual(info, null);
done();
});
});
it('returns errors to callback for truncated PNG when failOnError is set', function (done) {
sharp(fixtures.inputPngTruncated, { failOnError: true }).toBuffer(function (err, data, info) {
it('returns errors to callback for truncated PNG', function (done) {
sharp(fixtures.inputPngTruncated).toBuffer(function (err, data, info) {
assert.ok(err.message.includes('vipspng: libpng read error'), err);
assert.equal(data, null);
assert.equal(info, null);
assert.strictEqual(data, null);
assert.strictEqual(info, null);
done();
});
});
it('rejects promises for truncated JPEG when failOnError is set', function (done) {
sharp(fixtures.inputJpgTruncated, { failOnError: true })
it('rejects promises for truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated)
.toBuffer()
.then(() => {
throw new Error('Expected rejection');

View File

@@ -44,6 +44,19 @@ describe('Gamma correction', function () {
});
});
it('input value of 2.2, output value of 3.0', function (done) {
sharp(fixtures.inputJpgWithGammaHoliness)
.resize(129, 111)
.gamma(2.2, 3.0)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(129, info.width);
assert.strictEqual(111, info.height);
fixtures.assertSimilar(fixtures.expected('gamma-in-2.2-out-3.0.jpg'), data, { threshold: 6 }, done);
});
});
it('alpha transparency', function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.resize(320)
@@ -57,9 +70,15 @@ describe('Gamma correction', function () {
});
});
it('invalid value', function () {
it('invalid first parameter value', function () {
assert.throws(function () {
sharp(fixtures.inputJpgWithGammaHoliness).gamma(4);
});
});
it('invalid second parameter value', function () {
assert.throws(function () {
sharp(fixtures.inputJpgWithGammaHoliness).gamma(2.2, 4);
});
});
});

test/unit/gif.js Normal file (64 lines)
View File

@@ -0,0 +1,64 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('GIF input', () => {
it('GIF Buffer to JPEG Buffer', () =>
sharp(fs.readFileSync(fixtures.inputGif))
.resize(8, 4)
.jpeg()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(4, info.height);
})
);
it('2 channel GIF file to PNG Buffer', () =>
sharp(fixtures.inputGifGreyPlusAlpha)
.resize(8, 4)
.png()
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(8, info.width);
assert.strictEqual(4, info.height);
assert.strictEqual(4, info.channels);
})
);
it('Animated GIF first page to PNG', () =>
sharp(fixtures.inputGifAnimated)
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(4, info.channels);
})
);
it('Animated GIF all pages to PNG "toilet roll"', () =>
sharp(fixtures.inputGifAnimated, { pages: -1 })
.toBuffer({ resolveWithObject: true })
.then(({ data, info }) => {
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('png', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(2400, info.height);
assert.strictEqual(4, info.channels);
})
);
});
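
The "toilet roll" test relies on the new pages input option: pages: -1 selects all pages/frames, which are stacked vertically in the decoded image. A rough sketch with hypothetical file names:

const sharp = require('sharp');

// First frame only (default page 0)
sharp('animated.gif')
  .png()
  .toFile('first-frame.png')
  .catch(console.error);

// All frames as one tall "toilet roll" image
sharp('animated.gif', { pages: -1 })
  .png()
  .toFile('all-frames.png')
  .catch(console.error);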

File diff suppressed because it is too large


@@ -138,7 +138,7 @@ describe('Image channel insertion', function () {
it('Invalid raw buffer description', function () {
assert.throws(function () {
sharp().joinChannel(fs.readFileSync(fixtures.inputPng), {raw: {}});
sharp().joinChannel(fs.readFileSync(fixtures.inputPng), { raw: {} });
});
});
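
For context, joinChannel accepts raw pixel data when the buffer is described via the raw option; a minimal sketch, assuming a single-channel buffer whose dimensions match the resized image:

const sharp = require('sharp');

const width = 32;
const height = 24;
const extraChannel = Buffer.alloc(width * height, 128); // hypothetical single-channel data

sharp('input.png')               // hypothetical 3-channel input
  .resize(width, height)
  .joinChannel(extraChannel, { raw: { width, height, channels: 1 } })
  .png()
  .toBuffer()
  .catch(err => console.error(err));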

test/unit/jpeg.js (new file, 262 lines)

@@ -0,0 +1,262 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('JPEG', function () {
it('JPEG quality', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ quality: 70 })
.toBuffer(function (err, buffer70) {
if (err) throw err;
sharp(fixtures.inputJpg)
.resize(320, 240)
.toBuffer(function (err, buffer80) {
if (err) throw err;
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ quality: 90 })
.toBuffer(function (err, buffer90) {
if (err) throw err;
assert(buffer70.length < buffer80.length);
assert(buffer80.length < buffer90.length);
done();
});
});
});
});
describe('Invalid JPEG quality', function () {
[-1, 88.2, 'test'].forEach(function (quality) {
it(quality.toString(), function () {
assert.throws(function () {
sharp().jpeg({ quality: quality });
});
});
});
});
describe('Invalid JPEG quantisation table', function () {
[-1, 88.2, 'test'].forEach(function (table) {
it(table.toString(), function () {
assert.throws(function () {
sharp().jpeg({ quantisationTable: table });
});
});
});
});
it('Progressive JPEG image', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ progressive: false })
.toBuffer(function (err, nonProgressiveData, nonProgressiveInfo) {
if (err) throw err;
assert.strictEqual(true, nonProgressiveData.length > 0);
assert.strictEqual(nonProgressiveData.length, nonProgressiveInfo.size);
assert.strictEqual('jpeg', nonProgressiveInfo.format);
assert.strictEqual(320, nonProgressiveInfo.width);
assert.strictEqual(240, nonProgressiveInfo.height);
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ progressive: true })
.toBuffer(function (err, progressiveData, progressiveInfo) {
if (err) throw err;
assert.strictEqual(true, progressiveData.length > 0);
assert.strictEqual(progressiveData.length, progressiveInfo.size);
assert.strictEqual(false, progressiveData.length === nonProgressiveData.length);
assert.strictEqual('jpeg', progressiveInfo.format);
assert.strictEqual(320, progressiveInfo.width);
assert.strictEqual(240, progressiveInfo.height);
done();
});
});
});
it('Without chroma subsampling generates larger file', function (done) {
// First generate with chroma subsampling (default)
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ chromaSubsampling: '4:2:0' })
.toBuffer(function (err, withChromaSubsamplingData, withChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withChromaSubsamplingData.length > 0);
assert.strictEqual(withChromaSubsamplingData.length, withChromaSubsamplingInfo.size);
assert.strictEqual('jpeg', withChromaSubsamplingInfo.format);
assert.strictEqual(320, withChromaSubsamplingInfo.width);
assert.strictEqual(240, withChromaSubsamplingInfo.height);
// Then generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ chromaSubsampling: '4:4:4' })
.toBuffer(function (err, withoutChromaSubsamplingData, withoutChromaSubsamplingInfo) {
if (err) throw err;
assert.strictEqual(true, withoutChromaSubsamplingData.length > 0);
assert.strictEqual(withoutChromaSubsamplingData.length, withoutChromaSubsamplingInfo.size);
assert.strictEqual('jpeg', withoutChromaSubsamplingInfo.format);
assert.strictEqual(320, withoutChromaSubsamplingInfo.width);
assert.strictEqual(240, withoutChromaSubsamplingInfo.height);
assert.strictEqual(true, withChromaSubsamplingData.length < withoutChromaSubsamplingData.length);
done();
});
});
});
it('Invalid JPEG chromaSubsampling value throws error', function () {
assert.throws(function () {
sharp().jpeg({ chromaSubsampling: '4:2:2' });
});
});
it('Trellis quantisation', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ trellisQuantisation: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ trellisQuantization: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Verify image is same (as mozjpeg may not be present) size or less
assert.strictEqual(true, withData.length <= withoutData.length);
done();
});
});
});
it('Overshoot deringing', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ overshootDeringing: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ overshootDeringing: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
done();
});
});
});
it('Optimise scans generates different output length', function (done) {
// First generate without
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseScans: false })
.toBuffer(function (err, withoutData, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutData.length > 0);
assert.strictEqual(withoutData.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Then generate with
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimizeScans: true })
.toBuffer(function (err, withData, withInfo) {
if (err) throw err;
assert.strictEqual(true, withData.length > 0);
assert.strictEqual(withData.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Verify image is of a different size (progressive output even without mozjpeg)
assert.notStrictEqual(withData.length, withoutData.length);
done();
});
});
});
it('Optimise coding generates smaller output length', function (done) {
// First generate with optimize coding enabled (default)
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg()
.toBuffer(function (err, withOptimiseCoding, withInfo) {
if (err) throw err;
assert.strictEqual(true, withOptimiseCoding.length > 0);
assert.strictEqual(withOptimiseCoding.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Then generate with optimise coding disabled
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimizeCoding: false })
.toBuffer(function (err, withoutOptimiseCoding, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withoutOptimiseCoding.length > 0);
assert.strictEqual(withoutOptimiseCoding.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Verify optimised image is of a smaller size
assert.strictEqual(true, withOptimiseCoding.length < withoutOptimiseCoding.length);
done();
});
});
});
it('Specifying quantisation table provides different JPEG', function (done) {
// First generate with default quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false })
.toBuffer(function (err, withDefaultQuantisationTable, withInfo) {
if (err) throw err;
assert.strictEqual(true, withDefaultQuantisationTable.length > 0);
assert.strictEqual(withDefaultQuantisationTable.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Then generate with different quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false, quantisationTable: 3 })
.toBuffer(function (err, withQuantTable3, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withQuantTable3.length > 0);
assert.strictEqual(withQuantTable3.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Verify image is same (as mozjpeg may not be present) size or less
assert.strictEqual(true, withQuantTable3.length <= withDefaultQuantisationTable.length);
done();
});
});
});
});
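
Taken together, the new test file exercises most of the mozjpeg-related output options; a condensed sketch of the relevant jpeg() settings (the effect of several depends on whether libvips was built with mozjpeg):

const sharp = require('sharp');

sharp('input.jpg')               // hypothetical input
  .resize(320, 240)
  .jpeg({
    quality: 80,
    progressive: true,
    chromaSubsampling: '4:4:4',  // disable chroma subsampling
    trellisQuantisation: true,   // mozjpeg only; no-op otherwise
    overshootDeringing: true,    // mozjpeg only
    optimiseScans: true,         // forces progressive output
    quantisationTable: 3         // mozjpeg only
  })
  .toBuffer()
  .catch(err => console.error(err));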


@@ -103,6 +103,29 @@ describe('Image metadata', function () {
});
});
it('Multipage TIFF', function (done) {
sharp(fixtures.inputTiffMultipage).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual('tiff', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2464, metadata.width);
assert.strictEqual(3248, metadata.height);
assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels);
assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(300, metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(2, metadata.pages);
assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual(1, metadata.orientation);
assert.strictEqual('undefined', typeof metadata.exif);
assert.strictEqual('undefined', typeof metadata.icc);
done();
});
});
it('PNG', function (done) {
sharp(fixtures.inputPng).metadata(function (err, metadata) {
if (err) throw err;
@@ -434,6 +457,7 @@ describe('Image metadata', function () {
sharp(fixtures.inputJpgWithCorruptHeader)
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input file has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
done();
});
});
@@ -442,6 +466,16 @@ describe('Image metadata', function () {
sharp(fs.readFileSync(fixtures.inputJpgWithCorruptHeader))
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input buffer has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
done();
});
});
it('Unsupported lossless JPEG passes underlying error message', function (done) {
sharp(fixtures.inputJpgLossless)
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input file has corrupt header: VipsJpeg: Unsupported JPEG process: SOF type 0xc3/.test(err.message));
done();
});
});
@@ -449,12 +483,12 @@ describe('Image metadata', function () {
describe('Invalid withMetadata parameters', function () {
it('String orientation', function () {
assert.throws(function () {
sharp().withMetadata({orientation: 'zoinks'});
sharp().withMetadata({ orientation: 'zoinks' });
});
});
it('Negative orientation', function () {
assert.throws(function () {
sharp().withMetadata({orientation: -1});
sharp().withMetadata({ orientation: -1 });
});
});
it('Zero orientation', function () {
@@ -464,7 +498,7 @@ describe('Image metadata', function () {
});
it('Too large orientation', function () {
assert.throws(function () {
sharp().withMetadata({orientation: 9});
sharp().withMetadata({ orientation: 9 });
});
});
});
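
The pages property asserted above is the page/frame count reported for multi-page input such as TIFF, animated GIF/WebP and PDF; a minimal sketch of reading it (input path is hypothetical):

const sharp = require('sharp');

sharp('multipage.tif')
  .metadata()
  .then(({ format, width, height, pages }) => {
    console.log(`${format} ${width}x${height}, ${pages} page(s)`);
  })
  .catch(err => console.error(err));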


@@ -140,37 +140,35 @@ describe('Overlays', function () {
});
});
if (sharp.format.webp.input.file) {
it('Composite WebP onto JPEG', function (done) {
const paths = getPaths('overlay-jpeg-with-webp', 'jpg');
it('Composite WebP onto JPEG', function (done) {
const paths = getPaths('overlay-jpeg-with-webp', 'jpg');
sharp(fixtures.inputJpg)
.resize(300, 300)
.overlayWith(fixtures.inputWebPWithTransparency)
.toFile(paths.actual, function (error, info) {
if (error) return done(error);
fixtures.assertMaxColourDistance(paths.actual, paths.expected, 102);
done();
});
});
}
it('Composite JPEG onto PNG, no premultiply', function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(false, info.premultiplied);
sharp(fixtures.inputJpg)
.resize(300, 300)
.overlayWith(fixtures.inputWebPWithTransparency)
.toFile(paths.actual, function (error, info) {
if (error) return done(error);
fixtures.assertMaxColourDistance(paths.actual, paths.expected, 102);
done();
});
});
it('Composite opaque JPEG onto JPEG, no premultiply', function (done) {
it('Composite JPEG onto PNG, ensure premultiply', function (done) {
sharp(fixtures.inputPngOverlayLayer1)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, info.premultiplied);
done();
});
});
it('Composite opaque JPEG onto JPEG, ensure premultiply', function (done) {
sharp(fixtures.inputJpg)
.overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(false, info.premultiplied);
assert.strictEqual(true, info.premultiplied);
done();
});
});
@@ -409,12 +407,6 @@ describe('Overlays', function () {
});
});
it('Overlay with invalid cutout option', function () {
assert.throws(function () {
sharp().overlayWith('ignore', { cutout: 1 });
});
});
it('Overlay with invalid tile option', function () {
assert.throws(function () {
sharp().overlayWith('ignore', { tile: 1 });
@@ -580,18 +572,17 @@ describe('Overlays', function () {
});
});
it('Composite JPEG onto JPEG, no premultiply', function (done) {
it('Composite JPEG onto JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(480, 320)
.overlayWith(fixtures.inputJpgBooleanTest)
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(480, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual(false, info.premultiplied);
assert.strictEqual(true, info.premultiplied);
fixtures.assertSimilar(fixtures.expected('overlay-jpeg-with-jpeg.jpg'), data, done);
});
});
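
The premultiply expectations change above because compositing now always premultiplies the alpha channel; a minimal sketch using overlayWith as in these tests (input names are hypothetical):

const sharp = require('sharp');

sharp('background.png')
  .overlayWith('foreground.jpg')
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => {
    console.log(info.premultiplied); // true, per the updated expectations above
  })
  .catch(err => console.error(err));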


@@ -16,16 +16,24 @@ describe('Platform-detection', function () {
delete process.env.npm_config_platform;
});
it('Can override ARM version via npm_config_armv', function () {
it('Can override ARM version via --arm-version', function () {
process.env.npm_config_arch = 'arm';
process.env.npm_config_armv = 'test';
process.env.npm_config_arm_version = 'test';
assert.strictEqual('armvtest', platform().split('-')[1]);
delete process.env.npm_config_armv;
delete process.env.npm_config_arm_version;
delete process.env.npm_config_arch;
});
it('Can override ARM64 version via --arm-version', function () {
process.env.npm_config_arch = 'arm64';
process.env.npm_config_arm_version = 'test';
assert.strictEqual('arm64vtest', platform().split('-')[1]);
delete process.env.npm_config_arm_version;
delete process.env.npm_config_arch;
});
it('Can detect ARM version via process.config', function () {
process.env.npm_config_arch = 'armhf';
process.env.npm_config_arch = 'arm';
const armVersion = process.config.variables.arm_version;
process.config.variables.arm_version = 'test';
assert.strictEqual('armvtest', platform().split('-')[1]);
@@ -41,7 +49,7 @@ describe('Platform-detection', function () {
it('Defaults to ARMv8 for 64-bit', function () {
process.env.npm_config_arch = 'arm64';
assert.strictEqual('armv8', platform().split('-')[1]);
assert.strictEqual('arm64v8', platform().split('-')[1]);
delete process.env.npm_config_arch;
});
});
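
These tests follow npm's usual flag-to-environment mapping (--arm-version=… becomes process.env.npm_config_arm_version) and the rename of armv8 to arm64v8. A rough sketch of the override the tests simulate, assuming platform() is the helper the suite requires from ../../lib/platform:

const platform = require('sharp/lib/platform'); // assumed resolvable path when sharp is installed

process.env.npm_config_arch = 'arm64';
process.env.npm_config_arm_version = '8';
console.log(platform()); // e.g. 'linux-arm64v8' on a glibc Linux host (assumed output shape)

delete process.env.npm_config_arch;
delete process.env.npm_config_arm_version;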

test/unit/png.js (new file, 171 lines)

@@ -0,0 +1,171 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('PNG', function () {
it('compression level is valid', function () {
assert.doesNotThrow(function () {
sharp().png({ compressionLevel: 0 });
});
});
it('compression level is invalid', function () {
assert.throws(function () {
sharp().png({ compressionLevel: -1 });
});
});
it('default compressionLevel generates smaller file than compressionLevel=6', function (done) {
// First generate with default compressionLevel
sharp(fixtures.inputPng)
.resize(320, 240)
.png()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual('png', defaultInfo.format);
// Then generate with compressionLevel=6
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ compressionLevel: 6 })
.toBuffer(function (err, largerData, largerInfo) {
if (err) throw err;
assert.strictEqual(true, largerData.length > 0);
assert.strictEqual('png', largerInfo.format);
assert.strictEqual(true, defaultData.length < largerData.length);
done();
});
});
});
it('without adaptiveFiltering generates smaller file', function (done) {
// First generate with adaptive filtering
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ adaptiveFiltering: true })
.toBuffer(function (err, adaptiveData, adaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, adaptiveData.length > 0);
assert.strictEqual(adaptiveData.length, adaptiveInfo.size);
assert.strictEqual('png', adaptiveInfo.format);
assert.strictEqual(320, adaptiveInfo.width);
assert.strictEqual(240, adaptiveInfo.height);
// Then generate without
sharp(fixtures.inputPng)
.resize(320, 240)
.png({ adaptiveFiltering: false })
.toBuffer(function (err, withoutAdaptiveData, withoutAdaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, withoutAdaptiveData.length > 0);
assert.strictEqual(withoutAdaptiveData.length, withoutAdaptiveInfo.size);
assert.strictEqual('png', withoutAdaptiveInfo.format);
assert.strictEqual(320, withoutAdaptiveInfo.width);
assert.strictEqual(240, withoutAdaptiveInfo.height);
assert.strictEqual(true, withoutAdaptiveData.length < adaptiveData.length);
done();
});
});
});
it('Invalid PNG adaptiveFiltering value throws error', function () {
assert.throws(function () {
sharp().png({ adaptiveFiltering: 1 });
});
});
it('Progressive PNG image', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.png({ progressive: false })
.toBuffer(function (err, nonProgressiveData, nonProgressiveInfo) {
if (err) throw err;
assert.strictEqual(true, nonProgressiveData.length > 0);
assert.strictEqual(nonProgressiveData.length, nonProgressiveInfo.size);
assert.strictEqual('png', nonProgressiveInfo.format);
assert.strictEqual(320, nonProgressiveInfo.width);
assert.strictEqual(240, nonProgressiveInfo.height);
sharp(nonProgressiveData)
.png({ progressive: true })
.toBuffer(function (err, progressiveData, progressiveInfo) {
if (err) throw err;
assert.strictEqual(true, progressiveData.length > 0);
assert.strictEqual(progressiveData.length, progressiveInfo.size);
assert.strictEqual(true, progressiveData.length > nonProgressiveData.length);
assert.strictEqual('png', progressiveInfo.format);
assert.strictEqual(320, progressiveInfo.width);
assert.strictEqual(240, progressiveInfo.height);
done();
});
});
});
it('Valid PNG libimagequant palette value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().png({ palette: false });
});
});
it('Invalid PNG libimagequant palette value throws error', function () {
assert.throws(function () {
sharp().png({ palette: 'fail' });
});
});
it('Valid PNG libimagequant quality value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, quality: 80 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, quality: 100 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant quality value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, quality: 101 });
});
});
it('Valid PNG libimagequant colours value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, colours: 100 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, colours: 200 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant colours value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, colours: -1 });
});
});
it('Invalid PNG libimagequant colors value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, colors: 0.1 });
});
});
it('Valid PNG libimagequant dither value produces image of same size or smaller', function () {
const inputPngBuffer = fs.readFileSync(fixtures.inputPng);
return Promise.all([
sharp(inputPngBuffer).resize(10).png({ palette: true, dither: 0.1 }).toBuffer(),
sharp(inputPngBuffer).resize(10).png({ palette: true, dither: 0.9 }).toBuffer()
]).then(function (data) {
assert.strictEqual(true, data[0].length <= data[1].length);
});
});
it('Invalid PNG libimagequant dither value throws error', function () {
assert.throws(function () {
sharp().png({ palette: true, dither: 'fail' });
});
});
});
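
The palette-related options above require libvips compiled with libimagequant, which the prebuilt binaries now include; a condensed sketch with a hypothetical input:

const sharp = require('sharp');

sharp('input.png')
  .png({
    palette: true,   // quantise to a palette-based PNG via libimagequant
    quality: 80,     // use the fewest colours needed to reach this quality
    colours: 128,    // maximum palette entries ("colors" is also accepted)
    dither: 0.5      // dithering level, 0 to 1
  })
  .toBuffer()
  .catch(err => console.error(err));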

test/unit/raw.js (new file, 170 lines)

@@ -0,0 +1,170 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Raw pixel data', function () {
describe('Raw pixel input', function () {
it('Missing options', function () {
assert.throws(function () {
sharp({ raw: {} });
});
});
it('Incomplete options', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 1 } });
});
});
it('Invalid channels', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 1, channels: 5 } });
});
});
it('Invalid height', function () {
assert.throws(function () {
sharp({ raw: { width: 1, height: 0, channels: 4 } });
});
});
it('Invalid width', function () {
assert.throws(function () {
sharp({ raw: { width: 'zoinks', height: 1, channels: 4 } });
});
});
it('RGB', function (done) {
// Convert to raw pixel data
sharp(fixtures.inputJpg)
.resize(256)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(209, info.height);
assert.strictEqual(3, info.channels);
// Convert back to JPEG
sharp(data, {
raw: {
width: info.width,
height: info.height,
channels: info.channels
} })
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(209, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(fixtures.inputJpg, data, done);
});
});
});
it('RGBA', function (done) {
// Convert to raw pixel data
sharp(fixtures.inputPngOverlayLayer1)
.resize(256)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(192, info.height);
assert.strictEqual(4, info.channels);
// Convert back to PNG
sharp(data, {
raw: {
width: info.width,
height: info.height,
channels: info.channels
} })
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(256, info.width);
assert.strictEqual(192, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.inputPngOverlayLayer1, data, { threshold: 7 }, done);
});
});
});
it('JPEG to raw Stream and back again', function (done) {
const width = 32;
const height = 24;
const writable = sharp({
raw: {
width,
height,
channels: 3
}
});
writable
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
sharp(fixtures.inputJpg)
.resize(width, height)
.raw()
.pipe(writable);
});
});
describe('Output raw, uncompressed image data', function () {
it('1 channel greyscale image', function (done) {
sharp(fixtures.inputJpg)
.greyscale()
.resize(32, 24)
.raw()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 1, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
assert.strictEqual(1, info.channels);
done();
});
});
it('3 channel colour image without transparency', function (done) {
sharp(fixtures.inputJpg)
.resize(32, 24)
.toFormat('raw')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 3, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
});
it('4 channel colour image with transparency', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(32, 24)
.toFormat(sharp.format.raw)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(32 * 24 * 4, info.size);
assert.strictEqual(data.length, info.size);
assert.strictEqual('raw', info.format);
assert.strictEqual(32, info.width);
assert.strictEqual(24, info.height);
done();
});
});
});
});
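
Stream-based input of raw pixel data is new in this release: constructing sharp() with only a raw description returns a writable stream, as the round-trip test above shows. A minimal sketch with hypothetical file names and dimensions:

const sharp = require('sharp');

const width = 32;
const height = 24;

// Writable side: interpret incoming bytes as 3-channel raw pixels
const writable = sharp({ raw: { width, height, channels: 3 } });
writable
  .jpeg()
  .toFile('from-raw-stream.jpg')
  .catch(console.error);

// Readable side: produce raw pixels from an existing image and pipe them in
sharp('input.jpg')
  .resize(width, height)
  .raw()
  .pipe(writable);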

test/unit/recomb.js (new file, 135 lines)

@@ -0,0 +1,135 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Recomb', function () {
it('applies a sepia filter using recomb', function (done) {
const output = fixtures.path('output.recomb-sepia.jpg');
sharp(fixtures.inputJpgWithLandscapeExif1)
.recomb([
[0.3588, 0.7044, 0.1368],
[0.299, 0.587, 0.114],
[0.2392, 0.4696, 0.0912]
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(600, info.width);
assert.strictEqual(450, info.height);
fixtures.assertMaxColourDistance(
output,
fixtures.expected('Landscape_1-recomb-sepia.jpg'),
17
);
done();
});
});
it('applies a sepia filter using recomb to a PNG with alpha', function (done) {
const output = fixtures.path('output.recomb-sepia.png');
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.recomb([
[0.3588, 0.7044, 0.1368],
[0.299, 0.587, 0.114],
[0.2392, 0.4696, 0.0912]
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(1024, info.width);
assert.strictEqual(768, info.height);
fixtures.assertMaxColourDistance(
output,
fixtures.expected('alpha-recomb-sepia.png'),
17
);
done();
});
});
it('applies a different sepia filter using recomb', function (done) {
const output = fixtures.path('output.recomb-sepia2.jpg');
sharp(fixtures.inputJpgWithLandscapeExif1)
.recomb([
[0.393, 0.769, 0.189],
[0.349, 0.686, 0.168],
[0.272, 0.534, 0.131]
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(600, info.width);
assert.strictEqual(450, info.height);
fixtures.assertMaxColourDistance(
output,
fixtures.expected('Landscape_1-recomb-sepia2.jpg'),
17
);
done();
});
});
it('increases the saturation of the image', function (done) {
const saturationLevel = 1;
const output = fixtures.path('output.recomb-saturation.jpg');
sharp(fixtures.inputJpgWithLandscapeExif1)
.recomb([
[
saturationLevel + 1 - 0.2989,
-0.587 * saturationLevel,
-0.114 * saturationLevel
],
[
-0.2989 * saturationLevel,
saturationLevel + 1 - 0.587,
-0.114 * saturationLevel
],
[
-0.2989 * saturationLevel,
-0.587 * saturationLevel,
saturationLevel + 1 - 0.114
]
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(600, info.width);
assert.strictEqual(450, info.height);
fixtures.assertMaxColourDistance(
output,
fixtures.expected('Landscape_1-recomb-saturation.jpg'),
37
);
done();
});
});
describe('invalid matrix specification', function () {
it('missing', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).recomb();
});
});
it('incorrect flat data', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).recomb([1, 2, 3, 4, 5, 6, 7, 8, 9]);
});
});
it('incorrect sub size', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).recomb([
[1, 2, 3, 4],
[5, 6, 7, 8],
[1, 2, 9, 6]
]);
});
});
it('incorrect top size', function () {
assert.throws(function () {
sharp(fixtures.inputJpg).recomb([[1, 2, 3, 4], [5, 6, 7, 8]]);
});
});
});
});
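
recomb multiplies every RGB pixel by a 3x3 matrix, so a greyscale conversion can be written by repeating the usual luma weights in each row; a minimal sketch (the Rec. 601 coefficients below are illustrative, not taken from these tests):

const sharp = require('sharp');

sharp('input.jpg')               // hypothetical input
  .recomb([
    [0.299, 0.587, 0.114],       // each output channel receives the same luma value
    [0.299, 0.587, 0.114],
    [0.299, 0.587, 0.114]
  ])
  .toFile('grey-via-recomb.jpg') // hypothetical output
  .catch(err => console.error(err));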


@@ -116,7 +116,7 @@ describe('Resize fit=contain', function () {
});
});
it.skip('TIFF in LAB colourspace onto RGBA background', function (done) {
it('TIFF in LAB colourspace onto RGBA background', function (done) {
sharp(fixtures.inputTiffCielab)
.resize(64, 128, {
fit: 'contain',


@@ -497,6 +497,7 @@ describe('Resize dimensions', function () {
[
sharp.kernel.nearest,
sharp.kernel.cubic,
sharp.kernel.mitchell,
sharp.kernel.lanczos2,
sharp.kernel.lanczos3
].forEach(function (kernel) {
@@ -524,6 +525,40 @@ describe('Resize dimensions', function () {
});
});
it('Ensure shortest edge (height) is at least 1 pixel', function () {
return sharp({
create: {
width: 10,
height: 2,
channels: 3,
background: 'red'
}
})
.resize(2)
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(2, output.info.width);
assert.strictEqual(1, output.info.height);
});
});
it('Ensure shortest edge (width) is at least 1 pixel', function () {
return sharp({
create: {
width: 2,
height: 10,
channels: 3,
background: 'red'
}
})
.resize(null, 2)
.toBuffer({ resolveWithObject: true })
.then(function (output) {
assert.strictEqual(1, output.info.width);
assert.strictEqual(2, output.info.height);
});
});
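
Two behaviours covered here: the kernel list gains mitchell, and very thin results are clamped so the shorter edge never rounds down to zero. A rough sketch combining both:

const sharp = require('sharp');

sharp({ create: { width: 10, height: 2, channels: 3, background: 'red' } })
  .resize(2, null, { kernel: 'mitchell' })
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => console.log(info.width, info.height)) // 2 x 1, never 2 x 0
  .catch(err => console.error(err));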
it('unknown kernel throws', function () {
assert.throws(function () {
sharp().resize(null, null, { kernel: 'unknown' });


@@ -25,7 +25,7 @@ describe('Rotation', function () {
it('Rotate by 30 degrees with semi-transparent background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, {background: { r: 255, g: 0, b: 0, alpha: 0.5 }})
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.resize(320)
.png()
.toBuffer(function (err, data, info) {
@@ -39,7 +39,7 @@ describe('Rotation', function () {
it('Rotate by 30 degrees with solid background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, {background: { r: 255, g: 0, b: 0, alpha: 0.5 }})
.rotate(30, { background: { r: 255, g: 0, b: 0, alpha: 0.5 } })
.resize(320)
.toBuffer(function (err, data, info) {
if (err) throw err;
@@ -208,7 +208,7 @@ describe('Rotation', function () {
sharp(fixtures.inputJpgWithExif)
.rotate()
.resize(320)
.withMetadata({orientation: 3})
.withMetadata({ orientation: 3 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);

test/unit/svg.js (new file, 77 lines)

@@ -0,0 +1,77 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('SVG input', function () {
it('Convert SVG to PNG at default 72DPI', function (done) {
sharp(fixtures.inputSvg)
.resize(1024)
.extract({ left: 290, top: 760, width: 40, height: 40 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(40, info.width);
assert.strictEqual(40, info.height);
fixtures.assertSimilar(fixtures.expected('svg72.png'), data, function (err) {
if (err) throw err;
sharp(data).metadata(function (err, info) {
if (err) throw err;
assert.strictEqual(72, info.density);
done();
});
});
});
});
it('Convert SVG to PNG at 1200DPI', function (done) {
sharp(fixtures.inputSvg, { density: 1200 })
.resize(1024)
.extract({ left: 290, top: 760, width: 40, height: 40 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(40, info.width);
assert.strictEqual(40, info.height);
fixtures.assertSimilar(fixtures.expected('svg1200.png'), data, function (err) {
if (err) throw err;
sharp(data).metadata(function (err, info) {
if (err) throw err;
assert.strictEqual(1200, info.density);
done();
});
});
});
});
it('Convert SVG to PNG at 14.4DPI', function (done) {
sharp(fixtures.inputSvg, { density: 14.4 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(20, info.width);
assert.strictEqual(20, info.height);
fixtures.assertSimilar(fixtures.expected('svg14.4.png'), data, function (err) {
if (err) throw err;
done();
});
});
});
it('Convert SVG with embedded images to PNG, respecting dimensions, autoconvert to PNG', function (done) {
sharp(fixtures.inputSvgWithEmbeddedImages)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(480, info.width);
assert.strictEqual(360, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('svg-embedded.png'), data, done);
});
});
});
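
SVG input is rasterised at 72 DPI unless a density is supplied at construction, which is what the DPI assertions above exercise; a minimal sketch with hypothetical file names:

const sharp = require('sharp');

sharp('input.svg', { density: 300 }) // rasterise at 300 DPI instead of the default 72
  .png()
  .toFile('output.png')
  .catch(err => console.error(err));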


@@ -132,7 +132,7 @@ describe('Threshold', function () {
it('color threshold', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.threshold(128, {'grayscale': false})
.threshold(128, { grayscale: false })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);

test/unit/tiff.js (new file, 424 lines)

@@ -0,0 +1,424 @@
'use strict';
const fs = require('fs');
const assert = require('assert');
const rimraf = require('rimraf');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('TIFF', function () {
it('Load TIFF from Buffer', function (done) {
const inputTiffBuffer = fs.readFileSync(fixtures.inputTiff);
sharp(inputTiffBuffer)
.resize(320, 240)
.jpeg()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Load multi-page TIFF from file', function (done) {
sharp(fixtures.inputTiffMultipage) // defaults to page 0
.jpeg()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual(defaultData.length, defaultInfo.size);
assert.strictEqual('jpeg', defaultInfo.format);
sharp(fixtures.inputTiffMultipage, { page: 1 }) // 50%-scale copy of page 0
.jpeg()
.toBuffer(function (err, scaledData, scaledInfo) {
if (err) throw err;
assert.strictEqual(true, scaledData.length > 0);
assert.strictEqual(scaledData.length, scaledInfo.size);
assert.strictEqual('jpeg', scaledInfo.format);
assert.strictEqual(defaultInfo.width, scaledInfo.width * 2);
assert.strictEqual(defaultInfo.height, scaledInfo.height * 2);
done();
});
});
});
it('Load multi-page TIFF from Buffer', function (done) {
const inputTiffBuffer = fs.readFileSync(fixtures.inputTiffMultipage);
sharp(inputTiffBuffer) // defaults to page 0
.jpeg()
.toBuffer(function (err, defaultData, defaultInfo) {
if (err) throw err;
assert.strictEqual(true, defaultData.length > 0);
assert.strictEqual(defaultData.length, defaultInfo.size);
assert.strictEqual('jpeg', defaultInfo.format);
sharp(inputTiffBuffer, { page: 1 }) // 50%-scale copy of page 0
.jpeg()
.toBuffer(function (err, scaledData, scaledInfo) {
if (err) throw err;
assert.strictEqual(true, scaledData.length > 0);
assert.strictEqual(scaledData.length, scaledInfo.size);
assert.strictEqual('jpeg', scaledInfo.format);
assert.strictEqual(defaultInfo.width, scaledInfo.width * 2);
assert.strictEqual(defaultInfo.height, scaledInfo.height * 2);
done();
});
});
});
it('Save TIFF to Buffer', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 240)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Invalid TIFF quality throws error', function () {
assert.throws(function () {
sharp().tiff({ quality: 101 });
});
});
it('Missing TIFF quality does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff();
});
});
it('Not squashing TIFF to a bit depth of 1 should not change the file size', function (done) {
const startSize = fs.statSync(fixtures.inputTiff8BitDepth).size;
sharp(fixtures.inputTiff8BitDepth)
.toColourspace('b-w') // can only squash 1 band uchar images
.tiff({
squash: false,
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size === startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('Squashing TIFF to a bit depth of 1 should significantly reduce file size', function (done) {
const startSize = fs.statSync(fixtures.inputTiff8BitDepth).size;
sharp(fixtures.inputTiff8BitDepth)
.toColourspace('b-w') // can only squash 1 band uchar images
.tiff({
squash: true,
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < (startSize / 2));
rimraf(fixtures.outputTiff, done);
});
});
it('Invalid TIFF squash value throws error', function () {
assert.throws(function () {
sharp().tiff({ squash: 'true' });
});
});
it('TIFF setting xres and yres on file', function (done) {
const res = 1000.0; // inputTiff has a dpi of 300 (res*2.54)
sharp(fixtures.inputTiff)
.tiff({
xres: (res),
yres: (res)
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
sharp(fixtures.outputTiff).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(metadata.density, res * 2.54); // convert to dpi
rimraf(fixtures.outputTiff, done);
});
});
});
it('TIFF setting xres and yres on buffer', function (done) {
const res = 1000.0; // inputTiff has a dpi of 300 (res*2.54)
sharp(fixtures.inputTiff)
.tiff({
xres: (res),
yres: (res)
})
.toBuffer(function (err, data, info) {
if (err) throw err;
sharp(data).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(metadata.density, res * 2.54); // convert to dpi
done();
});
});
});
it('TIFF invalid xres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ xres: '1000.0' });
});
});
it('TIFF invalid yres value should throw an error', function () {
assert.throws(function () {
sharp().tiff({ yres: '1000.0' });
});
});
it('TIFF lzw compression with horizontal predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'lzw',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF ccittfax4 compression shrinks b-w test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiff).size;
sharp(fixtures.inputTiff)
.toColourspace('b-w')
.tiff({
squash: true,
compression: 'ccittfax4'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression with horizontal predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression with float predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'float'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF deflate compression without predictor shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF jpeg compression shrinks test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'jpeg'
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF none compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'none' });
});
});
it('TIFF lzw compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'lzw' });
});
});
it('TIFF deflate compression does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ compression: 'deflate' });
});
});
it('TIFF invalid compression option throws', function () {
assert.throws(function () {
sharp().tiff({ compression: 0 });
});
});
it('TIFF invalid compression option throws', function () {
assert.throws(function () {
sharp().tiff({ compression: 'a' });
});
});
it('TIFF invalid predictor option throws', function () {
assert.throws(function () {
sharp().tiff({ predictor: 'a' });
});
});
it('TIFF horizontal predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'horizontal' });
});
});
it('TIFF float predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'float' });
});
});
it('TIFF none predictor does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ predictor: 'none' });
});
});
it('TIFF tiled pyramid image without compression enlarges test file', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'none',
pyramid: true,
tile: true,
tileHeight: 256,
tileWidth: 256
})
.toFile(fixtures.outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size > startSize);
rimraf(fixtures.outputTiff, done);
});
});
it('TIFF pyramid true value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ pyramid: true });
});
});
it('Invalid TIFF pyramid value throws error', function () {
assert.throws(function () {
sharp().tiff({ pyramid: 'true' });
});
});
it('Invalid TIFF tile value throws error', function () {
assert.throws(function () {
sharp().tiff({ tile: 'true' });
});
});
it('TIFF tile true value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tile: true });
});
});
it('Valid TIFF tileHeight value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tileHeight: 512 });
});
});
it('Valid TIFF tileWidth value does not throw error', function () {
assert.doesNotThrow(function () {
sharp().tiff({ tileWidth: 512 });
});
});
it('Invalid TIFF tileHeight value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileHeight: '256' });
});
});
it('Invalid TIFF tileWidth value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileWidth: '256' });
});
});
it('Invalid TIFF tileHeight value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileHeight: 0 });
});
});
it('Invalid TIFF tileWidth value throws error', function () {
assert.throws(function () {
sharp().tiff({ tileWidth: 0 });
});
});
it('TIFF file input with invalid page fails gracefully', function (done) {
sharp(fixtures.inputTiffMultipage, { page: 2 })
.toBuffer(function (err) {
assert.strictEqual(true, !!err);
done();
});
});
it('TIFF buffer input with invalid page fails gracefully', function (done) {
sharp(fs.readFileSync(fixtures.inputTiffMultipage), { page: 2 })
.toBuffer(function (err) {
assert.strictEqual(true, !!err);
done();
});
});
});
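
A condensed sketch of the tiff() output options exercised above (compression, predictor and 1-bit squash); squash needs a single-band uchar image, hence the toColourspace('b-w') in the tests:

const sharp = require('sharp');

sharp('input.tif')               // hypothetical input
  .toColourspace('b-w')
  .tiff({
    compression: 'lzw',          // 'none', 'jpeg', 'deflate', 'lzw' or 'ccittfax4'
    predictor: 'horizontal',     // 'none', 'horizontal' or 'float'
    squash: true                 // write 1 bit per pixel
  })
  .toFile('output.tif')          // hypothetical output
  .catch(err => console.error(err));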

test/unit/webp.js (new file, 78 lines)

@@ -0,0 +1,78 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('WebP', function () {
it('WebP output', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.toFormat(sharp.format.webp)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('Invalid WebP quality throws error', function () {
assert.throws(function () {
sharp().webp({ quality: 101 });
});
});
it('Invalid WebP alpha quality throws error', function () {
assert.throws(function () {
sharp().webp({ alphaQuality: 101 });
});
});
it('should work for webp alpha quality', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ alphaQuality: 80 })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-alpha-80.webp'), data, done);
});
});
it('should work for webp lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ lossless: true })
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-lossless.webp'), data, done);
});
});
it('should work for webp near-lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50 })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
it('should use near-lossless when both lossless and nearLossless are specified', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({ nearLossless: true, quality: 50, lossless: true })
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
});
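
Finally, a condensed sketch of the webp() options covered above; per the last test, nearLossless takes precedence when both it and lossless are set:

const sharp = require('sharp');

sharp('input.png')               // hypothetical input with alpha
  .webp({
    quality: 80,
    alphaQuality: 60,            // separate quality for the alpha channel
    nearLossless: true           // near-lossless preprocessing; wins over lossless: true
  })
  .toBuffer()
  .catch(err => console.error(err));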