Compare commits

..

51 Commits

Author SHA1 Message Date
Lovell Fuller
019e6a1bfe Release v0.21.0 2018-10-04 13:03:30 +01:00
Lovell Fuller
1565e58fcf Update benchmark results ahead of v0.21.0
Remove lwip and images as they lack Node 10 support
2018-10-04 12:41:01 +01:00
Lovell Fuller
c22e2a17ef Update benchmark dependencies 2018-10-04 11:01:09 +01:00
Lovell Fuller
fd2a10ccea Threshold trim tests for non-turbo libjpeg 2018-10-02 20:33:26 +01:00
Lovell Fuller
0725378257 Add trimOffsetLeft, trimOffsetTop to trim response #914 2018-10-02 20:16:00 +01:00
Lovell Fuller
c431909f35 Refresh resize docs to ensure options are present 2018-10-02 18:45:08 +01:00
Lovell Fuller
db4df6f0b2 Add size to metadata response (Stream/Buffer only) #695 2018-10-02 18:05:08 +01:00
Lovell Fuller
17f942c802 Add chromaSubsampling and isProgressive to metadata #1186 2018-10-02 17:11:25 +01:00
Lovell Fuller
60438ebfe5 Ensure precision of trim threshold, update docs #914 2018-10-02 12:45:37 +01:00
Lovell Fuller
21fbe546b8 Switch from custom trim op to vips_find_trim #914 2018-10-02 11:24:32 +01:00
Lovell Fuller
11900945eb Bump dependency versions to latest 2018-10-02 11:23:49 +01:00
Lovell Fuller
ea5270221b Add new leak suppression for nodejs/libuv 2018-10-02 11:23:15 +01:00
Lovell Fuller
a64844689e Deprecate background, add op-specific prop to resize/extend/flatten #1392 2018-10-01 20:58:55 +01:00
Lovell Fuller
6007e13a22 Improve/increase installation error handling 2018-10-01 11:06:12 +01:00
Lovell Fuller
c3274e480b Deprecate crop, embed, ignoreAspectRatio, max, min, withoutEnlargement.
These become options of the resize operation instead. #1135
2018-09-30 20:16:27 +01:00
Miguel Aragón
3c54eeda5b Use more universal English to improve global understanding 2018-09-28 16:04:56 +01:00
Lovell Fuller
6236e4b97d Changelog entry and credit for #1385 2018-09-27 21:01:41 +01:00
freezy
796738da65 Add support for arbitrary rotation angle via vips_rotate (#1385) 2018-09-27 18:00:36 +01:00
freezy
37d385fafa Move background extraction into separate method (#1383) 2018-09-24 10:00:00 +01:00
Lovell Fuller
db2af42ee7 File extend, extract and trim ops under 'resize' #1135
Should make them easier to find in the docs
2018-09-22 14:52:08 +01:00
Lovell Fuller
24b42ef192 Tests: Move all setup to named file 2018-09-22 13:54:20 +01:00
Lovell Fuller
2ce166ab0a Update links to libvips, now in its own GitHub org 2018-09-21 20:33:01 +01:00
Lovell Fuller
71755b69e4 Remove duplicate libvips version/platform check 2018-09-21 20:20:40 +01:00
Lovell Fuller
1106aac2d8 Tests: tweak colour thresholds for (non-turbo) libjpeg compat 2018-09-21 19:51:47 +01:00
Lovell Fuller
93aac660a3 Tests: avoid shrink-on-load for (non-turbo) libjpeg compat 2018-09-21 19:34:52 +01:00
Lovell Fuller
0ce8ad7130 Enable CI on OS X 2018-09-20 10:41:24 +01:00
Lovell Fuller
deacd553bf Enable SIMD convolution by default #1213 2018-09-19 21:42:40 +01:00
Lovell Fuller
c8ff7e11a9 Upgrade to libvips v8.7.0
Drop Node 4 support
Add experimental musl prebuild for Node 8 and 10
2018-09-19 21:38:09 +01:00
Lovell Fuller
4cff62258c Improve smartcrop saliency testing/reporting 2018-09-05 22:49:31 +01:00
Lovell Fuller
0144358afb Release v0.20.8 2018-09-05 08:44:01 +01:00
Lovell Fuller
136097efe7 Downgrade nyc for continued Node 4 support 2018-09-04 17:07:10 +01:00
Lovell Fuller
374c6959d7 Changelog and credit for #1358 #1362 2018-09-04 16:39:24 +01:00
Axel Eirola
7d48a5ccf4 Allow floating point density input (#1362)
Metadata output will still remain integer
2018-09-01 08:58:30 +01:00
ajhool
bf3254cb16 Install: avoid race conditions when creating directories (#1358) 2018-08-29 09:20:26 +01:00
Lovell Fuller
5bed3a7d52 Release v0.20.7 2018-08-21 11:50:14 +01:00
Lovell Fuller
ece111280b Use copy+unlink if rename fails during install #1345 2018-08-20 15:14:31 +01:00
Lovell Fuller
a15a9b956b Release v0.20.6 2018-08-20 11:40:10 +01:00
Lovell Fuller
42860c2f83 Changelog, credit and doc refresh for #1342 2018-08-19 10:43:25 +01:00
Alun Davies
b5b95e5ae1 Expose depth option for tile-based output (#1342) 2018-08-18 15:09:53 +01:00
Lovell Fuller
d705cffdd6 Ensure extractChannel works with 16-bit images #1330 2018-08-12 20:22:39 +01:00
Rodrigo Alviani
23a4bc103e Docs: correct quality option in overlayWith example (#1325) 2018-08-08 08:42:18 +01:00
Lovell Fuller
c14434f9e7 Add removeAlpha op, removes alpha channel if any #1248 2018-08-07 20:32:11 +01:00
Lovell Fuller
25bd2cea3e Add experimental entropy field to stats response 2018-08-06 15:41:27 +01:00
Lovell Fuller
532de4ecab Cache libvips binaries to reduce re-install time #1301 2018-08-05 10:31:41 +01:00
Lovell Fuller
bfdd27eeef Doc refresh and dependency bumps 2018-08-05 09:42:09 +01:00
Lovell Fuller
bd9f238ab4 Improve install time error messages for FreeBSD #1310 2018-08-04 22:27:32 +01:00
Lovell Fuller
75556bb57c Ensure vendor platform mismatch throws error #1303 2018-08-04 21:34:11 +01:00
thegareth
2de062a34a Docs: update the "make a transparent image" example (#1316)
Alpha for colour is between 0-1, not 0-255.
2018-08-02 09:42:25 +01:00
Lovell Fuller
4589b15dea Changelog and credit for #1285 #1290 2018-07-10 16:12:16 +01:00
Sylvain Dumont
8b75ce6786 Allow full WebP alphaQuality range of 0-100 (#1290) 2018-07-10 15:58:17 +01:00
Espen Hovlandsdal
7bbc5176a1 Expose mozjpeg quant_table flag (#1285) 2018-07-10 15:56:05 +01:00
85 changed files with 3985 additions and 1585 deletions

View File

@@ -1,34 +1,58 @@
language: node_js
matrix: matrix:
include: include:
- os: linux - name: "Linux (glibc) - Node 6"
dist: trusty os: linux
sudo: false
node_js: "4"
- os: linux
dist: trusty dist: trusty
sudo: false sudo: false
language: node_js
node_js: "6" node_js: "6"
- os: linux - name: "Linux (glibc) - Node 8"
os: linux
dist: trusty dist: trusty
sudo: false sudo: false
language: node_js
node_js: "8" node_js: "8"
- os: linux - name: "Linux (glibc) - Node 10"
os: linux
dist: trusty dist: trusty
sudo: false sudo: false
language: node_js
node_js: "10" node_js: "10"
- os: osx after_success:
osx_image: xcode8.3 - npm install coveralls
node_js: "4" - cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js
- os: osx - name: "Linux (musl) - Node 8"
osx_image: xcode8.3 os: linux
dist: trusty
sudo: true
language: minimal
before_install:
- sudo docker run -dit --name sharp --env CI --env PREBUILD_TOKEN --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:8-alpine
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "Linux (musl) - Node 10"
os: linux
dist: trusty
sudo: true
language: minimal
before_install:
- sudo docker run -dit --name sharp --env CI --env PREBUILD_TOKEN --volume "${PWD}:/mnt/sharp" --workdir /mnt/sharp node:10-alpine
- sudo docker exec sharp apk add build-base git python2 --update-cache
install: sudo docker exec sharp sh -c "npm install --unsafe-perm"
script: sudo docker exec sharp sh -c "npm test"
- name: "OS X - Node 6"
os: osx
osx_image: xcode9.2
language: node_js
node_js: "6" node_js: "6"
- os: osx - name: "OS X - Node 8"
osx_image: xcode8.3 os: osx
osx_image: xcode9.2
language: node_js
node_js: "8" node_js: "8"
- os: osx - name: "OS X - Node 10"
osx_image: xcode8.3 os: osx
osx_image: xcode9.2
language: node_js
node_js: "10" node_js: "10"
after_success:
- npm install coveralls
- cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js

View File

@@ -14,7 +14,7 @@ New bugs are assigned a `triage` label whilst under investigation.
If a [similar request](https://github.com/lovell/sharp/labels/enhancement) exists, it's probably fastest to add a comment to it about your requirement. If a [similar request](https://github.com/lovell/sharp/labels/enhancement) exists, it's probably fastest to add a comment to it about your requirement.
Implementation is usually straightforward if _libvips_ [already supports](https://jcupitt.github.io/libvips/API/current/) the feature you need. Implementation is usually straightforward if _libvips_ [already supports](https://libvips.github.io/libvips/API/current/) the feature you need.
## Submit a Pull Request to fix a bug ## Submit a Pull Request to fix a bug

View File

@@ -21,8 +21,8 @@ Lanczos resampling ensures quality is not sacrificed for speed.
As well as image resizing, operations such as As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available. rotation, extraction, compositing and gamma correction are available.
Most modern 64-bit OS X, Windows and Linux (glibc) systems running Most modern 64-bit OS X, Windows and Linux systems running
Node versions 4, 6, 8 and 10 Node versions 6, 8 and 10
do not require any additional install or runtime dependencies. do not require any additional install or runtime dependencies.
## Examples ## Examples
@@ -78,7 +78,7 @@ Visit [sharp.pixelplumbing.com](http://sharp.pixelplumbing.com/) for complete
A [guide for contributors](https://github.com/lovell/sharp/blob/master/CONTRIBUTING.md) A [guide for contributors](https://github.com/lovell/sharp/blob/master/CONTRIBUTING.md)
covers reporting bugs, requesting features and submitting code changes. covers reporting bugs, requesting features and submitting code changes.
### Licence ### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors. Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.

View File

@@ -4,7 +4,6 @@ build: off
platform: x64 platform: x64
environment: environment:
matrix: matrix:
- nodejs_version: "4"
- nodejs_version: "6" - nodejs_version: "6"
- nodejs_version: "8" - nodejs_version: "8"
- nodejs_version: "10" - nodejs_version: "10"

View File

@@ -128,9 +128,11 @@
'../vendor/lib/libcairo.so', '../vendor/lib/libcairo.so',
'../vendor/lib/libcroco-0.6.so', '../vendor/lib/libcroco-0.6.so',
'../vendor/lib/libexif.so', '../vendor/lib/libexif.so',
'../vendor/lib/libexpat.so',
'../vendor/lib/libffi.so', '../vendor/lib/libffi.so',
'../vendor/lib/libfontconfig.so', '../vendor/lib/libfontconfig.so',
'../vendor/lib/libfreetype.so', '../vendor/lib/libfreetype.so',
'../vendor/lib/libfribidi.so',
'../vendor/lib/libgdk_pixbuf-2.0.so', '../vendor/lib/libgdk_pixbuf-2.0.so',
'../vendor/lib/libgif.so', '../vendor/lib/libgif.so',
'../vendor/lib/libgio-2.0.so', '../vendor/lib/libgio-2.0.so',
@@ -138,6 +140,7 @@
'../vendor/lib/libgsf-1.so', '../vendor/lib/libgsf-1.so',
'../vendor/lib/libgthread-2.0.so', '../vendor/lib/libgthread-2.0.so',
'../vendor/lib/libharfbuzz.so', '../vendor/lib/libharfbuzz.so',
'../vendor/lib/libharfbuzz-subset.so.0',
'../vendor/lib/libjpeg.so', '../vendor/lib/libjpeg.so',
'../vendor/lib/liblcms2.so', '../vendor/lib/liblcms2.so',
'../vendor/lib/liborc-0.4.so', '../vendor/lib/liborc-0.4.so',
@@ -149,6 +152,8 @@
'../vendor/lib/librsvg-2.so', '../vendor/lib/librsvg-2.so',
'../vendor/lib/libtiff.so', '../vendor/lib/libtiff.so',
'../vendor/lib/libwebp.so', '../vendor/lib/libwebp.so',
'../vendor/lib/libwebpdemux.so',
'../vendor/lib/libwebpmux.so',
'../vendor/lib/libxml2.so', '../vendor/lib/libxml2.so',
'../vendor/lib/libz.so', '../vendor/lib/libz.so',
# Ensure runtime linking is relative to sharp.node # Ensure runtime linking is relative to sharp.node

View File

@@ -1,5 +1,21 @@
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> <!-- Generated by documentation.js. Update this documentation by updating the source code. -->
## removeAlpha
Remove alpha channel, if any. This is a no-op if the image does not have an alpha channel.
### Examples
```javascript
sharp('rgba.png')
.removeAlpha()
.toFile('rgb.png', function(err, info) {
// rgb.png is a 3 channel image without an alpha channel
});
```
Returns **Sharp**
## extractChannel ## extractChannel
Extract a single channel from a multi-channel image. Extract a single channel from a multi-channel image.

View File

@@ -1,23 +1,5 @@
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> <!-- Generated by documentation.js. Update this documentation by updating the source code. -->
## background
Set the background for the `embed`, `flatten` and `extend` operations.
The default background is `{r: 0, g: 0, b: 0, alpha: 1}`, black without transparency.
Delegates to the _color_ module, which can throw an Error
but is liberal in what it accepts, clipping values to sensible min/max.
The alpha value is a float between `0` (transparent) and `1` (opaque).
### Parameters
- `rgba` **([String][1] \| [Object][2])** parsed by the [color][3] module to extract values for red, green, blue and alpha.
- Throws **[Error][4]** Invalid parameter
Returns **Sharp**
## tint ## tint
Tint the image using the provided chroma while preserving the image luminance. Tint the image using the provided chroma while preserving the image luminance.
@@ -94,4 +76,4 @@ Returns **Sharp**
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean [5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[6]: https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568 [6]: https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568

View File

@@ -18,7 +18,7 @@ If the overlay image contains an alpha channel then composition with premultipli
- `options.left` **[Number][4]?** the pixel offset from the left edge. - `options.left` **[Number][4]?** the pixel offset from the left edge.
- `options.tile` **[Boolean][5]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`) - `options.tile` **[Boolean][5]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `options.cutout` **[Boolean][5]** set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. (optional, default `false`) - `options.cutout` **[Boolean][5]** set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. (optional, default `false`)
- `options.density` **[Number][4]** integral number representing the DPI for vector overlay image. (optional, default `72`) - `options.density` **[Number][4]** number representing the DPI for vector overlay image. (optional, default `72`)
- `options.raw` **[Object][3]?** describes overlay when using raw pixel data. - `options.raw` **[Object][3]?** describes overlay when using raw pixel data.
- `options.raw.width` **[Number][4]?** - `options.raw.width` **[Number][4]?**
- `options.raw.height` **[Number][4]?** - `options.raw.height` **[Number][4]?**
@@ -40,8 +40,7 @@ sharp('input.png')
.overlayWith('overlay.png', { gravity: sharp.gravity.southeast } ) .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
.sharpen() .sharpen()
.withMetadata() .withMetadata()
.quality(90) .webp( { quality: 90 } )
.webp()
.toBuffer() .toBuffer()
.then(function(outputBuffer) { .then(function(outputBuffer) {
// outputBuffer contains upside down, 300px wide, alpha channel flattened // outputBuffer contains upside down, 300px wide, alpha channel flattened

View File

@@ -12,7 +12,7 @@
- `options.failOnError` **[Boolean][4]** by default apply a "best effort" - `options.failOnError` **[Boolean][4]** by default apply a "best effort"
to decode images, even if the data is corrupt or invalid. Set this flag to true to decode images, even if the data is corrupt or invalid. Set this flag to true
if you'd rather halt processing and raise an error when loading invalid images. (optional, default `false`) if you'd rather halt processing and raise an error when loading invalid images. (optional, default `false`)
- `options.density` **[Number][5]** integral number representing the DPI for vector images. (optional, default `72`) - `options.density` **[Number][5]** number representing the DPI for vector images. (optional, default `72`)
- `options.page` **[Number][5]** page number to extract for multi-page input (GIF, TIFF) (optional, default `0`) - `options.page` **[Number][5]** page number to extract for multi-page input (GIF, TIFF) (optional, default `0`)
- `options.raw` **[Object][3]?** describes raw pixel input image data. See `raw()` for pixel ordering. - `options.raw` **[Object][3]?** describes raw pixel input image data. See `raw()` for pixel ordering.
- `options.raw.width` **[Number][5]?** - `options.raw.width` **[Number][5]?**
@@ -55,7 +55,7 @@ sharp({
width: 300, width: 300,
height: 200, height: 200,
channels: 4, channels: 4,
background: { r: 255, g: 0, b: 0, alpha: 128 } background: { r: 255, g: 0, b: 0, alpha: 0.5 }
} }
}) })
.png() .png()

View File

@@ -25,12 +25,15 @@ Fast access to (uncached) image metadata without decoding any compressed image d
A Promises/A+ promise is returned when `callback` is not provided. A Promises/A+ promise is returned when `callback` is not provided.
- `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg` - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
- `size`: Total size of image in bytes, for Stream and Buffer input only
- `width`: Number of pixels wide (EXIF orientation is not taken into consideration) - `width`: Number of pixels wide (EXIF orientation is not taken into consideration)
- `height`: Number of pixels high (EXIF orientation is not taken into consideration) - `height`: Number of pixels high (EXIF orientation is not taken into consideration)
- `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][1] - `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][1]
- `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK - `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
- `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...][2] - `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...][2]
- `density`: Number of pixels per inch (DPI), if present - `density`: Number of pixels per inch (DPI), if present
- `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
- `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
- `hasProfile`: Boolean indicating the presence of an embedded ICC profile - `hasProfile`: Boolean indicating the presence of an embedded ICC profile
- `hasAlpha`: Boolean indicating the presence of an alpha transparency channel - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
- `orientation`: Number value of the EXIF Orientation header, if present - `orientation`: Number value of the EXIF Orientation header, if present
@@ -79,6 +82,7 @@ A Promise is returned when `callback` is not provided.
- `maxX` (x-coordinate of one of the pixels where the maximum lies) - `maxX` (x-coordinate of one of the pixels where the maximum lies)
- `maxY` (y-coordinate of one of the pixels where the maximum lies) - `maxY` (y-coordinate of one of the pixels where the maximum lies)
- `isOpaque`: Value to identify if the image is opaque or transparent, based on the presence and use of alpha channel - `isOpaque`: Value to identify if the image is opaque or transparent, based on the presence and use of alpha channel
- `entropy`: Histogram-based estimation of greyscale entropy, discarding alpha channel if any (experimental)
### Parameters ### Parameters
@@ -125,9 +129,9 @@ The default behaviour _before_ function call is `false`, meaning the libvips acc
Returns **Sharp** Returns **Sharp**
[1]: https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L636 [1]: https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L636
[2]: https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L672 [2]: https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L672
[3]: https://www.npmjs.com/package/icc [3]: https://www.npmjs.com/package/icc

View File

@@ -5,9 +5,12 @@
Rotate the output image by either an explicit angle Rotate the output image by either an explicit angle
or auto-orient based on the EXIF `Orientation` tag. or auto-orient based on the EXIF `Orientation` tag.
If an angle is provided, it is converted to a valid 90/180/270deg rotation. If an angle is provided, it is converted to a valid positive degree rotation.
For example, `-450` will produce a 270deg rotation. For example, `-450` will produce a 270deg rotation.
When rotating by an angle other than a multiple of 90,
the background colour can be provided with the `background` option.
If no angle is provided, it is determined from the EXIF data. If no angle is provided, it is determined from the EXIF data.
Mirroring is supported and may infer the use of a flip operation. Mirroring is supported and may infer the use of a flip operation.
@@ -18,7 +21,9 @@ for example `rotate(x).extract(y)` will produce a different result to `extract(y
### Parameters ### Parameters
- `angle` **[Number][1]** angle of rotation, must be a multiple of 90. (optional, default `auto`) - `angle` **[Number][1]** angle of rotation. (optional, default `auto`)
- `options` **[Object][2]?** if present, is an Object with optional attributes.
- `options.background` **([String][3] \| [Object][2])** parsed by the [color][4] module to extract values for red, green, blue and alpha. (optional, default `"#000000"`)
### Examples ### Examples
@@ -34,47 +39,7 @@ const pipeline = sharp()
readableStream.pipe(pipeline); readableStream.pipe(pipeline);
``` ```
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp**
## extract
Extract a region of the image.
- Use `extract` before `resize` for pre-resize extraction.
- Use `extract` after `resize` for post-resize extraction.
- Use `extract` before and after for both.
### Parameters
- `options` **[Object][3]**
- `options.left` **[Number][1]** zero-indexed offset from left edge
- `options.top` **[Number][1]** zero-indexed offset from top edge
- `options.width` **[Number][1]** dimension of extracted image
- `options.height` **[Number][1]** dimension of extracted image
### Examples
```javascript
sharp(input)
.extract({ left: left, top: top, width: width, height: height })
.toFile(output, function(err) {
// Extract a region of the input image, saving in the same format.
});
```
```javascript
sharp(input)
.extract({ left: leftOffsetPre, top: topOffsetPre, width: widthPre, height: heightPre })
.resize(width, height)
.extract({ left: leftOffsetPost, top: topOffsetPost, width: widthPost, height: heightPost })
.toFile(output, function(err) {
// Extract a region, resize, then extract from the resized image
});
```
- Throws **[Error][2]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -85,7 +50,7 @@ The use of `flip` implies the removal of the EXIF `Orientation` tag, if any.
### Parameters ### Parameters
- `flip` **[Boolean][4]** (optional, default `true`) - `flip` **[Boolean][6]** (optional, default `true`)
Returns **Sharp** Returns **Sharp**
@@ -96,7 +61,7 @@ The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
### Parameters ### Parameters
- `flop` **[Boolean][4]** (optional, default `true`) - `flop` **[Boolean][6]** (optional, default `true`)
Returns **Sharp** Returns **Sharp**
@@ -114,7 +79,7 @@ Separate control over the level of sharpening in "flat" and "jagged" areas is av
- `jagged` **[Number][1]** the level of sharpening to apply to "jagged" areas. (optional, default `2.0`) - `jagged` **[Number][1]** the level of sharpening to apply to "jagged" areas. (optional, default `2.0`)
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -128,7 +93,7 @@ When used without parameters the default window is 3x3.
- `size` **[Number][1]** square mask size: size x size (optional, default `3`) - `size` **[Number][1]** square mask size: size x size (optional, default `3`)
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -143,59 +108,17 @@ When a `sigma` is provided, performs a slower, more accurate Gaussian blur.
- `sigma` **[Number][1]?** a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`. - `sigma` **[Number][1]?** a value between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp**
## extend
Extends/pads the edges of the image with the colour provided to the `background` method.
This operation will always occur after resizing and extraction, if any.
### Parameters
- `extend` **([Number][1] \| [Object][3])** single pixel count to add to all edges or an Object with per-edge counts
- `extend.top` **[Number][1]?**
- `extend.left` **[Number][1]?**
- `extend.bottom` **[Number][1]?**
- `extend.right` **[Number][1]?**
### Examples
```javascript
// Resize to 140 pixels wide, then add 10 transparent pixels
// to the top, left and right edges and 20 to the bottom edge
sharp(input)
.resize(140)
.background({r: 0, g: 0, b: 0, alpha: 0})
.extend({top: 10, bottom: 20, left: 10, right: 10})
...
```
- Throws **[Error][2]** Invalid parameters
Returns **Sharp** Returns **Sharp**
## flatten ## flatten
Merge alpha transparency channel, if any, with `background`. Merge alpha transparency channel, if any, with a background.
### Parameters ### Parameters
- `flatten` **[Boolean][4]** (optional, default `true`) - `options`
Returns **Sharp**
## trim
Trim "boring" pixels from all edges that contain values within a percentage similarity of the top-left pixel.
### Parameters
- `tolerance` **[Number][1]** value between 1 and 99 representing the percentage similarity. (optional, default `10`)
- Throws **[Error][2]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -212,7 +135,7 @@ when applying a gamma correction.
- `gamma` **[Number][1]** value between 1.0 and 3.0. (optional, default `2.2`) - `gamma` **[Number][1]** value between 1.0 and 3.0. (optional, default `2.2`)
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -222,7 +145,7 @@ Produce the "negative" of the image.
### Parameters ### Parameters
- `negate` **[Boolean][4]** (optional, default `true`) - `negate` **[Boolean][6]** (optional, default `true`)
Returns **Sharp** Returns **Sharp**
@@ -232,7 +155,7 @@ Enhance output image contrast by stretching its luminance to cover the full dyna
### Parameters ### Parameters
- `normalise` **[Boolean][4]** (optional, default `true`) - `normalise` **[Boolean][6]** (optional, default `true`)
Returns **Sharp** Returns **Sharp**
@@ -242,7 +165,7 @@ Alternative spelling of normalise.
### Parameters ### Parameters
- `normalize` **[Boolean][4]** (optional, default `true`) - `normalize` **[Boolean][6]** (optional, default `true`)
Returns **Sharp** Returns **Sharp**
@@ -252,10 +175,10 @@ Convolve the image with the specified kernel.
### Parameters ### Parameters
- `kernel` **[Object][3]** - `kernel` **[Object][2]**
- `kernel.width` **[Number][1]** width of the kernel in pixels. - `kernel.width` **[Number][1]** width of the kernel in pixels.
- `kernel.height` **[Number][1]** width of the kernel in pixels. - `kernel.height` **[Number][1]** width of the kernel in pixels.
- `kernel.kernel` **[Array][5]&lt;[Number][1]>** Array of length `width*height` containing the kernel values. - `kernel.kernel` **[Array][7]&lt;[Number][1]>** Array of length `width*height` containing the kernel values.
- `kernel.scale` **[Number][1]** the scale of the kernel in pixels. (optional, default `sum`) - `kernel.scale` **[Number][1]** the scale of the kernel in pixels. (optional, default `sum`)
- `kernel.offset` **[Number][1]** the offset of the kernel in pixels. (optional, default `0`) - `kernel.offset` **[Number][1]** the offset of the kernel in pixels. (optional, default `0`)
@@ -275,7 +198,7 @@ sharp(input)
}); });
``` ```
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -286,12 +209,12 @@ Any pixel value greater than or equal to the threshold value will be set to 255
### Parameters ### Parameters
- `threshold` **[Number][1]** a value in the range 0-255 representing the level at which the threshold will be applied. (optional, default `128`) - `threshold` **[Number][1]** a value in the range 0-255 representing the level at which the threshold will be applied. (optional, default `128`)
- `options` **[Object][3]?** - `options` **[Object][2]?**
- `options.greyscale` **[Boolean][4]** convert to single channel greyscale. (optional, default `true`) - `options.greyscale` **[Boolean][6]** convert to single channel greyscale. (optional, default `true`)
- `options.grayscale` **[Boolean][4]** alternative spelling for greyscale. (optional, default `true`) - `options.grayscale` **[Boolean][6]** alternative spelling for greyscale. (optional, default `true`)
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -304,16 +227,16 @@ the selected bitwise boolean `operation` between the corresponding pixels of the
### Parameters ### Parameters
- `operand` **([Buffer][6] \| [String][7])** Buffer containing image data or String containing the path to an image file. - `operand` **([Buffer][8] \| [String][3])** Buffer containing image data or String containing the path to an image file.
- `operator` **[String][7]** one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively. - `operator` **[String][3]** one of `and`, `or` or `eor` to perform that bitwise operation, like the C logic operators `&`, `|` and `^` respectively.
- `options` **[Object][3]?** - `options` **[Object][2]?**
- `options.raw` **[Object][3]?** describes operand when using raw pixel data. - `options.raw` **[Object][2]?** describes operand when using raw pixel data.
- `options.raw.width` **[Number][1]?** - `options.raw.width` **[Number][1]?**
- `options.raw.height` **[Number][1]?** - `options.raw.height` **[Number][1]?**
- `options.raw.channels` **[Number][1]?** - `options.raw.channels` **[Number][1]?**
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
@@ -327,20 +250,22 @@ Apply the linear formula a \* input + b to the image (levels adjustment)
- `b` **[Number][1]** offset (optional, default `0.0`) - `b` **[Number][1]** offset (optional, default `0.0`)
- Throws **[Error][2]** Invalid parameters - Throws **[Error][5]** Invalid parameters
Returns **Sharp** Returns **Sharp**
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number [1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error [2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object [3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean [4]: https://www.npmjs.org/package/color
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array [5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[6]: https://nodejs.org/api/buffer.html [6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String [7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
[8]: https://nodejs.org/api/buffer.html

View File

@@ -121,6 +121,8 @@ Use these JPEG options for output image.
- `options.optimizeScans` **[Boolean][6]** alternative spelling of optimiseScans (optional, default `false`) - `options.optimizeScans` **[Boolean][6]** alternative spelling of optimiseScans (optional, default `false`)
- `options.optimiseCoding` **[Boolean][6]** optimise Huffman coding tables (optional, default `true`) - `options.optimiseCoding` **[Boolean][6]** optimise Huffman coding tables (optional, default `true`)
- `options.optimizeCoding` **[Boolean][6]** alternative spelling of optimiseCoding (optional, default `true`) - `options.optimizeCoding` **[Boolean][6]** alternative spelling of optimiseCoding (optional, default `true`)
- `options.quantisationTable` **[Number][8]** quantization table to use, integer 0-8, requires mozjpeg (optional, default `0`)
- `options.quantizationTable` **[Number][8]** alternative spelling of quantisationTable (optional, default `0`)
- `options.force` **[Boolean][6]** force JPEG output, otherwise attempt to use input format (optional, default `true`) - `options.force` **[Boolean][6]** force JPEG output, otherwise attempt to use input format (optional, default `true`)
### Examples ### Examples
@@ -276,6 +278,7 @@ Warning: multiple sharp instances concurrently producing tile output can expose
- `tile.size` **[Number][8]** tile size in pixels, a value between 1 and 8192. (optional, default `256`) - `tile.size` **[Number][8]** tile size in pixels, a value between 1 and 8192. (optional, default `256`)
- `tile.overlap` **[Number][8]** tile overlap in pixels, a value between 0 and 8192. (optional, default `0`) - `tile.overlap` **[Number][8]** tile overlap in pixels, a value between 0 and 8192. (optional, default `0`)
- `tile.angle` **[Number][8]** tile angle of rotation, must be a multiple of 90. (optional, default `0`) - `tile.angle` **[Number][8]** tile angle of rotation, must be a multiple of 90. (optional, default `0`)
- `tile.depth` **[String][1]?** how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout.
- `tile.container` **[String][1]** tile container, with value `fs` (filesystem) or `zip` (compressed file). (optional, default `'fs'`) - `tile.container` **[String][1]** tile container, with value `fs` (filesystem) or `zip` (compressed file). (optional, default `'fs'`)
- `tile.layout` **[String][1]** filesystem layout, possible values are `dz`, `zoomify` or `google`. (optional, default `'dz'`) - `tile.layout` **[String][1]** filesystem layout, possible values are `dz`, `zoomify` or `google`. (optional, default `'dz'`)

View File

@@ -2,181 +2,231 @@
## resize ## resize
Resize image to `width` x `height`. Resize image to `width`, `height` or `width x height`.
By default, the resized image is centre cropped to the exact size specified.
Possible kernels are: When both a `width` and `height` are provided, the possible methods by which the image should **fit** these are:
- `nearest`: Use [nearest neighbour interpolation][1]. - `cover`: Crop to cover both provided dimensions (the default).
- `cubic`: Use a [Catmull-Rom spline][2]. - `contain`: Embed within both provided dimensions.
- `lanczos2`: Use a [Lanczos kernel][3] with `a=2`. - `fill`: Ignore the aspect ratio of the input and stretch to both provided dimensions.
- `lanczos3`: Use a Lanczos kernel with `a=3` (the default). - `inside`: Preserving aspect ratio, resize the image to be as large as possible while ensuring its dimensions are less than or equal to both those specified.
- `outside`: Preserving aspect ratio, resize the image to be as small as possible while ensuring its dimensions are greater than or equal to both those specified.
Some of these values are based on the [object-fit][1] CSS property.
### Parameters When using a `fit` of `cover` or `contain`, the default **position** is `centre`. Other options are:
- `width` **[Number][4]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height. - `sharp.position`: `top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`.
- `height` **[Number][4]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width. - `sharp.gravity`: `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` or `centre`.
- `options` **[Object][5]?** - `sharp.strategy`: `cover` only, dynamically crop using either the `entropy` or `attention` strategy.
- `options.kernel` **[String][6]** the kernel to use for image reduction. (optional, default `'lanczos3'`) Some of these values are based on the [object-position][2] CSS property.
- `options.fastShrinkOnLoad` **[Boolean][7]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)
### Examples
```javascript
sharp(inputBuffer)
.resize(200, 300, {
kernel: sharp.kernel.nearest
})
.background('white')
.embed()
.toFile('output.tiff')
.then(function() {
// output.tiff is a 200 pixels wide and 300 pixels high image
// containing a nearest-neighbour scaled version, embedded on a white canvas,
// of the image data in inputBuffer
});
```
- Throws **[Error][8]** Invalid parameters
Returns **Sharp**
## crop
Crop the resized image to the exact size specified, the default behaviour.
Possible attributes of the optional `sharp.gravity` are `north`, `northeast`, `east`, `southeast`, `south`,
`southwest`, `west`, `northwest`, `center` and `centre`.
The experimental strategy-based approach resizes so one dimension is at its target length The experimental strategy-based approach resizes so one dimension is at its target length
then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy. then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.
- `entropy`: focus on the region with the highest [Shannon entropy][9]. - `entropy`: focus on the region with the highest [Shannon entropy][3].
- `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones. - `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
Possible interpolation kernels are:
- `nearest`: Use [nearest neighbour interpolation][4].
- `cubic`: Use a [Catmull-Rom spline][5].
- `lanczos2`: Use a [Lanczos kernel][6] with `a=2`.
- `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
### Parameters ### Parameters
- `crop` **[String][6]** A member of `sharp.gravity` to crop to an edge/corner or `sharp.strategy` to crop dynamically. (optional, default `'centre'`) - `width` **[Number][7]?** pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
- `height` **[Number][7]?** pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
- `options` **[Object][8]?**
- `options.width` **[String][9]?** alternative means of specifying `width`. If both are present this takes priority.
- `options.height` **[String][9]?** alternative means of specifying `height`. If both are present this takes priority.
- `options.fit` **[String][9]** how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`. (optional, default `'cover'`)
- `options.position` **[String][9]** position, gravity or strategy to use when `fit` is `cover` or `contain`. (optional, default `'centre'`)
- `options.background` **([String][9] \| [Object][8])** background colour when using a `fit` of `contain`, parsed by the [color][10] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
- `options.kernel` **[String][9]** the kernel to use for image reduction. (optional, default `'lanczos3'`)
- `options.withoutEnlargement` **[Boolean][11]** do not enlarge if the width _or_ height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option. (optional, default `false`)
- `options.fastShrinkOnLoad` **[Boolean][11]** take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. (optional, default `true`)
### Examples ### Examples
```javascript
sharp(input)
.resize({ width: 100 })
.toBuffer()
.then(data => {
// 100 pixels wide, auto-scaled height
});
```
```javascript
sharp(input)
.resize({ height: 100 })
.toBuffer()
.then(data => {
// 100 pixels high, auto-scaled width
});
```
```javascript
sharp(input)
.resize(200, 300, {
kernel: sharp.kernel.nearest,
fit: 'contain',
position: 'right top',
background: { r: 255, g: 255, b: 255, alpha: 0.5 }
})
.toFile('output.png')
.then(() => {
// output.png is a 200 pixels wide and 300 pixels high image
// containing a nearest-neighbour scaled version
// contained within the north-east corner of a semi-transparent white canvas
});
```
```javascript ```javascript
const transformer = sharp() const transformer = sharp()
.resize(200, 200) .resize({
.crop(sharp.strategy.entropy) width: 200,
.on('error', function(err) { height: 200,
console.log(err); fit: sharp.fit.cover,
position: sharp.strategy.entropy
}); });
// Read image data from readableStream // Read image data from readableStream
// Write 200px square auto-cropped image data to writableStream // Write 200px square auto-cropped image data to writableStream
readableStream.pipe(transformer).pipe(writableStream); readableStream
.pipe(transformer)
.pipe(writableStream);
``` ```
- Throws **[Error][8]** Invalid parameters
Returns **Sharp**
## embed
Preserving aspect ratio, resize the image to the maximum `width` or `height` specified
then embed on a background of the exact `width` and `height` specified.
If the background contains an alpha value then WebP and PNG format output images will
contain an alpha channel, even when the input image does not.
### Parameters
- `embed` **[String][6]** A member of `sharp.gravity` to embed to an edge/corner. (optional, default `'centre'`)
### Examples
```javascript ```javascript
sharp('input.gif') sharp(input)
.resize(200, 300) .resize(200, 200, {
.background({r: 0, g: 0, b: 0, alpha: 0}) fit: sharp.fit.inside,
.embed() withoutEnlargement: true
.toFormat(sharp.format.webp) })
.toBuffer(function(err, outputBuffer) {
if (err) {
throw err;
}
// outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
// containing a scaled version, embedded on a transparent canvas, of input.gif
});
```
- Throws **[Error][8]** Invalid parameters
Returns **Sharp**
## max
Preserving aspect ratio, resize the image to be as large as possible
while ensuring its dimensions are less than or equal to the `width` and `height` specified.
Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
### Examples
```javascript
sharp(inputBuffer)
.resize(200, 200)
.max()
.toFormat('jpeg') .toFormat('jpeg')
.toBuffer() .toBuffer()
.then(function(outputBuffer) { .then(function(outputBuffer) {
// outputBuffer contains JPEG image data no wider than 200 pixels and no higher // outputBuffer contains JPEG image data
// than 200 pixels regardless of the inputBuffer image dimensions // no wider and no higher than 200 pixels
// and no larger than the input image
}); });
``` ```
Returns **Sharp** - Throws **[Error][12]** Invalid parameters
## min
Preserving aspect ratio, resize the image to be as small as possible
while ensuring its dimensions are greater than or equal to the `width` and `height` specified.
Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
Returns **Sharp** Returns **Sharp**
## ignoreAspectRatio ## extend
Ignoring the aspect ratio of the input, stretch the image to Extends/pads the edges of the image with the provided background colour.
the exact `width` and/or `height` provided via `resize`. This operation will always occur after resizing and extraction, if any.
Returns **Sharp**
## withoutEnlargement
Do not enlarge the output image if the input image width _or_ height are already less than the required dimensions.
This is equivalent to GraphicsMagick's `>` geometry option:
"_change the dimensions of the image only if its width or height exceeds the geometry specification_".
Use with `max()` to preserve the image's aspect ratio.
The default behaviour _before_ function call is `false`, meaning the image will be enlarged.
### Parameters ### Parameters
- `withoutEnlargement` **[Boolean][7]** (optional, default `true`) - `extend` **([Number][7] \| [Object][8])** single pixel count to add to all edges or an Object with per-edge counts
- `extend.top` **[Number][7]?**
- `extend.left` **[Number][7]?**
- `extend.bottom` **[Number][7]?**
- `extend.right` **[Number][7]?**
- `extend.background` **([String][9] \| [Object][8])** background colour, parsed by the [color][10] module, defaults to black without transparency. (optional, default `{r:0,g:0,b:0,alpha:1}`)
### Examples
```javascript
// Resize to 140 pixels wide, then add 10 transparent pixels
// to the top, left and right edges and 20 to the bottom edge
sharp(input)
.resize(140)
.extend({
top: 10,
bottom: 20,
left: 10,
right: 10,
background: { r: 0, g: 0, b: 0, alpha: 0 }
})
...
```
- Throws **[Error][12]** Invalid parameters
Returns **Sharp** Returns **Sharp**
[1]: http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation ## extract
[2]: https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline Extract a region of the image.
[3]: https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel - Use `extract` before `resize` for pre-resize extraction.
- Use `extract` after `resize` for post-resize extraction.
- Use `extract` before and after for both.
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number ### Parameters
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object - `options` **[Object][8]**
- `options.left` **[Number][7]** zero-indexed offset from left edge
- `options.top` **[Number][7]** zero-indexed offset from top edge
- `options.width` **[Number][7]** dimension of extracted image
- `options.height` **[Number][7]** dimension of extracted image
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String ### Examples
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean ```javascript
sharp(input)
.extract({ left: left, top: top, width: width, height: height })
.toFile(output, function(err) {
// Extract a region of the input image, saving in the same format.
});
```
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error ```javascript
sharp(input)
.extract({ left: leftOffsetPre, top: topOffsetPre, width: widthPre, height: heightPre })
.resize(width, height)
.extract({ left: leftOffsetPost, top: topOffsetPost, width: widthPost, height: heightPost })
.toFile(output, function(err) {
// Extract a region, resize, then extract from the resized image
});
```
[9]: https://en.wikipedia.org/wiki/Entropy_%28information_theory%29 - Throws **[Error][12]** Invalid parameters
Returns **Sharp**
## trim
Trim "boring" pixels from all edges that contain values similar to the top-left pixel.
The `info` response Object will contain `trimOffsetLeft` and `trimOffsetTop` properties.
### Parameters
- `threshold` **[Number][7]** the allowed difference from the top-left pixel, a number greater than zero. (optional, default `10`)
- Throws **[Error][12]** Invalid parameters
Returns **Sharp**
[1]: https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit
[2]: https://developer.mozilla.org/en-US/docs/Web/CSS/object-position
[3]: https://en.wikipedia.org/wiki/Entropy_%28information_theory%29
[4]: http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation
[5]: https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline
[6]: https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[9]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[10]: https://www.npmjs.org/package/color
[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[12]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error

View File

@@ -77,23 +77,20 @@ Requires libvips to have been compiled with liborc support.
Improves the performance of `resize`, `blur` and `sharpen` operations Improves the performance of `resize`, `blur` and `sharpen` operations
by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM NEON. by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM NEON.
This feature is currently off by default but future versions may reverse this.
Versions of liborc prior to 0.4.25 are known to segfault under heavy load.
### Parameters ### Parameters
- `simd` **[Boolean][2]** (optional, default `false`) - `simd` **[Boolean][2]** (optional, default `true`)
### Examples ### Examples
```javascript ```javascript
const simd = sharp.simd(); const simd = sharp.simd();
// simd is `true` if SIMD is currently enabled // simd is `true` if the runtime use of liborc is currently enabled
``` ```
```javascript ```javascript
const simd = sharp.simd(true); const simd = sharp.simd(false);
// attempts to enable the use of SIMD, returning true if available // prevent libvips from using liborc at runtime
``` ```
Returns **[Boolean][2]** Returns **[Boolean][2]**

View File

@@ -1,9 +1,96 @@
# Changelog # Changelog
### v0.21 - "*teeth*"
Requires libvips v8.7.0.
#### v0.21.0 - 4<sup>th</sup> October 2018
* Deprecate the following resize-related functions:
`crop`, `embed`, `ignoreAspectRatio`, `max`, `min` and `withoutEnlargement`.
Access to these is now via options passed to the `resize` function.
For example:
`embed('north')` is now `resize(width, height, { fit: 'contain', position: 'north' })`,
`crop('attention')` is now `resize(width, height, { fit: 'cover', position: 'attention' })`,
`max().withoutEnlargement()` is now `resize(width, height, { fit: 'inside', withoutEnlargement: true })`.
[#1135](https://github.com/lovell/sharp/issues/1135)
* Deprecate the `background` function.
Per-operation `background` options added to `resize`, `extend` and `flatten` operations.
[#1392](https://github.com/lovell/sharp/issues/1392)
* Add `size` to `metadata` response (Stream and Buffer input only).
[#695](https://github.com/lovell/sharp/issues/695)
* Switch from custom trim operation to `vips_find_trim`.
[#914](https://github.com/lovell/sharp/issues/914)
* Add `chromaSubsampling` and `isProgressive` properties to `metadata` response.
[#1186](https://github.com/lovell/sharp/issues/1186)
* Drop Node 4 support.
[#1212](https://github.com/lovell/sharp/issues/1212)
* Enable SIMD convolution by default.
[#1213](https://github.com/lovell/sharp/issues/1213)
* Add experimental prebuilt binaries for musl-based Linux.
[#1379](https://github.com/lovell/sharp/issues/1379)
* Add support for arbitrary rotation angle via vips_rotate.
[#1385](https://github.com/lovell/sharp/pull/1385)
[@freezy](https://github.com/freezy)
### v0.20 - "*prebuild*" ### v0.20 - "*prebuild*"
Requires libvips v8.6.1. Requires libvips v8.6.1.
#### v0.20.8 - 5<sup>th</sup> September 2018
* Avoid race conditions when creating directories during installation.
[#1358](https://github.com/lovell/sharp/pull/1358)
[@ajhool](https://github.com/ajhool)
* Accept floating point values for input density parameter.
[#1362](https://github.com/lovell/sharp/pull/1362)
[@aeirola](https://github.com/aeirola)
#### v0.20.7 - 21<sup>st</sup> August 2018
* Use copy+unlink if rename operation fails during installation.
[#1345](https://github.com/lovell/sharp/issues/1345)
#### v0.20.6 - 20<sup>th</sup> August 2018
* Add removeAlpha operation to remove alpha channel, if any.
[#1248](https://github.com/lovell/sharp/issues/1248)
* Expose mozjpeg quant_table flag.
[#1285](https://github.com/lovell/sharp/pull/1285)
[@rexxars](https://github.com/rexxars)
* Allow full WebP alphaQuality range of 0-100.
[#1290](https://github.com/lovell/sharp/pull/1290)
[@sylvaindumont](https://github.com/sylvaindumont)
* Cache libvips binaries to reduce re-install time.
[#1301](https://github.com/lovell/sharp/issues/1301)
* Ensure vendor platform mismatch throws error at install time.
[#1303](https://github.com/lovell/sharp/issues/1303)
* Improve install time error messages for FreeBSD users.
[#1310](https://github.com/lovell/sharp/issues/1310)
* Ensure extractChannel works with 16-bit images.
[#1330](https://github.com/lovell/sharp/issues/1330)
* Expose depth option for tile-based output.
[#1342](https://github.com/lovell/sharp/pull/1342)
[@alundavies](https://github.com/alundavies)
* Add experimental entropy field to stats response.
#### v0.20.5 - 27<sup>th</sup> June 2018 #### v0.20.5 - 27<sup>th</sup> June 2018
* Expose libjpeg optimize_coding flag. * Expose libjpeg optimize_coding flag.

View File

@@ -13,8 +13,8 @@ Lanczos resampling ensures quality is not sacrificed for speed.
As well as image resizing, operations such as As well as image resizing, operations such as
rotation, extraction, compositing and gamma correction are available. rotation, extraction, compositing and gamma correction are available.
Most 64-bit OS X, Windows and Linux (glibc) systems running Most modern 64-bit OS X, Windows and Linux systems running
Node versions 4, 6, 8 and 10 Node versions 6, 8 and 10
do not require any additional install or runtime dependencies. do not require any additional install or runtime dependencies.
[![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.png?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master) [![Test Coverage](https://coveralls.io/repos/lovell/sharp/badge.png?branch=master)](https://coveralls.io/r/lovell/sharp?branch=master)
@@ -37,7 +37,7 @@ and [Leaflet](https://github.com/turban/Leaflet.Zoomify).
### Fast ### Fast
This module is powered by the blazingly fast This module is powered by the blazingly fast
[libvips](https://github.com/jcupitt/libvips) image processing library, [libvips](https://github.com/libvips/libvips) image processing library,
originally created in 1989 at Birkbeck College originally created in 1989 at Birkbeck College
and currently maintained by and currently maintained by
[John Cupitt](https://github.com/jcupitt). [John Cupitt](https://github.com/jcupitt).
@@ -113,10 +113,16 @@ the help and code contributions of the following people:
* [Thomas Parisot](https://github.com/oncletom) * [Thomas Parisot](https://github.com/oncletom)
* [Nathan Graves](https://github.com/woolite64) * [Nathan Graves](https://github.com/woolite64)
* [Tom Lokhorst](https://github.com/tomlokhorst) * [Tom Lokhorst](https://github.com/tomlokhorst)
* [Espen Hovlandsdal](https://github.com/rexxars)
* [Sylvain Dumont](https://github.com/sylvaindumont)
* [Alun Davies](https://github.com/alundavies)
* [Aidan Hoolachan](https://github.com/ajhool)
* [Axel Eirola](https://github.com/aeirola)
* [Freezy](https://github.com/freezy)
Thank you! Thank you!
### Licence ### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors. Copyright 2013, 2014, 2015, 2016, 2017, 2018 Lovell Fuller and contributors.

View File

@@ -15,7 +15,7 @@ yarn add sharp
### Building from source ### Building from source
Pre-compiled binaries for sharp are provided for use with Pre-compiled binaries for sharp are provided for use with
Node versions 4, 6, 8 and 10 on Node versions 6, 8 and 10 on
64-bit Windows, OS X and Linux platforms. 64-bit Windows, OS X and Linux platforms.
Sharp will be built from source at install time when: Sharp will be built from source at install time when:
@@ -27,7 +27,7 @@ Sharp will be built from source at install time when:
Building from source requires: Building from source requires:
* C++11 compatible compiler such as gcc 4.8+, clang 3.0+ or MSVC 2013+ * C++11 compatible compiler such as gcc 4.8+, clang 3.0+ or MSVC 2013+
* [node-gyp](https://github.com/TooTallNate/node-gyp#installation) and its dependencies (includes Python) * [node-gyp](https://github.com/nodejs/node-gyp#installation) and its dependencies (includes Python 2.7)
## libvips ## libvips
@@ -36,13 +36,14 @@ Building from source requires:
[![Ubuntu 16.04 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp) [![Ubuntu 16.04 Build Status](https://travis-ci.org/lovell/sharp.png?branch=master)](https://travis-ci.org/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`. libvips and its dependencies are fetched and stored within `node_modules/sharp/vendor` during `npm install`.
This involves an automated HTTPS download of approximately 7MB. This involves an automated HTTPS download of approximately 8MB.
Most recent Linux-based operating systems with glibc running on x64 and ARMv6+ CPUs should "just work", e.g.: Most Linux-based (glibc, musl) operating systems running on x64 and ARMv6+ CPUs should "just work", e.g.:
* Debian 7+ * Debian 7+
* Ubuntu 14.04+ * Ubuntu 14.04+
* Centos 7+ * Centos 7+
* Alpine 3.8+ (Node 8 and 10)
* Fedora * Fedora
* openSUSE 13.2+ * openSUSE 13.2+
* Archlinux * Archlinux
@@ -61,9 +62,9 @@ and `LD_LIBRARY_PATH` at runtime.
This allows the use of newer versions of libvips with older versions of sharp. This allows the use of newer versions of libvips with older versions of sharp.
For 32-bit Intel CPUs and older Linux-based operating systems such as Centos 6, For 32-bit Intel CPUs and older Linux-based operating systems such as Centos 6,
it is recommended to install a system-wide installation of libvips from source: compiling libvips from source is recommended.
https://jcupitt.github.io/libvips/install.html#building-libvips-from-a-source-tarball [https://libvips.github.io/libvips/install.html#building-libvips-from-a-source-tarball](https://libvips.github.io/libvips/install.html#building-libvips-from-a-source-tarball)
#### Alpine Linux #### Alpine Linux
@@ -71,7 +72,7 @@ libvips is available in the
[testing repository](https://pkgs.alpinelinux.org/packages?name=vips-dev): [testing repository](https://pkgs.alpinelinux.org/packages?name=vips-dev):
```sh ```sh
apk add vips-dev fftw-dev --update-cache --repository https://dl-3.alpinelinux.org/alpine/edge/testing/ apk add vips-dev fftw-dev build-base --update-cache --repository https://dl-3.alpinelinux.org/alpine/edge/testing/
``` ```
The smaller stack size of musl libc means The smaller stack size of musl libc means
@@ -94,7 +95,7 @@ that it can be located using `pkg-config --modversion vips-cpp`.
[![Windows x64 Build Status](https://ci.appveyor.com/api/projects/status/pgtul704nkhhg6sg)](https://ci.appveyor.com/project/lovell/sharp) [![Windows x64 Build Status](https://ci.appveyor.com/api/projects/status/pgtul704nkhhg6sg)](https://ci.appveyor.com/project/lovell/sharp)
libvips and its dependencies are fetched and stored within `node_modules\sharp\vendor` during `npm install`. libvips and its dependencies are fetched and stored within `node_modules\sharp\vendor` during `npm install`.
This involves an automated HTTPS download of approximately 12MB. This involves an automated HTTPS download of approximately 13MB.
Only 64-bit (x64) `node.exe` is supported. Only 64-bit (x64) `node.exe` is supported.
@@ -117,9 +118,6 @@ https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=193528
### Heroku ### Heroku
libvips and its dependencies are fetched and stored within `node_modules\sharp\vendor` during `npm install`.
This involves an automated HTTPS download of approximately 7MB.
Set [NODE_MODULES_CACHE](https://devcenter.heroku.com/articles/nodejs-support#cache-behavior) Set [NODE_MODULES_CACHE](https://devcenter.heroku.com/articles/nodejs-support#cache-behavior)
to `false` when using the `yarn` package manager. to `false` when using the `yarn` package manager.
@@ -154,12 +152,13 @@ can be built using Docker.
```sh ```sh
rm -rf node_modules/sharp rm -rf node_modules/sharp
docker run -v "$PWD":/var/task lambci/lambda:build-nodejs6.10 npm install docker run -v "$PWD":/var/task lambci/lambda:build-nodejs8.10 npm install
``` ```
Set the Lambda runtime to Node.js 6.10. Set the Lambda runtime to Node.js 8.10.
To get the best performance select the largest memory available. A 1536 MB function provides ~12x more CPU time than a 128 MB function. To get the best performance select the largest memory available.
A 1536 MB function provides ~12x more CPU time than a 128 MB function.
### NW.js ### NW.js
@@ -171,7 +170,7 @@ nw-gyp rebuild --arch=x64 --target=[your nw version]
node node_modules/sharp/install/dll-copy node node_modules/sharp/install/dll-copy
``` ```
See also http://docs.nwjs.io/en/latest/For%20Users/Advanced/Use%20Native%20Node%20Modules/ [http://docs.nwjs.io/en/latest/For%20Users/Advanced/Use%20Native%20Node%20Modules/](http://docs.nwjs.io/en/latest/For%20Users/Advanced/Use%20Native%20Node%20Modules/)
### Build tools ### Build tools
@@ -199,28 +198,6 @@ and [Valgrind](http://valgrind.org/) have been used to test
the most popular web-based formats, as well as libvips itself, the most popular web-based formats, as well as libvips itself,
you are advised to perform your own testing and sandboxing. you are advised to perform your own testing and sandboxing.
ImageMagick in particular has a relatively large attack surface,
which can be partially mitigated with a
[policy.xml](http://www.imagemagick.org/script/resources.php)
configuration file to prevent the use of coders known to be vulnerable.
```xml
<policymap>
<policy domain="coder" rights="none" pattern="EPHEMERAL" />
<policy domain="coder" rights="none" pattern="URL" />
<policy domain="coder" rights="none" pattern="HTTPS" />
<policy domain="coder" rights="none" pattern="MVG" />
<policy domain="coder" rights="none" pattern="MSL" />
<policy domain="coder" rights="none" pattern="TEXT" />
<policy domain="coder" rights="none" pattern="SHOW" />
<policy domain="coder" rights="none" pattern="WIN" />
<policy domain="coder" rights="none" pattern="PLT" />
</policymap>
```
Set the `MAGICK_CONFIGURE_PATH` environment variable
to the directory containing the `policy.xml` file.
### Pre-compiled libvips binaries ### Pre-compiled libvips binaries
This module will attempt to download a pre-compiled bundle of libvips This module will attempt to download a pre-compiled bundle of libvips
@@ -236,7 +213,8 @@ SHARP_IGNORE_GLOBAL_LIBVIPS=1 npm install sharp
``` ```
Should you need to manually download and inspect these files, Should you need to manually download and inspect these files,
you can do so via https://github.com/lovell/sharp-libvips/releases you can do so via
[https://github.com/lovell/sharp-libvips/releases](https://github.com/lovell/sharp-libvips/releases)
Should you wish to install these from your own location, Should you wish to install these from your own location,
set the `SHARP_DIST_BASE_URL` environment variable, e.g. set the `SHARP_DIST_BASE_URL` environment variable, e.g.
@@ -265,6 +243,8 @@ Use of libraries under the terms of the LGPLv3 is via the
| expat | MIT Licence | | expat | MIT Licence |
| fontconfig | [fontconfig Licence](https://cgit.freedesktop.org/fontconfig/tree/COPYING) (BSD-like) | | fontconfig | [fontconfig Licence](https://cgit.freedesktop.org/fontconfig/tree/COPYING) (BSD-like) |
| freetype | [freetype Licence](http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT) (BSD-like) | | freetype | [freetype Licence](http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT) (BSD-like) |
| fribidi | LGPLv3 |
| gettext | LGPLv3 |
| giflib | MIT Licence | | giflib | MIT Licence |
| glib | LGPLv3 | | glib | LGPLv3 |
| harfbuzz | MIT Licence | | harfbuzz | MIT Licence |

View File

@@ -3,19 +3,17 @@
### Test environment ### Test environment
* AWS EC2 eu-west-1 [c5.large](https://aws.amazon.com/ec2/instance-types/c5/) (2x Xeon Platinum 8124M CPU @ 3.00GHz) * AWS EC2 eu-west-1 [c5.large](https://aws.amazon.com/ec2/instance-types/c5/) (2x Xeon Platinum 8124M CPU @ 3.00GHz)
* Ubuntu 17.10 (hvm:ebs-ssd, 20180102, ami-0741d47e) * Ubuntu 18.04 (hvm-ssd/ubuntu-bionic-18.04-amd64-server-20180912 ami-00035f41c82244dab)
* Node.js v8.9.4 * Node.js v10.11.0
### The contenders ### The contenders
* [jimp](https://www.npmjs.com/package/jimp) v0.2.28 - Image processing in pure JavaScript. Bilinear interpolation only. * [jimp](https://www.npmjs.com/package/jimp) v0.5.3 - Image processing in pure JavaScript. Provides bicubic interpolation.
* [pajk-lwip](https://www.npmjs.com/package/pajk-lwip) v0.2.0 (fork) - Wrapper around CImg that compiles dependencies from source. * [mapnik](https://www.npmjs.org/package/mapnik) v4.0.1 - Whilst primarily a map renderer, Mapnik contains bitmap image utilities.
* [mapnik](https://www.npmjs.org/package/mapnik) v3.6.2 - Whilst primarily a map renderer, Mapnik contains bitmap image utilities.
* [imagemagick-native](https://www.npmjs.com/package/imagemagick-native) v1.9.3 - Wrapper around libmagick++, supports Buffers only. * [imagemagick-native](https://www.npmjs.com/package/imagemagick-native) v1.9.3 - Wrapper around libmagick++, supports Buffers only.
* [imagemagick](https://www.npmjs.com/package/imagemagick) v0.1.3 - Supports filesystem only and "*has been unmaintained for a long time*". * [imagemagick](https://www.npmjs.com/package/imagemagick) v0.1.3 - Supports filesystem only and "*has been unmaintained for a long time*".
* [gm](https://www.npmjs.com/package/gm) v1.23.1 - Fully featured wrapper around GraphicsMagick's `gm` command line utility. * [gm](https://www.npmjs.com/package/gm) v1.23.1 - Fully featured wrapper around GraphicsMagick's `gm` command line utility.
* [images](https://www.npmjs.com/package/images) v3.0.1 - Compiles dependencies from source. Provides bicubic interpolation. * sharp v0.21.0 / libvips v8.7.0 - Caching within libvips disabled to ensure a fair comparison.
* sharp v0.19.0 / libvips v8.6.1 - Caching within libvips disabled to ensure a fair comparison.
### The task ### The task
@@ -27,19 +25,14 @@ then compress to JPEG at a "quality" setting of 80.
| Module | Input | Output | Ops/sec | Speed-up | | Module | Input | Output | Ops/sec | Speed-up |
| :----------------- | :----- | :----- | ------: | -------: | | :----------------- | :----- | :----- | ------: | -------: |
| jimp (bilinear) | buffer | buffer | 1.14 | 1.0 | | jimp | buffer | buffer | 0.71 | 1.0 |
| lwip | buffer | buffer | 1.86 | 1.6 | | mapnik | buffer | buffer | 3.32 | 4.7 |
| mapnik | buffer | buffer | 3.34 | 2.9 | | gm | buffer | buffer | 3.97 | 5.6 |
| imagemagick-native | buffer | buffer | 4.13 | 3.6 | | imagemagick-native | buffer | buffer | 4.06 | 5.7 |
| gm | buffer | buffer | 4.21 | 3.7 | | imagemagick | file | file | 4.24 | 6.0 |
| gm | file | file | 4.27 | 3.7 | | sharp | stream | stream | 25.30 | 35.6 |
| imagemagick | file | file | 4.67 | 4.1 | | sharp | file | file | 26.17 | 36.9 |
| images (bicubic) | file | file | 6.22 | 5.5 | | sharp | buffer | buffer | 26.45 | 37.3 |
| sharp | stream | stream | 24.43 | 21.4 |
| sharp | file | file | 25.97 | 22.7 |
| sharp | file | buffer | 26.00 | 22.8 |
| sharp | buffer | file | 26.33 | 23.0 |
| sharp | buffer | buffer | 26.43 | 23.1 |
Greater libvips performance can be expected with caching enabled (default) Greater libvips performance can be expected with caching enabled (default)
and using 8+ core machines, especially those with larger L1/L2 CPU caches. and using 8+ core machines, especially those with larger L1/L2 CPU caches.
@@ -57,7 +50,7 @@ brew install mapnik
``` ```
```sh ```sh
sudo apt-get install imagemagick libmagick++-dev graphicsmagick mapnik-dev sudo apt-get install imagemagick libmagick++-dev graphicsmagick libmapnik-dev
``` ```
```sh ```sh

View File

@@ -4,6 +4,7 @@ const fs = require('fs');
const path = require('path'); const path = require('path');
const copyFileSync = require('fs-copy-file-sync'); const copyFileSync = require('fs-copy-file-sync');
const libvips = require('../lib/libvips');
const npmLog = require('npmlog'); const npmLog = require('npmlog');
if (process.platform === 'win32') { if (process.platform === 'win32') {
@@ -11,8 +12,8 @@ if (process.platform === 'win32') {
const buildReleaseDir = path.join(buildDir, 'Release'); const buildReleaseDir = path.join(buildDir, 'Release');
npmLog.info('sharp', `Creating ${buildReleaseDir}`); npmLog.info('sharp', `Creating ${buildReleaseDir}`);
try { try {
fs.mkdirSync(buildDir); libvips.mkdirSync(buildDir);
fs.mkdirSync(buildReleaseDir); libvips.mkdirSync(buildReleaseDir);
} catch (err) {} } catch (err) {}
const vendorLibDir = path.join(__dirname, '..', 'vendor', 'lib'); const vendorLibDir = path.join(__dirname, '..', 'vendor', 'lib');
npmLog.info('sharp', `Copying DLLs from ${vendorLibDir} to ${buildReleaseDir}`); npmLog.info('sharp', `Copying DLLs from ${vendorLibDir} to ${buildReleaseDir}`);

View File

@@ -9,6 +9,7 @@ const npmLog = require('npmlog');
const semver = require('semver'); const semver = require('semver');
const simpleGet = require('simple-get'); const simpleGet = require('simple-get');
const tar = require('tar'); const tar = require('tar');
const copyFileSync = require('fs-copy-file-sync');
const agent = require('../lib/agent'); const agent = require('../lib/agent');
const libvips = require('../lib/libvips'); const libvips = require('../lib/libvips');
@@ -17,6 +18,24 @@ const platform = require('../lib/platform');
const minimumLibvipsVersion = libvips.minimumLibvipsVersion; const minimumLibvipsVersion = libvips.minimumLibvipsVersion;
const distBaseUrl = process.env.SHARP_DIST_BASE_URL || `https://github.com/lovell/sharp-libvips/releases/download/v${minimumLibvipsVersion}/`; const distBaseUrl = process.env.SHARP_DIST_BASE_URL || `https://github.com/lovell/sharp-libvips/releases/download/v${minimumLibvipsVersion}/`;
const fail = function (err) {
npmLog.error('sharp', err.message);
npmLog.error('sharp', 'Please see http://sharp.pixelplumbing.com/page/install');
process.exit(1);
};
const extractTarball = function (tarPath) {
const vendorPath = path.join(__dirname, '..', 'vendor');
libvips.mkdirSync(vendorPath);
tar
.extract({
file: tarPath,
cwd: vendorPath,
strict: true
})
.catch(fail);
};
try { try {
const useGlobalLibvips = libvips.useGlobalLibvips(); const useGlobalLibvips = libvips.useGlobalLibvips();
if (useGlobalLibvips) { if (useGlobalLibvips) {
@@ -29,54 +48,54 @@ try {
} else { } else {
// Is this arch/platform supported? // Is this arch/platform supported?
const arch = process.env.npm_config_arch || process.arch; const arch = process.env.npm_config_arch || process.arch;
if (platform() === 'win32-ia32') { const platformAndArch = platform();
if (platformAndArch === 'win32-ia32') {
throw new Error('Windows x86 (32-bit) node.exe is not supported'); throw new Error('Windows x86 (32-bit) node.exe is not supported');
} }
if (arch === 'ia32') { if (arch === 'ia32') {
throw new Error(`Intel Architecture 32-bit systems require manual installation of libvips >= ${minimumLibvipsVersion}\n`); throw new Error(`Intel Architecture 32-bit systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
} }
if (detectLibc.isNonGlibcLinux) { if (platformAndArch === 'freebsd-x64') {
throw new Error(`Use with ${detectLibc.family} libc requires manual installation of libvips >= ${minimumLibvipsVersion}`); throw new Error(`FreeBSD systems require manual installation of libvips >= ${minimumLibvipsVersion}`);
} }
if (detectLibc.family === detectLibc.GLIBC && detectLibc.version && semver.lt(`${detectLibc.version}.0`, '2.13.0')) { if (detectLibc.family === detectLibc.GLIBC && detectLibc.version && semver.lt(`${detectLibc.version}.0`, '2.13.0')) {
throw new Error(`Use with glibc version ${detectLibc.version} requires manual installation of libvips >= ${minimumLibvipsVersion}`); throw new Error(`Use with glibc version ${detectLibc.version} requires manual installation of libvips >= ${minimumLibvipsVersion}`);
} }
// Download to per-process temporary file // Download to per-process temporary file
const tarFilename = ['libvips', minimumLibvipsVersion, platform()].join('-') + '.tar.gz'; const tarFilename = ['libvips', minimumLibvipsVersion, platformAndArch].join('-') + '.tar.gz';
const tarPathTemp = path.join(os.tmpdir(), `${process.pid}-${tarFilename}`); const tarPathCache = path.join(libvips.cachePath(), tarFilename);
const tmpFile = fs.createWriteStream(tarPathTemp); if (fs.existsSync(tarPathCache)) {
const url = distBaseUrl + tarFilename; npmLog.info('sharp', `Using cached ${tarPathCache}`);
npmLog.info('sharp', `Downloading ${url}`); extractTarball(tarPathCache);
simpleGet({ url: url, agent: agent() }, function (err, response) { } else {
if (err) { const tarPathTemp = path.join(os.tmpdir(), `${process.pid}-${tarFilename}`);
throw err; const tmpFile = fs.createWriteStream(tarPathTemp);
} const url = distBaseUrl + tarFilename;
if (response.statusCode !== 200) { npmLog.info('sharp', `Downloading ${url}`);
throw new Error(`Status ${response.statusCode}`); simpleGet({ url: url, agent: agent() }, function (err, response) {
} if (err) {
response.pipe(tmpFile);
});
tmpFile.on('close', function () {
const vendorPath = path.join(__dirname, '..', 'vendor');
fs.mkdirSync(vendorPath);
tar
.extract({
file: tarPathTemp,
cwd: vendorPath,
strict: true
})
.then(function () {
try {
fs.unlinkSync(tarPathTemp);
} catch (err) {}
})
.catch(function (err) {
throw err; throw err;
}
if (response.statusCode !== 200) {
throw new Error(`Status ${response.statusCode}`);
}
response.pipe(tmpFile);
});
tmpFile
.on('error', fail)
.on('close', function () {
try {
// Attempt to rename
fs.renameSync(tarPathTemp, tarPathCache);
} catch (err) {
// Fall back to copy and unlink
copyFileSync(tarPathTemp, tarPathCache);
fs.unlinkSync(tarPathTemp);
}
extractTarball(tarPathCache);
}); });
}); }
} }
} catch (err) { } catch (err) {
npmLog.error('sharp', err.message); fail(err);
npmLog.error('sharp', 'Please see http://sharp.pixelplumbing.com/page/install');
process.exit(1);
} }

View File

@@ -12,6 +12,23 @@ const bool = {
eor: 'eor' eor: 'eor'
}; };
/**
* Remove alpha channel, if any. This is a no-op if the image does not have an alpha channel.
*
* @example
* sharp('rgba.png')
* .removeAlpha()
* .toFile('rgb.png', function(err, info) {
* // rgb.png is a 3 channel image without an alpha channel
* });
*
* @returns {Sharp}
*/
function removeAlpha () {
this.options.removeAlpha = true;
return this;
}
/** /**
* Extract a single channel from a multi-channel image. * Extract a single channel from a multi-channel image.
* *
@@ -102,6 +119,7 @@ function bandbool (boolOp) {
module.exports = function (Sharp) { module.exports = function (Sharp) {
// Public instance functions // Public instance functions
[ [
removeAlpha,
extractChannel, extractChannel,
joinChannel, joinChannel,
bandbool bandbool

View File

@@ -1,5 +1,7 @@
'use strict'; 'use strict';
const deprecate = require('util').deprecate;
const color = require('color'); const color = require('color');
const is = require('./is'); const is = require('./is');
@@ -16,25 +18,20 @@ const colourspace = {
}; };
/** /**
* Set the background for the `embed`, `flatten` and `extend` operations. * @deprecated
* The default background is `{r: 0, g: 0, b: 0, alpha: 1}`, black without transparency. * @private
*
* Delegates to the _color_ module, which can throw an Error
* but is liberal in what it accepts, clipping values to sensible min/max.
* The alpha value is a float between `0` (transparent) and `1` (opaque).
*
* @param {String|Object} rgba - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @returns {Sharp}
* @throws {Error} Invalid parameter
*/ */
function background (rgba) { function background (rgba) {
const colour = color(rgba); const colour = color(rgba);
this.options.background = [ const background = [
colour.red(), colour.red(),
colour.green(), colour.green(),
colour.blue(), colour.blue(),
Math.round(colour.alpha() * 255) Math.round(colour.alpha() * 255)
]; ];
this.options.resizeBackground = background;
this.options.extendBackground = background;
this.options.flattenBackground = background.slice(0, 3);
return this; return this;
} }
@@ -80,7 +77,7 @@ function grayscale (grayscale) {
/** /**
* Set the output colourspace. * Set the output colourspace.
* By default output image will be web-friendly sRGB, with additional channels interpreted as alpha channels. * By default output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
* @param {String} [colourspace] - output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568) * @param {String} [colourspace] - output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568)
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid parameters * @throws {Error} Invalid parameters
*/ */
@@ -102,23 +99,45 @@ function toColorspace (colorspace) {
return this.toColourspace(colorspace); return this.toColourspace(colorspace);
} }
/**
* Update a colour attribute of the this.options Object.
* @private
* @param {String} key
* @param {String|Object} val
* @throws {Error} Invalid key
*/
function _setColourOption (key, val) {
if (is.object(val) || is.string(val)) {
const colour = color(val);
this.options[key] = [
colour.red(),
colour.green(),
colour.blue(),
Math.round(colour.alpha() * 255)
];
}
}
/** /**
* Decorate the Sharp prototype with colour-related functions. * Decorate the Sharp prototype with colour-related functions.
* @private * @private
*/ */
module.exports = function (Sharp) { module.exports = function (Sharp) {
// Public instance functions
[ [
background, // Public
tint, tint,
greyscale, greyscale,
grayscale, grayscale,
toColourspace, toColourspace,
toColorspace toColorspace,
// Private
_setColourOption
].forEach(function (f) { ].forEach(function (f) {
Sharp.prototype[f.name] = f; Sharp.prototype[f.name] = f;
}); });
// Class attributes // Class attributes
Sharp.colourspace = colourspace; Sharp.colourspace = colourspace;
Sharp.colorspace = colourspace; Sharp.colorspace = colourspace;
// Deprecated
Sharp.prototype.background = deprecate(background, 'background(background) is deprecated, use resize({ background }), extend({ background }) or flatten({ background }) instead');
}; };

View File

@@ -19,8 +19,7 @@ const is = require('./is');
* .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } ) * .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
* .sharpen() * .sharpen()
* .withMetadata() * .withMetadata()
* .quality(90) * .webp( { quality: 90 } )
* .webp()
* .toBuffer() * .toBuffer()
* .then(function(outputBuffer) { * .then(function(outputBuffer) {
* // outputBuffer contains upside down, 300px wide, alpha channel flattened * // outputBuffer contains upside down, 300px wide, alpha channel flattened
@@ -35,7 +34,7 @@ const is = require('./is');
* @param {Number} [options.left] - the pixel offset from the left edge. * @param {Number} [options.left] - the pixel offset from the left edge.
* @param {Boolean} [options.tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`. * @param {Boolean} [options.tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Boolean} [options.cutout=false] - set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. * @param {Boolean} [options.cutout=false] - set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another.
* @param {Number} [options.density=72] - integral number representing the DPI for vector overlay image. * @param {Number} [options.density=72] - number representing the DPI for vector overlay image.
* @param {Object} [options.raw] - describes overlay when using raw pixel data. * @param {Object} [options.raw] - describes overlay when using raw pixel data.
* @param {Number} [options.raw.width] * @param {Number} [options.raw.width]
* @param {Number} [options.raw.height] * @param {Number} [options.raw.height]

View File

@@ -4,43 +4,11 @@ const path = require('path');
const util = require('util'); const util = require('util');
const stream = require('stream'); const stream = require('stream');
const events = require('events'); const events = require('events');
const semver = require('semver');
const is = require('./is'); const is = require('./is');
const platform = require('./platform');
require('./libvips').hasVendoredLibvips();
const sharp = require('../build/Release/sharp.node'); const sharp = require('../build/Release/sharp.node');
// Vendor platform
(function () {
let vendorPlatformId;
try {
vendorPlatformId = require('../vendor/platform.json');
} catch (err) {
return;
}
const currentPlatformId = platform();
/* istanbul ignore if */
if (currentPlatformId !== vendorPlatformId) {
throw new Error(`'${vendorPlatformId}' binaries cannot be used on the '${currentPlatformId}' platform. Please remove the 'node_modules/sharp/vendor' directory and run 'npm rebuild'.`);
}
})();
// Versioning
let versions = {
vips: sharp.libvipsVersion()
};
(function () {
// Does libvips meet minimum requirement?
const libvipsVersionMin = require('../package.json').config.libvips;
/* istanbul ignore if */
if (semver.lt(versions.vips, libvipsVersionMin)) {
throw new Error('Found libvips ' + versions.vips + ' but require at least ' + libvipsVersionMin);
}
// Include versions of dependencies, if present
try {
versions = require('../vendor/versions.json');
} catch (err) {}
})();
// Use NODE_DEBUG=sharp to enable libvips warnings // Use NODE_DEBUG=sharp to enable libvips warnings
const debuglog = util.debuglog('sharp'); const debuglog = util.debuglog('sharp');
@@ -81,7 +49,7 @@ const debuglog = util.debuglog('sharp');
* width: 300, * width: 300,
* height: 200, * height: 200,
* channels: 4, * channels: 4,
* background: { r: 255, g: 0, b: 0, alpha: 128 } * background: { r: 255, g: 0, b: 0, alpha: 0.5 }
* } * }
* }) * })
* .png() * .png()
@@ -96,7 +64,7 @@ const debuglog = util.debuglog('sharp');
* @param {Boolean} [options.failOnError=false] - by default apply a "best effort" * @param {Boolean} [options.failOnError=false] - by default apply a "best effort"
* to decode images, even if the data is corrupt or invalid. Set this flag to true * to decode images, even if the data is corrupt or invalid. Set this flag to true
* if you'd rather halt processing and raise an error when loading invalid images. * if you'd rather halt processing and raise an error when loading invalid images.
* @param {Number} [options.density=72] - integral number representing the DPI for vector images. * @param {Number} [options.density=72] - number representing the DPI for vector images.
* @param {Number} [options.page=0] - page number to extract for multi-page input (GIF, TIFF) * @param {Number} [options.page=0] - page number to extract for multi-page input (GIF, TIFF)
* @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering. * @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
* @param {Number} [options.raw.width] * @param {Number} [options.raw.width]
@@ -136,10 +104,12 @@ const Sharp = function (input, options) {
width: -1, width: -1,
height: -1, height: -1,
canvas: 'crop', canvas: 'crop',
crop: 0, position: 0,
embed: 0, resizeBackground: [0, 0, 0, 255],
useExifOrientation: false, useExifOrientation: false,
angle: 0, angle: 0,
rotationAngle: 0,
rotationBackground: [0, 0, 0, 255],
rotateBeforePreExtract: false, rotateBeforePreExtract: false,
flip: false, flip: false,
flop: false, flop: false,
@@ -147,14 +117,15 @@ const Sharp = function (input, options) {
extendBottom: 0, extendBottom: 0,
extendLeft: 0, extendLeft: 0,
extendRight: 0, extendRight: 0,
extendBackground: [0, 0, 0, 255],
withoutEnlargement: false, withoutEnlargement: false,
kernel: 'lanczos3', kernel: 'lanczos3',
fastShrinkOnLoad: true, fastShrinkOnLoad: true,
// operations // operations
background: [0, 0, 0, 255],
tintA: 128, tintA: 128,
tintB: 128, tintB: 128,
flatten: false, flatten: false,
flattenBackground: [0, 0, 0],
negate: false, negate: false,
medianSize: 0, medianSize: 0,
blurSigma: 0, blurSigma: 0,
@@ -163,7 +134,7 @@ const Sharp = function (input, options) {
sharpenJagged: 2, sharpenJagged: 2,
threshold: 0, threshold: 0,
thresholdGrayscale: true, thresholdGrayscale: true,
trimTolerance: 0, trimThreshold: 0,
gamma: 0, gamma: 0,
greyscale: false, greyscale: false,
normalise: 0, normalise: 0,
@@ -171,6 +142,7 @@ const Sharp = function (input, options) {
booleanFileIn: '', booleanFileIn: '',
joinChannelIn: [], joinChannelIn: [],
extractChannel: -1, extractChannel: -1,
removeAlpha: false,
colourspace: 'srgb', colourspace: 'srgb',
// overlay // overlay
overlayGravity: 0, overlayGravity: 0,
@@ -193,6 +165,7 @@ const Sharp = function (input, options) {
jpegOvershootDeringing: false, jpegOvershootDeringing: false,
jpegOptimiseScans: false, jpegOptimiseScans: false,
jpegOptimiseCoding: true, jpegOptimiseCoding: true,
jpegQuantisationTable: 0,
pngProgressive: false, pngProgressive: false,
pngCompressionLevel: 9, pngCompressionLevel: 9,
pngAdaptiveFiltering: false, pngAdaptiveFiltering: false,
@@ -249,7 +222,12 @@ Sharp.format = sharp.format();
* @example * @example
* console.log(sharp.versions); * console.log(sharp.versions);
*/ */
Sharp.versions = versions; Sharp.versions = {
vips: sharp.libvipsVersion()
};
try {
Sharp.versions = require('../vendor/versions.json');
} catch (err) {}
/** /**
* Export constructor. * Export constructor.

View File

@@ -36,7 +36,7 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
} }
// Density // Density
if (is.defined(inputOptions.density)) { if (is.defined(inputOptions.density)) {
if (is.integer(inputOptions.density) && is.inRange(inputOptions.density, 1, 2400)) { if (is.inRange(inputOptions.density, 1, 2400)) {
inputDescriptor.density = inputOptions.density; inputDescriptor.density = inputOptions.density;
} else { } else {
throw new Error('Invalid density (1 to 2400) ' + inputOptions.density); throw new Error('Invalid density (1 to 2400) ' + inputOptions.density);
@@ -177,12 +177,15 @@ function clone () {
* A Promises/A+ promise is returned when `callback` is not provided. * A Promises/A+ promise is returned when `callback` is not provided.
* *
* - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg` * - `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
* - `size`: Total size of image in bytes, for Stream and Buffer input only
* - `width`: Number of pixels wide (EXIF orientation is not taken into consideration) * - `width`: Number of pixels wide (EXIF orientation is not taken into consideration)
* - `height`: Number of pixels high (EXIF orientation is not taken into consideration) * - `height`: Number of pixels high (EXIF orientation is not taken into consideration)
* - `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L636) * - `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L636)
* - `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK * - `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
* - `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L672) * - `depth`: Name of pixel depth format e.g. `uchar`, `char`, `ushort`, `float` [...](https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L672)
* - `density`: Number of pixels per inch (DPI), if present * - `density`: Number of pixels per inch (DPI), if present
* - `chromaSubsampling`: String containing JPEG chroma subsampling, `4:2:0` or `4:4:4` for RGB, `4:2:0:4` or `4:4:4:4` for CMYK
* - `isProgressive`: Boolean indicating whether the image is interlaced using a progressive scan
* - `hasProfile`: Boolean indicating the presence of an embedded ICC profile * - `hasProfile`: Boolean indicating the presence of an embedded ICC profile
* - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel * - `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* - `orientation`: Number value of the EXIF Orientation header, if present * - `orientation`: Number value of the EXIF Orientation header, if present
@@ -264,6 +267,7 @@ function metadata (callback) {
* - `maxX` (x-coordinate of one of the pixel where the maximum lies) * - `maxX` (x-coordinate of one of the pixel where the maximum lies)
* - `maxY` (y-coordinate of one of the pixel where the maximum lies) * - `maxY` (y-coordinate of one of the pixel where the maximum lies)
* - `isOpaque`: Value to identify if the image is opaque or transparent, based on the presence and use of alpha channel * - `isOpaque`: Value to identify if the image is opaque or transparent, based on the presence and use of alpha channel
* - `entropy`: Histogram-based estimation of greyscale entropy, discarding alpha channel if any (experimental)
* *
* @example * @example
* const image = sharp(inputJpg); * const image = sharp(inputJpg);

View File

@@ -1,17 +1,38 @@
'use strict'; 'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path'); const path = require('path');
const spawnSync = require('child_process').spawnSync; const spawnSync = require('child_process').spawnSync;
const semver = require('semver'); const semver = require('semver');
const platform = require('./platform'); const platform = require('./platform');
const minimumLibvipsVersion = process.env.npm_package_config_libvips || require('../package.json').config.libvips; const env = process.env;
const minimumLibvipsVersion = env.npm_package_config_libvips || require('../package.json').config.libvips;
const spawnSyncOptions = { const spawnSyncOptions = {
encoding: 'utf8', encoding: 'utf8',
shell: true shell: true
}; };
const mkdirSync = function (dirPath) {
try {
fs.mkdirSync(dirPath);
} catch (err) {
if (err.code !== 'EEXIST') {
throw err;
}
}
};
const cachePath = function () {
const npmCachePath = env.npm_config_cache || (env.APPDATA ? path.join(env.APPDATA, 'npm-cache') : path.join(os.homedir(), '.npm'));
mkdirSync(npmCachePath);
const libvipsCachePath = path.join(npmCachePath, '_libvips');
mkdirSync(libvipsCachePath);
return libvipsCachePath;
};
const globalLibvipsVersion = function () { const globalLibvipsVersion = function () {
if (process.platform !== 'win32') { if (process.platform !== 'win32') {
const globalLibvipsVersion = spawnSync(`PKG_CONFIG_PATH="${pkgConfigPath()}" pkg-config --modversion vips-cpp`, spawnSyncOptions).stdout || ''; const globalLibvipsVersion = spawnSync(`PKG_CONFIG_PATH="${pkgConfigPath()}" pkg-config --modversion vips-cpp`, spawnSyncOptions).stdout || '';
@@ -23,21 +44,30 @@ const globalLibvipsVersion = function () {
const hasVendoredLibvips = function () { const hasVendoredLibvips = function () {
const currentPlatformId = platform(); const currentPlatformId = platform();
const vendorPath = path.join(__dirname, '..', 'vendor');
let vendorVersionId;
let vendorPlatformId;
try { try {
const vendorPlatformId = require(path.join(__dirname, '..', 'vendor', 'platform.json')); vendorVersionId = require(path.join(vendorPath, 'versions.json')).vips;
vendorPlatformId = require(path.join(vendorPath, 'platform.json'));
} catch (err) {}
if (vendorVersionId && vendorVersionId !== minimumLibvipsVersion) {
throw new Error(`Found vendored libvips v${vendorVersionId} but require v${minimumLibvipsVersion}. Please remove the 'node_modules/sharp/vendor' directory and run 'npm install'.`);
}
if (vendorPlatformId) {
if (currentPlatformId === vendorPlatformId) { if (currentPlatformId === vendorPlatformId) {
return true; return true;
} else { } else {
throw new Error(`'${vendorPlatformId}' binaries cannot be used on the '${currentPlatformId}' platform. Please remove the 'node_modules/sharp/vendor' directory and run 'npm install'.`); throw new Error(`'${vendorPlatformId}' binaries cannot be used on the '${currentPlatformId}' platform. Please remove the 'node_modules/sharp/vendor' directory and run 'npm install'.`);
} }
} catch (err) {} }
return false; return false;
}; };
const pkgConfigPath = function () { const pkgConfigPath = function () {
if (process.platform !== 'win32') { if (process.platform !== 'win32') {
const brewPkgConfigPath = spawnSync('which brew >/dev/null 2>&1 && eval $(brew --env) && echo $PKG_CONFIG_LIBDIR', spawnSyncOptions).stdout || ''; const brewPkgConfigPath = spawnSync('which brew >/dev/null 2>&1 && eval $(brew --env) && echo $PKG_CONFIG_LIBDIR', spawnSyncOptions).stdout || '';
return [brewPkgConfigPath.trim(), process.env.PKG_CONFIG_PATH, '/usr/local/lib/pkgconfig', '/usr/lib/pkgconfig'] return [brewPkgConfigPath.trim(), env.PKG_CONFIG_PATH, '/usr/local/lib/pkgconfig', '/usr/lib/pkgconfig']
.filter(function (p) { return !!p; }) .filter(function (p) { return !!p; })
.join(':'); .join(':');
} else { } else {
@@ -46,7 +76,7 @@ const pkgConfigPath = function () {
}; };
const useGlobalLibvips = function () { const useGlobalLibvips = function () {
if (Boolean(process.env.SHARP_IGNORE_GLOBAL_LIBVIPS) === true) { if (Boolean(env.SHARP_IGNORE_GLOBAL_LIBVIPS) === true) {
return false; return false;
} }
@@ -56,8 +86,10 @@ const useGlobalLibvips = function () {
module.exports = { module.exports = {
minimumLibvipsVersion: minimumLibvipsVersion, minimumLibvipsVersion: minimumLibvipsVersion,
cachePath: cachePath,
globalLibvipsVersion: globalLibvipsVersion, globalLibvipsVersion: globalLibvipsVersion,
hasVendoredLibvips: hasVendoredLibvips, hasVendoredLibvips: hasVendoredLibvips,
pkgConfigPath: pkgConfigPath, pkgConfigPath: pkgConfigPath,
useGlobalLibvips: useGlobalLibvips useGlobalLibvips: useGlobalLibvips,
mkdirSync: mkdirSync
}; };

View File

@@ -1,14 +1,18 @@
'use strict'; 'use strict';
const color = require('color');
const is = require('./is'); const is = require('./is');
/** /**
* Rotate the output image by either an explicit angle * Rotate the output image by either an explicit angle
* or auto-orient based on the EXIF `Orientation` tag. * or auto-orient based on the EXIF `Orientation` tag.
* *
* If an angle is provided, it is converted to a valid 90/180/270deg rotation. * If an angle is provided, it is converted to a valid positive degree rotation.
* For example, `-450` will produce a 270deg rotation. * For example, `-450` will produce a 270deg rotation.
* *
* When rotating by an angle other than a multiple of 90,
* the background colour can be provided with the `background` option.
*
* If no angle is provided, it is determined from the EXIF data. * If no angle is provided, it is determined from the EXIF data.
* Mirroring is supported and may infer the use of a flip operation. * Mirroring is supported and may infer the use of a flip operation.
* *
@@ -28,64 +32,30 @@ const is = require('./is');
* }); * });
* readableStream.pipe(pipeline); * readableStream.pipe(pipeline);
* *
* @param {Number} [angle=auto] angle of rotation, must be a multiple of 90. * @param {Number} [angle=auto] angle of rotation.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {String|Object} [options.background="#000000"] parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid parameters * @throws {Error} Invalid parameters
*/ */
function rotate (angle) { function rotate (angle, options) {
if (!is.defined(angle)) { if (!is.defined(angle)) {
this.options.useExifOrientation = true; this.options.useExifOrientation = true;
} else if (is.integer(angle) && !(angle % 90)) { } else if (is.integer(angle) && !(angle % 90)) {
this.options.angle = angle; this.options.angle = angle;
} else { } else if (is.number(angle)) {
throw new Error('Unsupported angle: angle must be a positive/negative multiple of 90 ' + angle); this.options.rotationAngle = angle;
} if (is.object(options) && options.background) {
return this; const backgroundColour = color(options.background);
} this.options.rotationBackground = [
backgroundColour.red(),
/** backgroundColour.green(),
* Extract a region of the image. backgroundColour.blue(),
* Math.round(backgroundColour.alpha() * 255)
* - Use `extract` before `resize` for pre-resize extraction. ];
* - Use `extract` after `resize` for post-resize extraction.
* - Use `extract` before and after for both.
*
* @example
* sharp(input)
* .extract({ left: left, top: top, width: width, height: height })
* .toFile(output, function(err) {
* // Extract a region of the input image, saving in the same format.
* });
* @example
* sharp(input)
* .extract({ left: leftOffsetPre, top: topOffsetPre, width: widthPre, height: heightPre })
* .resize(width, height)
* .extract({ left: leftOffsetPost, top: topOffsetPost, width: widthPost, height: heightPost })
* .toFile(output, function(err) {
* // Extract a region, resize, then extract from the resized image
* });
*
* @param {Object} options
* @param {Number} options.left - zero-indexed offset from left edge
* @param {Number} options.top - zero-indexed offset from top edge
* @param {Number} options.width - dimension of extracted image
* @param {Number} options.height - dimension of extracted image
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function extract (options) {
const suffix = this.options.width === -1 && this.options.height === -1 ? 'Pre' : 'Post';
['left', 'top', 'width', 'height'].forEach(function (name) {
const value = options[name];
if (is.integer(value) && value >= 0) {
this.options[name + (name === 'left' || name === 'top' ? 'Offset' : '') + suffix] = value;
} else {
throw new Error('Non-integer value for ' + name + ' of ' + value);
} }
}, this); } else {
// Ensure existing rotation occurs before pre-resize extraction throw new Error('Unsupported angle: must be a number.');
if (suffix === 'Pre' && ((this.options.angle % 360) !== 0 || this.options.useExifOrientation === true)) {
this.options.rotateBeforePreExtract = true;
} }
return this; return this;
} }
@@ -201,72 +171,14 @@ function blur (sigma) {
} }
/** /**
* Extends/pads the edges of the image with the colour provided to the `background` method. * Merge alpha transparency channel, if any, with a background.
* This operation will always occur after resizing and extraction, if any. * @param {String|Object} [options.background={r: 0, g: 0, b: 0}] - background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black.
*
* @example
* // Resize to 140 pixels wide, then add 10 transparent pixels
* // to the top, left and right edges and 20 to the bottom edge
* sharp(input)
* .resize(140)
* .background({r: 0, g: 0, b: 0, alpha: 0})
* .extend({top: 10, bottom: 20, left: 10, right: 10})
* ...
*
* @param {(Number|Object)} extend - single pixel count to add to all edges or an Object with per-edge counts
* @param {Number} [extend.top]
* @param {Number} [extend.left]
* @param {Number} [extend.bottom]
* @param {Number} [extend.right]
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function extend (extend) {
if (is.integer(extend) && extend > 0) {
this.options.extendTop = extend;
this.options.extendBottom = extend;
this.options.extendLeft = extend;
this.options.extendRight = extend;
} else if (
is.object(extend) &&
is.integer(extend.top) && extend.top >= 0 &&
is.integer(extend.bottom) && extend.bottom >= 0 &&
is.integer(extend.left) && extend.left >= 0 &&
is.integer(extend.right) && extend.right >= 0
) {
this.options.extendTop = extend.top;
this.options.extendBottom = extend.bottom;
this.options.extendLeft = extend.left;
this.options.extendRight = extend.right;
} else {
throw new Error('Invalid edge extension ' + extend);
}
return this;
}
/**
* Merge alpha transparency channel, if any, with `background`.
* @param {Boolean} [flatten=true]
* @returns {Sharp} * @returns {Sharp}
*/ */
function flatten (flatten) { function flatten (options) {
this.options.flatten = is.bool(flatten) ? flatten : true; this.options.flatten = is.bool(options) ? options : true;
return this; if (is.object(options)) {
} this._setColourOption('flattenBackground', options.background);
/**
* Trim "boring" pixels from all edges that contain values within a percentage similarity of the top-left pixel.
* @param {Number} [tolerance=10] value between 1 and 99 representing the percentage similarity.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function trim (tolerance) {
if (!is.defined(tolerance)) {
this.options.trimTolerance = 10;
} else if (is.integer(tolerance) && is.inRange(tolerance, 1, 99)) {
this.options.trimTolerance = tolerance;
} else {
throw new Error('Invalid trim tolerance (1 to 99) ' + tolerance);
} }
return this; return this;
} }
@@ -460,15 +372,12 @@ function linear (a, b) {
module.exports = function (Sharp) { module.exports = function (Sharp) {
[ [
rotate, rotate,
extract,
flip, flip,
flop, flop,
sharpen, sharpen,
median, median,
blur, blur,
extend,
flatten, flatten,
trim,
gamma, gamma,
negate, negate,
normalise, normalise,

View File

@@ -150,6 +150,8 @@ function withMetadata (withMetadata) {
* @param {Boolean} [options.optimizeScans=false] - alternative spelling of optimiseScans * @param {Boolean} [options.optimizeScans=false] - alternative spelling of optimiseScans
* @param {Boolean} [options.optimiseCoding=true] - optimise Huffman coding tables * @param {Boolean} [options.optimiseCoding=true] - optimise Huffman coding tables
* @param {Boolean} [options.optimizeCoding=true] - alternative spelling of optimiseCoding * @param {Boolean} [options.optimizeCoding=true] - alternative spelling of optimiseCoding
* @param {Number} [options.quantisationTable=0] - quantization table to use, integer 0-8, requires mozjpeg
* @param {Number} [options.quantizationTable=0] - alternative spelling of quantisationTable
* @param {Boolean} [options.force=true] - force JPEG output, otherwise attempt to use input format * @param {Boolean} [options.force=true] - force JPEG output, otherwise attempt to use input format
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid options * @throws {Error} Invalid options
@@ -191,6 +193,14 @@ function jpeg (options) {
if (is.defined(options.optimiseCoding)) { if (is.defined(options.optimiseCoding)) {
this._setBooleanOption('jpegOptimiseCoding', options.optimiseCoding); this._setBooleanOption('jpegOptimiseCoding', options.optimiseCoding);
} }
options.quantisationTable = is.number(options.quantizationTable) ? options.quantizationTable : options.quantisationTable;
if (is.defined(options.quantisationTable)) {
if (is.integer(options.quantisationTable) && is.inRange(options.quantisationTable, 0, 8)) {
this.options.jpegQuantisationTable = options.quantisationTable;
} else {
throw new Error('Invalid quantisation table (integer, 0-8) ' + options.quantisationTable);
}
}
} }
return this._updateFormatOut('jpeg', options); return this._updateFormatOut('jpeg', options);
} }
@@ -261,10 +271,10 @@ function webp (options) {
} }
} }
if (is.object(options) && is.defined(options.alphaQuality)) { if (is.object(options) && is.defined(options.alphaQuality)) {
if (is.integer(options.alphaQuality) && is.inRange(options.alphaQuality, 1, 100)) { if (is.integer(options.alphaQuality) && is.inRange(options.alphaQuality, 0, 100)) {
this.options.webpAlphaQuality = options.alphaQuality; this.options.webpAlphaQuality = options.alphaQuality;
} else { } else {
throw new Error('Invalid webp alpha quality (integer, 1-100) ' + options.alphaQuality); throw new Error('Invalid webp alpha quality (integer, 0-100) ' + options.alphaQuality);
} }
} }
if (is.object(options) && is.defined(options.lossless)) { if (is.object(options) && is.defined(options.lossless)) {
@@ -413,6 +423,7 @@ function toFormat (format, options) {
* @param {Number} [tile.size=256] tile size in pixels, a value between 1 and 8192. * @param {Number} [tile.size=256] tile size in pixels, a value between 1 and 8192.
* @param {Number} [tile.overlap=0] tile overlap in pixels, a value between 0 and 8192. * @param {Number} [tile.overlap=0] tile overlap in pixels, a value between 0 and 8192.
* @param {Number} [tile.angle=0] tile angle of rotation, must be a multiple of 90. * @param {Number} [tile.angle=0] tile angle of rotation, must be a multiple of 90.
* @param {String} [tile.depth] how deep to make the pyramid, possible values are `onepixel`, `onetile` or `one`, default based on layout.
* @param {String} [tile.container='fs'] tile container, with value `fs` (filesystem) or `zip` (compressed file). * @param {String} [tile.container='fs'] tile container, with value `fs` (filesystem) or `zip` (compressed file).
* @param {String} [tile.layout='dz'] filesystem layout, possible values are `dz`, `zoomify` or `google`. * @param {String} [tile.layout='dz'] filesystem layout, possible values are `dz`, `zoomify` or `google`.
* @returns {Sharp} * @returns {Sharp}
@@ -464,6 +475,15 @@ function tile (tile) {
throw new Error('Unsupported angle: angle must be a positive/negative multiple of 90 ' + tile.angle); throw new Error('Unsupported angle: angle must be a positive/negative multiple of 90 ' + tile.angle);
} }
} }
// Depth of tiles
if (is.defined(tile.depth)) {
if (is.string(tile.depth) && is.inArray(tile.depth, ['onepixel', 'onetile', 'one'])) {
this.options.tileDepth = tile.depth;
} else {
throw new Error("Invalid tile depth '" + tile.depth + "', should be one of 'onepixel', 'onetile' or 'one'");
}
}
} }
// Format // Format
if (is.inArray(this.options.formatOut, ['jpeg', 'png', 'webp'])) { if (is.inArray(this.options.formatOut, ['jpeg', 'png', 'webp'])) {

View File

@@ -1,10 +1,13 @@
'use strict'; 'use strict';
const detectLibc = require('detect-libc');
module.exports = function () { module.exports = function () {
const arch = process.env.npm_config_arch || process.arch; const arch = process.env.npm_config_arch || process.arch;
const platform = process.env.npm_config_platform || process.platform; const platform = process.env.npm_config_platform || process.platform;
const libc = (platform === 'linux' && detectLibc.isNonGlibcLinux) ? detectLibc.family : '';
const platformId = [platform]; const platformId = [`${platform}${libc}`];
if (arch === 'arm' || arch === 'armhf' || arch === 'arm64') { if (arch === 'arm' || arch === 'armhf' || arch === 'arm64') {
const armVersion = (arch === 'arm64') ? '8' : process.env.npm_config_armv || process.config.variables.arm_version || '6'; const armVersion = (arch === 'arm64') ? '8' : process.env.npm_config_armv || process.config.variables.arm_version || '6';
platformId.push(`armv${armVersion}`); platformId.push(`armv${armVersion}`);

View File

@@ -1,9 +1,10 @@
'use strict'; 'use strict';
const deprecate = require('util').deprecate;
const is = require('./is'); const is = require('./is');
/** /**
* Weighting to apply to image crop. * Weighting to apply when using contain/cover fit.
* @member * @member
* @private * @private
*/ */
@@ -21,7 +22,23 @@ const gravity = {
}; };
/** /**
* Strategies for automagic crop behaviour. * Position to apply when using contain/cover fit.
* @member
* @private
*/
const position = {
top: 1,
right: 2,
bottom: 3,
left: 4,
'right top': 5,
'right bottom': 6,
'left bottom': 7,
'left top': 8
};
/**
* Strategies for automagic cover behaviour.
* @member * @member
* @private * @private
*/ */
@@ -43,40 +60,137 @@ const kernel = {
}; };
/** /**
* Resize image to `width` x `height`. * Methods by which an image can be resized to fit the provided dimensions.
* By default, the resized image is centre cropped to the exact size specified. * @member
* @private
*/
const fit = {
contain: 'contain',
cover: 'cover',
fill: 'fill',
inside: 'inside',
outside: 'outside'
};
/**
* Map external fit property to internal canvas property.
* @member
* @private
*/
const mapFitToCanvas = {
contain: 'embed',
cover: 'crop',
fill: 'ignore_aspect',
inside: 'max',
outside: 'min'
};
/**
* Resize image to `width`, `height` or `width x height`.
* *
* Possible kernels are: * When both a `width` and `height` are provided, the possible methods by which the image should **fit** these are:
* - `cover`: Crop to cover both provided dimensions (the default).
* - `contain`: Embed within both provided dimensions.
* - `fill`: Ignore the aspect ratio of the input and stretch to both provided dimensions.
* - `inside`: Preserving aspect ratio, resize the image to be as large as possible while ensuring its dimensions are less than or equal to both those specified.
* - `outside`: Preserving aspect ratio, resize the image to be as small as possible while ensuring its dimensions are greater than or equal to both those specified.
* Some of these values are based on the [object-fit](https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit) CSS property.
*
* When using a `fit` of `cover` or `contain`, the default **position** is `centre`. Other options are:
* - `sharp.position`: `top`, `right top`, `right`, `right bottom`, `bottom`, `left bottom`, `left`, `left top`.
* - `sharp.gravity`: `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` or `centre`.
* - `sharp.strategy`: `cover` only, dynamically crop using either the `entropy` or `attention` strategy.
* Some of these values are based on the [object-position](https://developer.mozilla.org/en-US/docs/Web/CSS/object-position) CSS property.
*
* The experimental strategy-based approach resizes so one dimension is at its target length
* then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.
* - `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
* - `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
*
* Possible interpolation kernels are:
* - `nearest`: Use [nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation). * - `nearest`: Use [nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation).
* - `cubic`: Use a [Catmull-Rom spline](https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline). * - `cubic`: Use a [Catmull-Rom spline](https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline).
* - `lanczos2`: Use a [Lanczos kernel](https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel) with `a=2`. * - `lanczos2`: Use a [Lanczos kernel](https://en.wikipedia.org/wiki/Lanczos_resampling#Lanczos_kernel) with `a=2`.
* - `lanczos3`: Use a Lanczos kernel with `a=3` (the default). * - `lanczos3`: Use a Lanczos kernel with `a=3` (the default).
* *
* @example * @example
* sharp(inputBuffer) * sharp(input)
* .resize({ width: 100 })
* .toBuffer()
* .then(data => {
* // 100 pixels wide, auto-scaled height
* });
*
* @example
* sharp(input)
* .resize({ height: 100 })
* .toBuffer()
* .then(data => {
* // 100 pixels high, auto-scaled width
* });
*
* @example
* sharp(input)
* .resize(200, 300, { * .resize(200, 300, {
* kernel: sharp.kernel.nearest * kernel: sharp.kernel.nearest,
* fit: 'contain',
* position: 'right top',
* background: { r: 255, g: 255, b: 255, alpha: 0.5 }
* }) * })
* .background('white') * .toFile('output.png')
* .embed() * .then(() => {
* .toFile('output.tiff') * // output.png is a 200 pixels wide and 300 pixels high image
* .then(function() { * // containing a nearest-neighbour scaled version
* // output.tiff is a 200 pixels wide and 300 pixels high image * // contained within the north-east corner of a semi-transparent white canvas
* // containing a nearest-neighbour scaled version, embedded on a white canvas, * });
* // of the image data in inputBuffer *
* @example
* const transformer = sharp()
* .resize({
* width: 200,
* height: 200,
* fit: sharp.fit.cover,
* position: sharp.strategy.entropy
* });
* // Read image data from readableStream
* // Write 200px square auto-cropped image data to writableStream
* readableStream
* .pipe(transformer)
* .pipe(writableStream);
*
* @example
* sharp(input)
* .resize(200, 200, {
* fit: sharp.fit.inside,
* withoutEnlargement: true
* })
* .toFormat('jpeg')
* .toBuffer()
* .then(function(outputBuffer) {
* // outputBuffer contains JPEG image data
* // no wider and no higher than 200 pixels
* // and no larger than the input image
* }); * });
* *
* @param {Number} [width] - pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height. * @param {Number} [width] - pixels wide the resultant image should be. Use `null` or `undefined` to auto-scale the width to match the height.
* @param {Number} [height] - pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width. * @param {Number} [height] - pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
* @param {Object} [options] * @param {Object} [options]
* @param {String} [options.width] - alternative means of specifying `width`. If both are present this take priority.
* @param {String} [options.height] - alternative means of specifying `height`. If both are present this take priority.
* @param {String} [options.fit='cover'] - how the image should be resized to fit both provided dimensions, one of `cover`, `contain`, `fill`, `inside` or `outside`.
* @param {String} [options.position='centre'] - position, gravity or strategy to use when `fit` is `cover` or `contain`.
* @param {String|Object} [options.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour when using a `fit` of `contain`, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
* @param {String} [options.kernel='lanczos3'] - the kernel to use for image reduction. * @param {String} [options.kernel='lanczos3'] - the kernel to use for image reduction.
* @param {Boolean} [options.withoutEnlargement=false] - do not enlarge if the width *or* height are already less than the specified dimensions, equivalent to GraphicsMagick's `>` geometry option.
* @param {Boolean} [options.fastShrinkOnLoad=true] - take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images. * @param {Boolean} [options.fastShrinkOnLoad=true] - take greater advantage of the JPEG and WebP shrink-on-load feature, which can lead to a slight moiré pattern on some images.
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid parameters * @throws {Error} Invalid parameters
*/ */
function resize (width, height, options) { function resize (width, height, options) {
if (is.defined(width)) { if (is.defined(width)) {
if (is.integer(width) && width > 0) { if (is.object(width) && !is.defined(options)) {
options = width;
} else if (is.integer(width) && width > 0) {
this.options.width = width; this.options.width = width;
} else { } else {
throw is.invalidParameterError('width', 'positive integer', width); throw is.invalidParameterError('width', 'positive integer', width);
@@ -94,6 +208,38 @@ function resize (width, height, options) {
this.options.height = -1; this.options.height = -1;
} }
if (is.object(options)) { if (is.object(options)) {
// Width
if (is.integer(options.width) && options.width > 0) {
this.options.width = options.width;
}
// Height
if (is.integer(options.height) && options.height > 0) {
this.options.height = options.height;
}
// Fit
if (is.defined(options.fit)) {
const canvas = mapFitToCanvas[options.fit];
if (is.string(canvas)) {
this.options.canvas = canvas;
} else {
throw is.invalidParameterError('fit', 'valid fit', options.fit);
}
}
// Position
if (is.defined(options.position)) {
const pos = is.integer(options.position)
? options.position
: strategy[options.position] || position[options.position] || gravity[options.position];
if (is.integer(pos) && (is.inRange(pos, 0, 8) || is.inRange(pos, 16, 17))) {
this.options.position = pos;
} else {
throw is.invalidParameterError('position', 'valid position/gravity/strategy', options.position);
}
}
// Background
if (is.defined(options.background)) {
this._setColourOption('resizeBackground', options.background);
}
// Kernel // Kernel
if (is.defined(options.kernel)) { if (is.defined(options.kernel)) {
if (is.string(kernel[options.kernel])) { if (is.string(kernel[options.kernel])) {
@@ -102,6 +248,10 @@ function resize (width, height, options) {
throw is.invalidParameterError('kernel', 'valid kernel name', options.kernel); throw is.invalidParameterError('kernel', 'valid kernel name', options.kernel);
} }
} }
// Without enlargement
if (is.defined(options.withoutEnlargement)) {
this._setBooleanOption('withoutEnlargement', options.withoutEnlargement);
}
// Shrink on load // Shrink on load
if (is.defined(options.fastShrinkOnLoad)) { if (is.defined(options.fastShrinkOnLoad)) {
this._setBooleanOption('fastShrinkOnLoad', options.fastShrinkOnLoad); this._setBooleanOption('fastShrinkOnLoad', options.fastShrinkOnLoad);
@@ -111,48 +261,145 @@ function resize (width, height, options) {
} }
/** /**
* Crop the resized image to the exact size specified, the default behaviour. * Extends/pads the edges of the image with the provided background colour.
* * This operation will always occur after resizing and extraction, if any.
* Possible attributes of the optional `sharp.gravity` are `north`, `northeast`, `east`, `southeast`, `south`,
* `southwest`, `west`, `northwest`, `center` and `centre`.
*
* The experimental strategy-based approach resizes so one dimension is at its target length
* then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.
* - `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
* - `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
* *
* @example * @example
* const transformer = sharp() * // Resize to 140 pixels wide, then add 10 transparent pixels
* .resize(200, 200) * // to the top, left and right edges and 20 to the bottom edge
* .crop(sharp.strategy.entropy) * sharp(input)
* .on('error', function(err) { * .resize(140)
* console.log(err); * .)
* }); * .extend({
* // Read image data from readableStream * top: 10,
* // Write 200px square auto-cropped image data to writableStream * bottom: 20,
* readableStream.pipe(transformer).pipe(writableStream); * left: 10,
* right: 10
* background: { r: 0, g: 0, b: 0, alpha: 0 }
* })
* ...
* *
* @param {String} [crop='centre'] - A member of `sharp.gravity` to crop to an edge/corner or `sharp.strategy` to crop dynamically. * @param {(Number|Object)} extend - single pixel count to add to all edges or an Object with per-edge counts
* @param {Number} [extend.top]
* @param {Number} [extend.left]
* @param {Number} [extend.bottom]
* @param {Number} [extend.right]
* @param {String|Object} [extend.background={r: 0, g: 0, b: 0, alpha: 1}] - background colour, parsed by the [color](https://www.npmjs.org/package/color) module, defaults to black without transparency.
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid parameters * @throws {Error} Invalid parameters
*/
function extend (extend) {
if (is.integer(extend) && extend > 0) {
this.options.extendTop = extend;
this.options.extendBottom = extend;
this.options.extendLeft = extend;
this.options.extendRight = extend;
} else if (
is.object(extend) &&
is.integer(extend.top) && extend.top >= 0 &&
is.integer(extend.bottom) && extend.bottom >= 0 &&
is.integer(extend.left) && extend.left >= 0 &&
is.integer(extend.right) && extend.right >= 0
) {
this.options.extendTop = extend.top;
this.options.extendBottom = extend.bottom;
this.options.extendLeft = extend.left;
this.options.extendRight = extend.right;
this._setColourOption('extendBackground', extend.background);
} else {
throw new Error('Invalid edge extension ' + extend);
}
return this;
}
/**
* Extract a region of the image.
*
* - Use `extract` before `resize` for pre-resize extraction.
* - Use `extract` after `resize` for post-resize extraction.
* - Use `extract` before and after for both.
*
* @example
* sharp(input)
* .extract({ left: left, top: top, width: width, height: height })
* .toFile(output, function(err) {
* // Extract a region of the input image, saving in the same format.
* });
* @example
* sharp(input)
* .extract({ left: leftOffsetPre, top: topOffsetPre, width: widthPre, height: heightPre })
* .resize(width, height)
* .extract({ left: leftOffsetPost, top: topOffsetPost, width: widthPost, height: heightPost })
* .toFile(output, function(err) {
* // Extract a region, resize, then extract from the resized image
* });
*
* @param {Object} options
* @param {Number} options.left - zero-indexed offset from left edge
* @param {Number} options.top - zero-indexed offset from top edge
* @param {Number} options.width - dimension of extracted image
* @param {Number} options.height - dimension of extracted image
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function extract (options) {
const suffix = this.options.width === -1 && this.options.height === -1 ? 'Pre' : 'Post';
['left', 'top', 'width', 'height'].forEach(function (name) {
const value = options[name];
if (is.integer(value) && value >= 0) {
this.options[name + (name === 'left' || name === 'top' ? 'Offset' : '') + suffix] = value;
} else {
throw new Error('Non-integer value for ' + name + ' of ' + value);
}
}, this);
// Ensure existing rotation occurs before pre-resize extraction
if (suffix === 'Pre' && ((this.options.angle % 360) !== 0 || this.options.useExifOrientation === true)) {
this.options.rotateBeforePreExtract = true;
}
return this;
}
/**
* Trim "boring" pixels from all edges that contain values similar to the top-left pixel.
* The `info` response Object will contain `trimOffsetLeft` and `trimOffsetTop` properties.
* @param {Number} [threshold=10] the allowed difference from the top-left pixel, a number greater than zero.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function trim (threshold) {
if (!is.defined(threshold)) {
this.options.trimThreshold = 10;
} else if (is.number(threshold) && threshold > 0) {
this.options.trimThreshold = threshold;
} else {
throw is.invalidParameterError('threshold', 'number greater than zero', threshold);
}
return this;
}
// Deprecated functions
/**
* @deprecated
* @private
*/ */
function crop (crop) { function crop (crop) {
this.options.canvas = 'crop'; this.options.canvas = 'crop';
if (!is.defined(crop)) { if (!is.defined(crop)) {
// Default // Default
this.options.crop = gravity.center; this.options.position = gravity.center;
} else if (is.integer(crop) && is.inRange(crop, 0, 8)) { } else if (is.integer(crop) && is.inRange(crop, 0, 8)) {
// Gravity (numeric) // Gravity (numeric)
this.options.crop = crop; this.options.position = crop;
} else if (is.string(crop) && is.integer(gravity[crop])) { } else if (is.string(crop) && is.integer(gravity[crop])) {
// Gravity (string) // Gravity (string)
this.options.crop = gravity[crop]; this.options.position = gravity[crop];
} else if (is.integer(crop) && crop >= strategy.entropy) { } else if (is.integer(crop) && crop >= strategy.entropy) {
// Strategy // Strategy
this.options.crop = crop; this.options.position = crop;
} else if (is.string(crop) && is.integer(strategy[crop])) { } else if (is.string(crop) && is.integer(strategy[crop])) {
// Strategy (string) // Strategy (string)
this.options.crop = strategy[crop]; this.options.position = strategy[crop];
} else { } else {
throw is.invalidParameterError('crop', 'valid crop id/name/strategy', crop); throw is.invalidParameterError('crop', 'valid crop id/name/strategy', crop);
} }
@@ -160,66 +407,29 @@ function crop (crop) {
} }
/** /**
* Preserving aspect ratio, resize the image to the maximum `width` or `height` specified * @deprecated
* then embed on a background of the exact `width` and `height` specified. * @private
*
* If the background contains an alpha value then WebP and PNG format output images will
* contain an alpha channel, even when the input image does not.
*
* @example
* sharp('input.gif')
* .resize(200, 300)
* .background({r: 0, g: 0, b: 0, alpha: 0})
* .embed()
* .toFormat(sharp.format.webp)
* .toBuffer(function(err, outputBuffer) {
* if (err) {
* throw err;
* }
* // outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
* // containing a scaled version, embedded on a transparent canvas, of input.gif
* });
* @param {String} [embed='centre'] - A member of `sharp.gravity` to embed to an edge/corner.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/ */
function embed (embed) { function embed (embed) {
this.options.canvas = 'embed'; this.options.canvas = 'embed';
if (!is.defined(embed)) { if (!is.defined(embed)) {
// Default // Default
this.options.embed = gravity.center; this.options.position = gravity.center;
} else if (is.integer(embed) && is.inRange(embed, 0, 8)) { } else if (is.integer(embed) && is.inRange(embed, 0, 8)) {
// Gravity (numeric) // Gravity (numeric)
this.options.embed = embed; this.options.position = embed;
} else if (is.string(embed) && is.integer(gravity[embed])) { } else if (is.string(embed) && is.integer(gravity[embed])) {
// Gravity (string) // Gravity (string)
this.options.embed = gravity[embed]; this.options.position = gravity[embed];
} else { } else {
throw is.invalidParameterError('embed', 'valid embed id/name', embed); throw is.invalidParameterError('embed', 'valid embed id/name', embed);
} }
return this; return this;
} }
/** /**
* Preserving aspect ratio, resize the image to be as large as possible * @deprecated
* while ensuring its dimensions are less than or equal to the `width` and `height` specified. * @private
*
* Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
*
* @example
* sharp(inputBuffer)
* .resize(200, 200)
* .max()
* .toFormat('jpeg')
* .toBuffer()
* .then(function(outputBuffer) {
* // outputBuffer contains JPEG image data no wider than 200 pixels and no higher
* // than 200 pixels regardless of the inputBuffer image dimensions
* });
*
* @returns {Sharp}
*/ */
function max () { function max () {
this.options.canvas = 'max'; this.options.canvas = 'max';
@@ -227,12 +437,8 @@ function max () {
} }
/** /**
* Preserving aspect ratio, resize the image to be as small as possible * @deprecated
* while ensuring its dimensions are greater than or equal to the `width` and `height` specified. * @private
*
* Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
*
* @returns {Sharp}
*/ */
function min () { function min () {
this.options.canvas = 'min'; this.options.canvas = 'min';
@@ -240,9 +446,8 @@ function min () {
} }
/** /**
* Ignoring the aspect ratio of the input, stretch the image to * @deprecated
* the exact `width` and/or `height` provided via `resize`. * @private
* @returns {Sharp}
*/ */
function ignoreAspectRatio () { function ignoreAspectRatio () {
this.options.canvas = 'ignore_aspect'; this.options.canvas = 'ignore_aspect';
@@ -250,15 +455,8 @@ function ignoreAspectRatio () {
} }
/** /**
* Do not enlarge the output image if the input image width *or* height are already less than the required dimensions. * @deprecated
* This is equivalent to GraphicsMagick's `>` geometry option: * @private
* "*change the dimensions of the image only if its width or height exceeds the geometry specification*".
* Use with `max()` to preserve the image's aspect ratio.
*
* The default behaviour *before* function call is `false`, meaning the image will be enlarged.
*
* @param {Boolean} [withoutEnlargement=true]
* @returns {Sharp}
*/ */
function withoutEnlargement (withoutEnlargement) { function withoutEnlargement (withoutEnlargement) {
this.options.withoutEnlargement = is.bool(withoutEnlargement) ? withoutEnlargement : true; this.options.withoutEnlargement = is.bool(withoutEnlargement) ? withoutEnlargement : true;
@@ -272,12 +470,9 @@ function withoutEnlargement (withoutEnlargement) {
module.exports = function (Sharp) { module.exports = function (Sharp) {
[ [
resize, resize,
crop, extend,
embed, extract,
max, trim
min,
ignoreAspectRatio,
withoutEnlargement
].forEach(function (f) { ].forEach(function (f) {
Sharp.prototype[f.name] = f; Sharp.prototype[f.name] = f;
}); });
@@ -285,4 +480,13 @@ module.exports = function (Sharp) {
Sharp.gravity = gravity; Sharp.gravity = gravity;
Sharp.strategy = strategy; Sharp.strategy = strategy;
Sharp.kernel = kernel; Sharp.kernel = kernel;
Sharp.fit = fit;
Sharp.position = position;
// Deprecated functions, to be removed in v0.22.0
Sharp.prototype.crop = deprecate(crop, 'crop(position) is deprecated, use resize({ fit: "cover", position }) instead');
Sharp.prototype.embed = deprecate(embed, 'embed(position) is deprecated, use resize({ fit: "contain", position }) instead');
Sharp.prototype.max = deprecate(max, 'max() is deprecated, use resize({ fit: "inside" }) instead');
Sharp.prototype.min = deprecate(min, 'min() is deprecated, use resize({ fit: "outside" }) instead');
Sharp.prototype.ignoreAspectRatio = deprecate(ignoreAspectRatio, 'ignoreAspectRatio() is deprecated, use resize({ fit: "fill" }) instead');
Sharp.prototype.withoutEnlargement = deprecate(withoutEnlargement, 'withoutEnlargement() is deprecated, use resize({ withoutEnlargement: true }) instead');
}; };

View File

@@ -82,23 +82,20 @@ function counters () {
* Improves the performance of `resize`, `blur` and `sharpen` operations * Improves the performance of `resize`, `blur` and `sharpen` operations
* by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM NEON. * by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM NEON.
* *
* This feature is currently off by default but future versions may reverse this.
* Versions of liborc prior to 0.4.25 are known to segfault under heavy load.
*
* @example * @example
* const simd = sharp.simd(); * const simd = sharp.simd();
* // simd is `true` if SIMD is currently enabled * // simd is `true` if the runtime use of liborc is currently enabled
* @example * @example
* const simd = sharp.simd(true); * const simd = sharp.simd(false);
* // attempts to enable the use of SIMD, returning true if available * // prevent libvips from using liborc at runtime
* *
* @param {Boolean} [simd=false] * @param {Boolean} [simd=true]
* @returns {Boolean} * @returns {Boolean}
*/ */
function simd (simd) { function simd (simd) {
return sharp.simd(is.bool(simd) ? simd : null); return sharp.simd(is.bool(simd) ? simd : null);
} }
simd(false); simd(true);
/** /**
* Decorate the Sharp class with utility-related functions. * Decorate the Sharp class with utility-related functions.

View File

@@ -1,7 +1,7 @@
{ {
"name": "sharp", "name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images", "description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"version": "0.20.5", "version": "0.21.0",
"author": "Lovell Fuller <npm@lovell.info>", "author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp", "homepage": "https://github.com/lovell/sharp",
"contributors": [ "contributors": [
@@ -49,11 +49,17 @@
"Rik Heywood <rik@rik.org>", "Rik Heywood <rik@rik.org>",
"Thomas Parisot <hi@oncletom.io>", "Thomas Parisot <hi@oncletom.io>",
"Nathan Graves <nathanrgraves+github@gmail.com>", "Nathan Graves <nathanrgraves+github@gmail.com>",
"Tom Lokhorst <tom@lokhorst.eu>" "Tom Lokhorst <tom@lokhorst.eu>",
"Espen Hovlandsdal <espen@hovlandsdal.com>",
"Sylvain Dumont <sylvain.dumont35@gmail.com>",
"Alun Davies <alun.owain.davies@googlemail.com>",
"Aidan Hoolachan <ajhoolachan21@gmail.com>",
"Axel Eirola <axel.eirola@iki.fi>",
"Freezy <freezy@xbmc.org>"
], ],
"scripts": { "scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node-gyp rebuild && node install/dll-copy)", "install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node-gyp rebuild && node install/dll-copy)",
"clean": "rm -rf node_modules/ build/ vendor/ coverage/ test/fixtures/output.*", "clean": "rm -rf node_modules/ build/ vendor/ .nyc_output/ coverage/ test/fixtures/output.*",
"test": "semistandard && cc && nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js && prebuild-ci", "test": "semistandard && cc && nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js && prebuild-ci",
"coverage": "./test/coverage/report.sh", "coverage": "./test/coverage/report.sh",
"test-leak": "./test/leak/leak.sh", "test-leak": "./test/leak/leak.sh",
@@ -83,35 +89,36 @@
"dependencies": { "dependencies": {
"color": "^3.0.0", "color": "^3.0.0",
"detect-libc": "^1.0.3", "detect-libc": "^1.0.3",
"nan": "^2.10.0", "nan": "^2.11.1",
"fs-copy-file-sync": "^1.1.1", "fs-copy-file-sync": "^1.1.1",
"npmlog": "^4.1.2", "npmlog": "^4.1.2",
"prebuild-install": "^4.0.0", "prebuild-install": "^5.2.0",
"semver": "^5.5.0", "semver": "^5.5.1",
"simple-get": "^2.8.1", "simple-get": "^3.0.3",
"tar": "^4.4.4", "tar": "^4.4.6",
"tunnel-agent": "^0.6.0" "tunnel-agent": "^0.6.0"
}, },
"devDependencies": { "devDependencies": {
"async": "^2.6.1", "async": "^2.6.1",
"cc": "^1.0.2", "cc": "^1.0.2",
"decompress-zip": "^0.3.1", "decompress-zip": "^0.3.1",
"documentation": "^8.0.0", "documentation": "^8.1.2",
"exif-reader": "^1.0.2", "exif-reader": "^1.0.2",
"icc": "^1.0.0", "icc": "^1.0.0",
"mocha": "^5.2.0", "mocha": "^5.2.0",
"nyc": "^12.0.2", "mock-fs": "^4.7.0",
"prebuild": "^7.6.0", "nyc": "^13.1.0",
"prebuild": "^8.1.0",
"prebuild-ci": "^2.2.3", "prebuild-ci": "^2.2.3",
"rimraf": "^2.6.2", "rimraf": "^2.6.2",
"semistandard": "^12.0.1" "semistandard": "^12.0.1"
}, },
"license": "Apache-2.0", "license": "Apache-2.0",
"config": { "config": {
"libvips": "8.6.1" "libvips": "8.7.0"
}, },
"engines": { "engines": {
"node": ">=4.5.0" "node": ">=6"
}, },
"semistandard": { "semistandard": {
"env": [ "env": [

View File

@@ -37,6 +37,14 @@ namespace sharp {
std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr) { std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr) {
return *Nan::Utf8String(Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked()); return *Nan::Utf8String(Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked());
} }
std::vector<double> AttrAsRgba(v8::Handle<v8::Object> obj, std::string attr) {
v8::Local<v8::Object> background = AttrAs<v8::Object>(obj, attr);
std::vector<double> rgba(4);
for (unsigned int i = 0; i < 4; i++) {
rgba[i] = AttrTo<double>(background, i);
}
return rgba;
}
// Create an InputDescriptor instance from a v8::Object describing an input image // Create an InputDescriptor instance from a v8::Object describing an input image
InputDescriptor* CreateInputDescriptor( InputDescriptor* CreateInputDescriptor(
@@ -55,7 +63,7 @@ namespace sharp {
descriptor->failOnError = AttrTo<bool>(input, "failOnError"); descriptor->failOnError = AttrTo<bool>(input, "failOnError");
// Density for vector-based input // Density for vector-based input
if (HasAttr(input, "density")) { if (HasAttr(input, "density")) {
descriptor->density = AttrTo<uint32_t>(input, "density"); descriptor->density = AttrTo<double>(input, "density");
} }
// Raw pixel input // Raw pixel input
if (HasAttr(input, "rawChannels")) { if (HasAttr(input, "rawChannels")) {
@@ -72,10 +80,7 @@ namespace sharp {
descriptor->createChannels = AttrTo<uint32_t>(input, "createChannels"); descriptor->createChannels = AttrTo<uint32_t>(input, "createChannels");
descriptor->createWidth = AttrTo<uint32_t>(input, "createWidth"); descriptor->createWidth = AttrTo<uint32_t>(input, "createWidth");
descriptor->createHeight = AttrTo<uint32_t>(input, "createHeight"); descriptor->createHeight = AttrTo<uint32_t>(input, "createHeight");
v8::Local<v8::Object> createBackground = AttrAs<v8::Object>(input, "createBackground"); descriptor->createBackground = AttrAsRgba(input, "createBackground");
for (unsigned int i = 0; i < 4; i++) {
descriptor->createBackground[i] = AttrTo<double>(createBackground, i);
}
} }
return descriptor; return descriptor;
} }
@@ -228,7 +233,7 @@ namespace sharp {
->set("access", accessMethod) ->set("access", accessMethod)
->set("fail", descriptor->failOnError); ->set("fail", descriptor->failOnError);
if (imageType == ImageType::SVG || imageType == ImageType::PDF) { if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
option->set("dpi", static_cast<double>(descriptor->density)); option->set("dpi", descriptor->density);
} }
if (imageType == ImageType::MAGICK) { if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data()); option->set("density", std::to_string(descriptor->density).data());
@@ -270,7 +275,7 @@ namespace sharp {
->set("access", accessMethod) ->set("access", accessMethod)
->set("fail", descriptor->failOnError); ->set("fail", descriptor->failOnError);
if (imageType == ImageType::SVG || imageType == ImageType::PDF) { if (imageType == ImageType::SVG || imageType == ImageType::PDF) {
option->set("dpi", static_cast<double>(descriptor->density)); option->set("dpi", descriptor->density);
} }
if (imageType == ImageType::MAGICK) { if (imageType == ImageType::MAGICK) {
option->set("density", std::to_string(descriptor->density).data()); option->set("density", std::to_string(descriptor->density).data());
@@ -355,8 +360,8 @@ namespace sharp {
/* /*
Set pixels/mm resolution based on a pixels/inch density. Set pixels/mm resolution based on a pixels/inch density.
*/ */
void SetDensity(VImage image, const int density) { void SetDensity(VImage image, const double density) {
const double pixelsPerMm = static_cast<double>(density) / 25.4; const double pixelsPerMm = density / 25.4;
image.set("Xres", pixelsPerMm); image.set("Xres", pixelsPerMm);
image.set("Yres", pixelsPerMm); image.set("Yres", pixelsPerMm);
image.set(VIPS_META_RESOLUTION_UNIT, "in"); image.set(VIPS_META_RESOLUTION_UNIT, "in");
@@ -370,10 +375,6 @@ namespace sharp {
if (image.width() > 65535 || image.height() > 65535) { if (image.width() > 65535 || image.height() > 65535) {
throw vips::VError("Processed image is too large for the JPEG format"); throw vips::VError("Processed image is too large for the JPEG format");
} }
} else if (imageType == ImageType::PNG) {
if (image.width() > 2147483647 || image.height() > 2147483647) {
throw vips::VError("Processed image is too large for the PNG format");
}
} else if (imageType == ImageType::WEBP) { } else if (imageType == ImageType::WEBP) {
if (image.width() > 16383 || image.height() > 16383) { if (image.width() > 16383 || image.height() > 16383) {
throw vips::VError("Processed image is too large for the WebP format"); throw vips::VError("Processed image is too large for the WebP format");
@@ -606,4 +607,40 @@ namespace sharp {
} }
} }
/*
Apply the alpha channel to a given colour
*/
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour) {
// Scale up 8-bit values to match 16-bit input image
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Create alphaColour colour
std::vector<double> alphaColour;
if (image.bands() > 2) {
alphaColour = {
multiplier * colour[0],
multiplier * colour[1],
multiplier * colour[2]
};
} else {
// Convert sRGB to greyscale
alphaColour = { multiplier * (
0.2126 * colour[0] +
0.7152 * colour[1] +
0.0722 * colour[2])
};
}
// Add alpha channel to alphaColour colour
if (colour[3] < 255.0 || HasAlpha(image)) {
alphaColour.push_back(colour[3] * multiplier);
}
// Ensure alphaColour colour uses correct colourspace
alphaColour = sharp::GetRgbaAsColourspace(alphaColour, image.interpretation());
// Add non-transparent alpha channel, if required
if (colour[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
return std::make_tuple(image, alphaColour);
}
} // namespace sharp } // namespace sharp

View File

@@ -49,7 +49,7 @@ namespace sharp {
char *buffer; char *buffer;
bool failOnError; bool failOnError;
size_t bufferLength; size_t bufferLength;
int density; double density;
int rawChannels; int rawChannels;
int rawWidth; int rawWidth;
int rawHeight; int rawHeight;
@@ -57,30 +57,27 @@ namespace sharp {
int createChannels; int createChannels;
int createWidth; int createWidth;
int createHeight; int createHeight;
double createBackground[4]; std::vector<double> createBackground;
InputDescriptor(): InputDescriptor():
buffer(nullptr), buffer(nullptr),
failOnError(FALSE), failOnError(FALSE),
bufferLength(0), bufferLength(0),
density(72), density(72.0),
rawChannels(0), rawChannels(0),
rawWidth(0), rawWidth(0),
rawHeight(0), rawHeight(0),
page(0), page(0),
createChannels(0), createChannels(0),
createWidth(0), createWidth(0),
createHeight(0) { createHeight(0),
createBackground[0] = 0.0; createBackground{ 0.0, 0.0, 0.0, 255.0 } {}
createBackground[1] = 0.0;
createBackground[2] = 0.0;
createBackground[3] = 255.0;
}
}; };
// Convenience methods to access the attributes of a v8::Object // Convenience methods to access the attributes of a v8::Object
bool HasAttr(v8::Handle<v8::Object> obj, std::string attr); bool HasAttr(v8::Handle<v8::Object> obj, std::string attr);
std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr); std::string AttrAsStr(v8::Handle<v8::Object> obj, std::string attr);
std::vector<double> AttrAsRgba(v8::Handle<v8::Object> obj, std::string attr);
template<typename T> v8::Local<T> AttrAs(v8::Handle<v8::Object> obj, std::string attr) { template<typename T> v8::Local<T> AttrAs(v8::Handle<v8::Object> obj, std::string attr) {
return Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked().As<T>(); return Nan::Get(obj, Nan::New(attr).ToLocalChecked()).ToLocalChecked().As<T>();
} }
@@ -186,7 +183,7 @@ namespace sharp {
/* /*
Set pixels/mm resolution based on a pixels/inch density. Set pixels/mm resolution based on a pixels/inch density.
*/ */
void SetDensity(VImage image, const int density); void SetDensity(VImage image, const double density);
/* /*
Check the proposed format supports the current dimensions. Check the proposed format supports the current dimensions.
@@ -255,6 +252,11 @@ namespace sharp {
*/ */
std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation); std::vector<double> GetRgbaAsColourspace(std::vector<double> const rgba, VipsInterpretation const interpretation);
/*
Apply the alpha channel to a given colour
*/
std::tuple<VImage, std::vector<double>> ApplyAlpha(VImage image, std::vector<double> colour);
} // namespace sharp } // namespace sharp
#endif // SRC_COMMON_H_ #endif // SRC_COMMON_H_

View File

@@ -613,7 +613,7 @@ VImage::new_matrixv( int width, int height, ... )
} }
VImage VImage
VImage::write( VImage out ) VImage::write( VImage out ) const
{ {
if( vips_image_write( this->get_image(), out.get_image() ) ) if( vips_image_write( this->get_image(), out.get_image() ) )
throw VError(); throw VError();
@@ -622,7 +622,7 @@ VImage::write( VImage out )
} }
void void
VImage::write_to_file( const char *name, VOption *options ) VImage::write_to_file( const char *name, VOption *options ) const
{ {
char filename[VIPS_PATH_MAX]; char filename[VIPS_PATH_MAX];
char option_string[VIPS_PATH_MAX]; char option_string[VIPS_PATH_MAX];
@@ -642,7 +642,7 @@ VImage::write_to_file( const char *name, VOption *options )
void void
VImage::write_to_buffer( const char *suffix, void **buf, size_t *size, VImage::write_to_buffer( const char *suffix, void **buf, size_t *size,
VOption *options ) VOption *options ) const
{ {
char filename[VIPS_PATH_MAX]; char filename[VIPS_PATH_MAX];
char option_string[VIPS_PATH_MAX]; char option_string[VIPS_PATH_MAX];
@@ -675,7 +675,7 @@ VImage::write_to_buffer( const char *suffix, void **buf, size_t *size,
#include "vips-operators.cpp" #include "vips-operators.cpp"
std::vector<VImage> std::vector<VImage>
VImage::bandsplit( VOption *options ) VImage::bandsplit( VOption *options ) const
{ {
std::vector<VImage> b; std::vector<VImage> b;
@@ -686,7 +686,7 @@ VImage::bandsplit( VOption *options )
} }
VImage VImage
VImage::bandjoin( VImage other, VOption *options ) VImage::bandjoin( VImage other, VOption *options ) const
{ {
VImage v[2] = { *this, other }; VImage v[2] = { *this, other };
std::vector<VImage> vec( v, v + VIPS_NUMBER( v ) ); std::vector<VImage> vec( v, v + VIPS_NUMBER( v ) );
@@ -695,7 +695,7 @@ VImage::bandjoin( VImage other, VOption *options )
} }
VImage VImage
VImage::composite( VImage other, VipsBlendMode mode, VOption *options ) VImage::composite( VImage other, VipsBlendMode mode, VOption *options ) const
{ {
VImage v[2] = { *this, other }; VImage v[2] = { *this, other };
std::vector<VImage> ivec( v, v + VIPS_NUMBER( v ) ); std::vector<VImage> ivec( v, v + VIPS_NUMBER( v ) );
@@ -706,7 +706,7 @@ VImage::composite( VImage other, VipsBlendMode mode, VOption *options )
} }
std::complex<double> std::complex<double>
VImage::minpos( VOption *options ) VImage::minpos( VOption *options ) const
{ {
double x, y; double x, y;
@@ -719,7 +719,7 @@ VImage::minpos( VOption *options )
} }
std::complex<double> std::complex<double>
VImage::maxpos( VOption *options ) VImage::maxpos( VOption *options ) const
{ {
double x, y; double x, y;
@@ -734,43 +734,43 @@ VImage::maxpos( VOption *options )
// Operator overloads // Operator overloads
VImage VImage
VImage::operator[]( int index ) VImage::operator[]( int index ) const
{ {
return( this->extract_band( index ) ); return( this->extract_band( index ) );
} }
std::vector<double> std::vector<double>
VImage::operator()( int x, int y ) VImage::operator()( int x, int y ) const
{ {
return( this->getpoint( x, y ) ); return( this->getpoint( x, y ) );
} }
VImage VImage
operator+( VImage a, VImage b ) operator+( const VImage a, const VImage b )
{ {
return( a.add( b ) ); return( a.add( b ) );
} }
VImage VImage
operator+( double a, VImage b ) operator+( double a, const VImage b )
{ {
return( b.linear( 1.0, a ) ); return( b.linear( 1.0, a ) );
} }
VImage VImage
operator+( VImage a, double b ) operator+( const VImage a, double b )
{ {
return( a.linear( 1.0, b ) ); return( a.linear( 1.0, b ) );
} }
VImage VImage
operator+( std::vector<double> a, VImage b ) operator+( const std::vector<double> a, const VImage b )
{ {
return( b.linear( 1.0, a ) ); return( b.linear( 1.0, a ) );
} }
VImage VImage
operator+( VImage a, std::vector<double> b ) operator+( const VImage a, const std::vector<double> b )
{ {
return( a.linear( 1.0, b ) ); return( a.linear( 1.0, b ) );
} }
@@ -788,37 +788,37 @@ operator+=( VImage &a, const double b )
} }
VImage & VImage &
operator+=( VImage &a, std::vector<double> b ) operator+=( VImage &a, const std::vector<double> b )
{ {
return( a = a + b ); return( a = a + b );
} }
VImage VImage
operator-( VImage a, VImage b ) operator-( const VImage a, const VImage b )
{ {
return( a.subtract( b ) ); return( a.subtract( b ) );
} }
VImage VImage
operator-( double a, VImage b ) operator-( double a, const VImage b )
{ {
return( b.linear( -1.0, a ) ); return( b.linear( -1.0, a ) );
} }
VImage VImage
operator-( VImage a, double b ) operator-( const VImage a, double b )
{ {
return( a.linear( 1.0, -b ) ); return( a.linear( 1.0, -b ) );
} }
VImage VImage
operator-( std::vector<double> a, VImage b ) operator-( const std::vector<double> a, const VImage b )
{ {
return( b.linear( -1.0, a ) ); return( b.linear( -1.0, a ) );
} }
VImage VImage
operator-( VImage a, std::vector<double> b ) operator-( const VImage a, const std::vector<double> b )
{ {
return( a.linear( 1.0, vips::negate( b ) ) ); return( a.linear( 1.0, vips::negate( b ) ) );
} }
@@ -836,43 +836,43 @@ operator-=( VImage &a, const double b )
} }
VImage & VImage &
operator-=( VImage &a, std::vector<double> b ) operator-=( VImage &a, const std::vector<double> b )
{ {
return( a = a - b ); return( a = a - b );
} }
VImage VImage
operator-( VImage a ) operator-( const VImage a )
{ {
return( a * -1 ); return( a * -1 );
} }
VImage VImage
operator*( VImage a, VImage b ) operator*( const VImage a, const VImage b )
{ {
return( a.multiply( b ) ); return( a.multiply( b ) );
} }
VImage VImage
operator*( double a, VImage b ) operator*( double a, const VImage b )
{ {
return( b.linear( a, 0.0 ) ); return( b.linear( a, 0.0 ) );
} }
VImage VImage
operator*( VImage a, double b ) operator*( const VImage a, double b )
{ {
return( a.linear( b, 0.0 ) ); return( a.linear( b, 0.0 ) );
} }
VImage VImage
operator*( std::vector<double> a, VImage b ) operator*( const std::vector<double> a, const VImage b )
{ {
return( b.linear( a, 0.0 ) ); return( b.linear( a, 0.0 ) );
} }
VImage VImage
operator*( VImage a, std::vector<double> b ) operator*( const VImage a, const std::vector<double> b )
{ {
return( a.linear( b, 0.0 ) ); return( a.linear( b, 0.0 ) );
} }
@@ -890,37 +890,37 @@ operator*=( VImage &a, const double b )
} }
VImage & VImage &
operator*=( VImage &a, std::vector<double> b ) operator*=( VImage &a, const std::vector<double> b )
{ {
return( a = a * b ); return( a = a * b );
} }
VImage VImage
operator/( VImage a, VImage b ) operator/( const VImage a, const VImage b )
{ {
return( a.divide( b ) ); return( a.divide( b ) );
} }
VImage VImage
operator/( double a, VImage b ) operator/( double a, const VImage b )
{ {
return( b.pow( -1.0 ).linear( a, 0.0 ) ); return( b.pow( -1.0 ).linear( a, 0.0 ) );
} }
VImage VImage
operator/( VImage a, double b ) operator/( const VImage a, double b )
{ {
return( a.linear( 1.0 / b, 0.0 ) ); return( a.linear( 1.0 / b, 0.0 ) );
} }
VImage VImage
operator/( std::vector<double> a, VImage b ) operator/( const std::vector<double> a, const VImage b )
{ {
return( b.pow( -1.0 ).linear( a, 0.0 ) ); return( b.pow( -1.0 ).linear( a, 0.0 ) );
} }
VImage VImage
operator/( VImage a, std::vector<double> b ) operator/( const VImage a, const std::vector<double> b )
{ {
return( a.linear( vips::invert( b ), 0.0 ) ); return( a.linear( vips::invert( b ), 0.0 ) );
} }
@@ -938,25 +938,25 @@ operator/=( VImage &a, const double b )
} }
VImage & VImage &
operator/=( VImage &a, std::vector<double> b ) operator/=( VImage &a, const std::vector<double> b )
{ {
return( a = a / b ); return( a = a / b );
} }
VImage VImage
operator%( VImage a, VImage b ) operator%( const VImage a, const VImage b )
{ {
return( a.remainder( b ) ); return( a.remainder( b ) );
} }
VImage VImage
operator%( VImage a, double b ) operator%( const VImage a, const double b )
{ {
return( a.remainder_const( to_vector( b ) ) ); return( a.remainder_const( to_vector( b ) ) );
} }
VImage VImage
operator%( VImage a, std::vector<double> b ) operator%( const VImage a, const std::vector<double> b )
{ {
return( a.remainder_const( b ) ); return( a.remainder_const( b ) );
} }
@@ -974,243 +974,243 @@ operator%=( VImage &a, const double b )
} }
VImage & VImage &
operator%=( VImage &a, std::vector<double> b ) operator%=( VImage &a, const std::vector<double> b )
{ {
return( a = a % b ); return( a = a % b );
} }
VImage VImage
operator<( VImage a, VImage b ) operator<( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_LESS ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_LESS ) );
} }
VImage VImage
operator<( double a, VImage b ) operator<( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_MORE, return( b.relational_const( VIPS_OPERATION_RELATIONAL_MORE,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator<( VImage a, double b ) operator<( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESS, return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESS,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator<( std::vector<double> a, VImage b ) operator<( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_MORE, return( b.relational_const( VIPS_OPERATION_RELATIONAL_MORE,
a ) ); a ) );
} }
VImage VImage
operator<( VImage a, std::vector<double> b ) operator<( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESS, return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESS,
b ) ); b ) );
} }
VImage VImage
operator<=( VImage a, VImage b ) operator<=( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_LESSEQ ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_LESSEQ ) );
} }
VImage VImage
operator<=( double a, VImage b ) operator<=( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator<=( VImage a, double b ) operator<=( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator<=( std::vector<double> a, VImage b ) operator<=( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ,
a ) ); a ) );
} }
VImage VImage
operator<=( VImage a, std::vector<double> b ) operator<=( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ,
b ) ); b ) );
} }
VImage VImage
operator>( VImage a, VImage b ) operator>( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_MORE ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_MORE ) );
} }
VImage VImage
operator>( double a, VImage b ) operator>( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESS, return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESS,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator>( VImage a, double b ) operator>( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_MORE, return( a.relational_const( VIPS_OPERATION_RELATIONAL_MORE,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator>( std::vector<double> a, VImage b ) operator>( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESS, return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESS,
a ) ); a ) );
} }
VImage VImage
operator>( VImage a, std::vector<double> b ) operator>( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_MORE, return( a.relational_const( VIPS_OPERATION_RELATIONAL_MORE,
b ) ); b ) );
} }
VImage VImage
operator>=( VImage a, VImage b ) operator>=( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_MOREEQ ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_MOREEQ ) );
} }
VImage VImage
operator>=( double a, VImage b ) operator>=( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator>=( VImage a, double b ) operator>=( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator>=( std::vector<double> a, VImage b ) operator>=( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_LESSEQ,
a ) ); a ) );
} }
VImage VImage
operator>=( VImage a, std::vector<double> b ) operator>=( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_MOREEQ,
b ) ); b ) );
} }
VImage VImage
operator==( VImage a, VImage b ) operator==( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_EQUAL ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_EQUAL ) );
} }
VImage VImage
operator==( double a, VImage b ) operator==( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL, return( b.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator==( VImage a, double b ) operator==( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL, return( a.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator==( std::vector<double> a, VImage b ) operator==( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL, return( b.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL,
a ) ); a ) );
} }
VImage VImage
operator==( VImage a, std::vector<double> b ) operator==( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL, return( a.relational_const( VIPS_OPERATION_RELATIONAL_EQUAL,
b ) ); b ) );
} }
VImage VImage
operator!=( VImage a, VImage b ) operator!=( const VImage a, const VImage b )
{ {
return( a.relational( b, VIPS_OPERATION_RELATIONAL_NOTEQ ) ); return( a.relational( b, VIPS_OPERATION_RELATIONAL_NOTEQ ) );
} }
VImage VImage
operator!=( double a, VImage b ) operator!=( const double a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator!=( VImage a, double b ) operator!=( const VImage a, const double b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator!=( std::vector<double> a, VImage b ) operator!=( const std::vector<double> a, const VImage b )
{ {
return( b.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ, return( b.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ,
a ) ); a ) );
} }
VImage VImage
operator!=( VImage a, std::vector<double> b ) operator!=( const VImage a, const std::vector<double> b )
{ {
return( a.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ, return( a.relational_const( VIPS_OPERATION_RELATIONAL_NOTEQ,
b ) ); b ) );
} }
VImage VImage
operator&( VImage a, VImage b ) operator&( const VImage a, const VImage b )
{ {
return( a.boolean( b, VIPS_OPERATION_BOOLEAN_AND ) ); return( a.boolean( b, VIPS_OPERATION_BOOLEAN_AND ) );
} }
VImage VImage
operator&( double a, VImage b ) operator&( const double a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_AND, return( b.boolean_const( VIPS_OPERATION_BOOLEAN_AND,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator&( VImage a, double b ) operator&( const VImage a, const double b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_AND, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_AND,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator&( std::vector<double> a, VImage b ) operator&( const std::vector<double> a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_AND, a ) ); return( b.boolean_const( VIPS_OPERATION_BOOLEAN_AND, a ) );
} }
VImage VImage
operator&( VImage a, std::vector<double> b ) operator&( const VImage a, const std::vector<double> b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_AND, b ) ); return( a.boolean_const( VIPS_OPERATION_BOOLEAN_AND, b ) );
} }
@@ -1228,40 +1228,40 @@ operator&=( VImage &a, const double b )
} }
VImage & VImage &
operator&=( VImage &a, std::vector<double> b ) operator&=( VImage &a, const std::vector<double> b )
{ {
return( a = a & b ); return( a = a & b );
} }
VImage VImage
operator|( VImage a, VImage b ) operator|( const VImage a, const VImage b )
{ {
return( a.boolean( b, VIPS_OPERATION_BOOLEAN_OR ) ); return( a.boolean( b, VIPS_OPERATION_BOOLEAN_OR ) );
} }
VImage VImage
operator|( double a, VImage b ) operator|( const double a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_OR, return( b.boolean_const( VIPS_OPERATION_BOOLEAN_OR,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator|( VImage a, double b ) operator|( const VImage a, const double b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_OR, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_OR,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator|( std::vector<double> a, VImage b ) operator|( const std::vector<double> a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_OR, return( b.boolean_const( VIPS_OPERATION_BOOLEAN_OR,
a ) ); a ) );
} }
VImage VImage
operator|( VImage a, std::vector<double> b ) operator|( const VImage a, const std::vector<double> b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_OR, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_OR,
b ) ); b ) );
@@ -1280,40 +1280,40 @@ operator|=( VImage &a, const double b )
} }
VImage & VImage &
operator|=( VImage &a, std::vector<double> b ) operator|=( VImage &a, const std::vector<double> b )
{ {
return( a = a | b ); return( a = a | b );
} }
VImage VImage
operator^( VImage a, VImage b ) operator^( const VImage a, const VImage b )
{ {
return( a.boolean( b, VIPS_OPERATION_BOOLEAN_EOR ) ); return( a.boolean( b, VIPS_OPERATION_BOOLEAN_EOR ) );
} }
VImage VImage
operator^( double a, VImage b ) operator^( const double a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_EOR, return( b.boolean_const( VIPS_OPERATION_BOOLEAN_EOR,
to_vector( a ) ) ); to_vector( a ) ) );
} }
VImage VImage
operator^( VImage a, double b ) operator^( const VImage a, const double b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_EOR, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_EOR,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator^( std::vector<double> a, VImage b ) operator^( const std::vector<double> a, const VImage b )
{ {
return( b.boolean_const( VIPS_OPERATION_BOOLEAN_EOR, return( b.boolean_const( VIPS_OPERATION_BOOLEAN_EOR,
a ) ); a ) );
} }
VImage VImage
operator^( VImage a, std::vector<double> b ) operator^( const VImage a, const std::vector<double> b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_EOR, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_EOR,
b ) ); b ) );
@@ -1332,26 +1332,26 @@ operator^=( VImage &a, const double b )
} }
VImage & VImage &
operator^=( VImage &a, std::vector<double> b ) operator^=( VImage &a, const std::vector<double> b )
{ {
return( a = a ^ b ); return( a = a ^ b );
} }
VImage VImage
operator<<( VImage a, VImage b ) operator<<( const VImage a, const VImage b )
{ {
return( a.boolean( b, VIPS_OPERATION_BOOLEAN_LSHIFT ) ); return( a.boolean( b, VIPS_OPERATION_BOOLEAN_LSHIFT ) );
} }
VImage VImage
operator<<( VImage a, double b ) operator<<( const VImage a, const double b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_LSHIFT, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_LSHIFT,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator<<( VImage a, std::vector<double> b ) operator<<( const VImage a, const std::vector<double> b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_LSHIFT, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_LSHIFT,
b ) ); b ) );
@@ -1370,26 +1370,26 @@ operator<<=( VImage &a, const double b )
} }
VImage & VImage &
operator<<=( VImage &a, std::vector<double> b ) operator<<=( VImage &a, const std::vector<double> b )
{ {
return( a = a << b ); return( a = a << b );
} }
VImage VImage
operator>>( VImage a, VImage b ) operator>>( const VImage a, const VImage b )
{ {
return( a.boolean( b, VIPS_OPERATION_BOOLEAN_RSHIFT ) ); return( a.boolean( b, VIPS_OPERATION_BOOLEAN_RSHIFT ) );
} }
VImage VImage
operator>>( VImage a, double b ) operator>>( const VImage a, const double b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_RSHIFT, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_RSHIFT,
to_vector( b ) ) ); to_vector( b ) ) );
} }
VImage VImage
operator>>( VImage a, std::vector<double> b ) operator>>( const VImage a, const std::vector<double> b )
{ {
return( a.boolean_const( VIPS_OPERATION_BOOLEAN_RSHIFT, return( a.boolean_const( VIPS_OPERATION_BOOLEAN_RSHIFT,
b ) ); b ) );
@@ -1408,7 +1408,7 @@ operator>>=( VImage &a, const double b )
} }
VImage & VImage &
operator>>=( VImage &a, std::vector<double> b ) operator>>=( VImage &a, const std::vector<double> b )
{ {
return( a = a << b ); return( a = a << b );
} }

File diff suppressed because it is too large Load Diff

View File

@@ -62,6 +62,12 @@ class MetadataWorker : public Nan::AsyncWorker {
if (sharp::HasDensity(image)) { if (sharp::HasDensity(image)) {
baton->density = sharp::GetDensity(image); baton->density = sharp::GetDensity(image);
} }
if (image.get_typeof("jpeg-chroma-subsample") == VIPS_TYPE_REF_STRING) {
baton->chromaSubsampling = image.get_string("jpeg-chroma-subsample");
}
if (image.get_typeof("interlaced") == G_TYPE_INT) {
baton->isProgressive = image.get_int("interlaced") == 1;
}
baton->hasProfile = sharp::HasProfile(image); baton->hasProfile = sharp::HasProfile(image);
// Derived attributes // Derived attributes
baton->hasAlpha = sharp::HasAlpha(image); baton->hasAlpha = sharp::HasAlpha(image);
@@ -117,6 +123,9 @@ class MetadataWorker : public Nan::AsyncWorker {
// Metadata Object // Metadata Object
v8::Local<v8::Object> info = New<v8::Object>(); v8::Local<v8::Object> info = New<v8::Object>();
Set(info, New("format").ToLocalChecked(), New<v8::String>(baton->format).ToLocalChecked()); Set(info, New("format").ToLocalChecked(), New<v8::String>(baton->format).ToLocalChecked());
if (baton->input->bufferLength > 0) {
Set(info, New("size").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->input->bufferLength)));
}
Set(info, New("width").ToLocalChecked(), New<v8::Uint32>(baton->width)); Set(info, New("width").ToLocalChecked(), New<v8::Uint32>(baton->width));
Set(info, New("height").ToLocalChecked(), New<v8::Uint32>(baton->height)); Set(info, New("height").ToLocalChecked(), New<v8::Uint32>(baton->height));
Set(info, New("space").ToLocalChecked(), New<v8::String>(baton->space).ToLocalChecked()); Set(info, New("space").ToLocalChecked(), New<v8::String>(baton->space).ToLocalChecked());
@@ -125,6 +134,12 @@ class MetadataWorker : public Nan::AsyncWorker {
if (baton->density > 0) { if (baton->density > 0) {
Set(info, New("density").ToLocalChecked(), New<v8::Uint32>(baton->density)); Set(info, New("density").ToLocalChecked(), New<v8::Uint32>(baton->density));
} }
if (!baton->chromaSubsampling.empty()) {
Set(info,
New("chromaSubsampling").ToLocalChecked(),
New<v8::String>(baton->chromaSubsampling).ToLocalChecked());
}
Set(info, New("isProgressive").ToLocalChecked(), New<v8::Boolean>(baton->isProgressive));
Set(info, New("hasProfile").ToLocalChecked(), New<v8::Boolean>(baton->hasProfile)); Set(info, New("hasProfile").ToLocalChecked(), New<v8::Boolean>(baton->hasProfile));
Set(info, New("hasAlpha").ToLocalChecked(), New<v8::Boolean>(baton->hasAlpha)); Set(info, New("hasAlpha").ToLocalChecked(), New<v8::Boolean>(baton->hasAlpha));
if (baton->orientation > 0) { if (baton->orientation > 0) {

View File

@@ -31,6 +31,8 @@ struct MetadataBaton {
int channels; int channels;
std::string depth; std::string depth;
int density; int density;
std::string chromaSubsampling;
bool isProgressive;
bool hasProfile; bool hasProfile;
bool hasAlpha; bool hasAlpha;
int orientation; int orientation;
@@ -50,6 +52,7 @@ struct MetadataBaton {
height(0), height(0),
channels(0), channels(0),
density(0), density(0),
isProgressive(false),
hasProfile(false), hasProfile(false),
hasAlpha(false), hasAlpha(false),
orientation(0), orientation(0),

View File

@@ -28,6 +28,16 @@ using vips::VError;
namespace sharp { namespace sharp {
/*
Removes alpha channel, if any.
*/
VImage RemoveAlpha(VImage image) {
if (HasAlpha(image)) {
image = image.extract_band(0, VImage::option()->set("n", image.bands() - 1));
}
return image;
}
/* /*
Composite overlayImage over image at given position Composite overlayImage over image at given position
Assumes alpha channels are already premultiplied and will be unpremultiplied after Assumes alpha channels are already premultiplied and will be unpremultiplied after
@@ -68,7 +78,7 @@ namespace sharp {
// //
// References: // References:
// - http://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending // - http://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending
// - https://github.com/jcupitt/ruby-vips/issues/28#issuecomment-9014826 // - https://github.com/libvips/ruby-vips/issues/28#issuecomment-9014826
// //
// out_a = src_a + dst_a * (1 - src_a) // out_a = src_a + dst_a * (1 - src_a)
// ^^^^^^^^^^^ // ^^^^^^^^^^^
@@ -223,10 +233,8 @@ namespace sharp {
VImage Gamma(VImage image, double const exponent) { VImage Gamma(VImage image, double const exponent) {
if (HasAlpha(image)) { if (HasAlpha(image)) {
// Separate alpha channel // Separate alpha channel
VImage imageWithoutAlpha = image.extract_band(0,
VImage::option()->set("n", image.bands() - 1));
VImage alpha = image[image.bands() - 1]; VImage alpha = image[image.bands() - 1];
return imageWithoutAlpha.gamma(VImage::option()->set("exponent", exponent)).bandjoin(alpha); return RemoveAlpha(image).gamma(VImage::option()->set("exponent", exponent)).bandjoin(alpha);
} else { } else {
return image.gamma(VImage::option()->set("exponent", exponent)); return image.gamma(VImage::option()->set("exponent", exponent));
} }
@@ -316,55 +324,22 @@ namespace sharp {
return image.boolean(imageR, boolean); return image.boolean(imageR, boolean);
} }
VImage Trim(VImage image, int const tolerance) { /*
using sharp::MaximumImageAlpha; Trim an image
// An equivalent of ImageMagick's -trim in C++ ... automatically remove */
// "boring" image edges. VImage Trim(VImage image, double const threshold) {
// Top-left pixel provides the background colour
// We use .project to sum the rows and columns of a 0/255 mask image, the first VImage background = image.extract_area(0, 0, 1, 1);
// non-zero row or column is the object edge. We make the mask image with an if (HasAlpha(background)) {
// amount-different-from-background image plus a threshold. background = background.flatten();
}
// find the value of the pixel at (0, 0) ... we will search for all pixels int top, width, height;
// significantly different from this int const left = image.find_trim(&top, &width, &height, VImage::option()
std::vector<double> background = image(0, 0); ->set("background", background(0, 0))
->set("threshold", threshold));
double const max = MaximumImageAlpha(image.interpretation()); if (width == 0 || height == 0) {
// we need to smooth the image, subtract the background from every pixel, take
// the absolute value of the difference, then threshold
VImage mask = (image.median(3) - background).abs() > (max * tolerance / 100);
// sum mask rows and columns, then search for the first non-zero sum in each
// direction
VImage rows;
VImage columns = mask.project(&rows);
VImage profileLeftV;
VImage profileLeftH = columns.profile(&profileLeftV);
VImage profileRightV;
VImage profileRightH = columns.fliphor().profile(&profileRightV);
VImage profileTopV;
VImage profileTopH = rows.profile(&profileTopV);
VImage profileBottomV;
VImage profileBottomH = rows.flipver().profile(&profileBottomV);
int left = static_cast<int>(floor(profileLeftV.min()));
int right = columns.width() - static_cast<int>(floor(profileRightV.min()));
int top = static_cast<int>(floor(profileTopH.min()));
int bottom = rows.height() - static_cast<int>(floor(profileBottomH.min()));
int width = right - left;
int height = bottom - top;
if (width <= 0 || height <= 0) {
throw VError("Unexpected error while trimming. Try to lower the tolerance"); throw VError("Unexpected error while trimming. Try to lower the tolerance");
} }
// and now crop the original image
return image.extract_area(left, top, width, height); return image.extract_area(left, top, width, height);
} }
@@ -374,10 +349,8 @@ namespace sharp {
VImage Linear(VImage image, double const a, double const b) { VImage Linear(VImage image, double const a, double const b) {
if (HasAlpha(image)) { if (HasAlpha(image)) {
// Separate alpha channel // Separate alpha channel
VImage imageWithoutAlpha = image.extract_band(0,
VImage::option()->set("n", image.bands() - 1));
VImage alpha = image[image.bands() - 1]; VImage alpha = image[image.bands() - 1];
return imageWithoutAlpha.linear(a, b).bandjoin(alpha); return RemoveAlpha(image).linear(a, b).bandjoin(alpha);
} else { } else {
return image.linear(a, b); return image.linear(a, b);
} }

View File

@@ -25,6 +25,11 @@ using vips::VImage;
namespace sharp { namespace sharp {
/*
Removes alpha channel, if any.
*/
VImage RemoveAlpha(VImage image);
/* /*
Alpha composite src over dst with given gravity. Alpha composite src over dst with given gravity.
Assumes alpha channels are already premultiplied and will be unpremultiplied after. Assumes alpha channels are already premultiplied and will be unpremultiplied after.
@@ -95,7 +100,7 @@ namespace sharp {
/* /*
Trim an image Trim an image
*/ */
VImage Trim(VImage image, int const tolerance); VImage Trim(VImage image, double const threshold);
/* /*
* Linear adjustment (a * in + b) * Linear adjustment (a * in + b)

View File

@@ -100,8 +100,10 @@ class PipelineWorker : public Nan::AsyncWorker {
} }
// Trim // Trim
if (baton->trimTolerance != 0) { if (baton->trimThreshold > 0.0) {
image = sharp::Trim(image, baton->trimTolerance); image = sharp::Trim(image, baton->trimThreshold);
baton->trimOffsetLeft = image.xoffset();
baton->trimOffsetTop = image.yoffset();
} }
// Pre extraction // Pre extraction
@@ -233,7 +235,7 @@ class PipelineWorker : public Nan::AsyncWorker {
if ( if (
xshrink == yshrink && xshrink >= 2 * shrink_on_load_factor && xshrink == yshrink && xshrink >= 2 * shrink_on_load_factor &&
(inputImageType == ImageType::JPEG || inputImageType == ImageType::WEBP) && (inputImageType == ImageType::JPEG || inputImageType == ImageType::WEBP) &&
baton->gamma == 0 && baton->topOffsetPre == -1 && baton->trimTolerance == 0 baton->gamma == 0 && baton->topOffsetPre == -1 && baton->trimThreshold == 0.0
) { ) {
if (xshrink >= 8 * shrink_on_load_factor) { if (xshrink >= 8 * shrink_on_load_factor) {
xfactor = xfactor / 8; xfactor = xfactor / 8;
@@ -318,9 +320,9 @@ class PipelineWorker : public Nan::AsyncWorker {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0; double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Background colour // Background colour
std::vector<double> background { std::vector<double> background {
baton->background[0] * multiplier, baton->flattenBackground[0] * multiplier,
baton->background[1] * multiplier, baton->flattenBackground[1] * multiplier,
baton->background[2] * multiplier baton->flattenBackground[2] * multiplier
}; };
image = image.flatten(VImage::option() image = image.flatten(VImage::option()
->set("background", background)); ->set("background", background));
@@ -421,35 +423,8 @@ class PipelineWorker : public Nan::AsyncWorker {
// Crop/embed // Crop/embed
if (image.width() != baton->width || image.height() != baton->height) { if (image.width() != baton->width || image.height() != baton->height) {
if (baton->canvas == Canvas::EMBED) { if (baton->canvas == Canvas::EMBED) {
// Scale up 8-bit values to match 16-bit input image
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Create background colour
std::vector<double> background; std::vector<double> background;
if (image.bands() > 2) { std::tie(image, background) = sharp::ApplyAlpha(image, baton->resizeBackground);
background = {
multiplier * baton->background[0],
multiplier * baton->background[1],
multiplier * baton->background[2]
};
} else {
// Convert sRGB to greyscale
background = { multiplier * (
0.2126 * baton->background[0] +
0.7152 * baton->background[1] +
0.0722 * baton->background[2])
};
}
// Add alpha channel to background colour
if (baton->background[3] < 255.0 || HasAlpha(image)) {
background.push_back(baton->background[3] * multiplier);
}
// Ensure background colour uses correct colourspace
background = sharp::GetRgbaAsColourspace(background, image.interpretation());
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
// Embed // Embed
@@ -463,7 +438,7 @@ class PipelineWorker : public Nan::AsyncWorker {
int width = std::max(image.width(), baton->width); int width = std::max(image.width(), baton->width);
int height = std::max(image.height(), baton->height); int height = std::max(image.height(), baton->height);
std::tie(left, top) = sharp::CalculateEmbedPosition( std::tie(left, top) = sharp::CalculateEmbedPosition(
image.width(), image.height(), baton->width, baton->height, baton->embed); image.width(), image.height(), baton->width, baton->height, baton->position);
image = image.embed(left, top, width, height, VImage::option() image = image.embed(left, top, width, height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND) ->set("extend", VIPS_EXTEND_BACKGROUND)
@@ -474,12 +449,12 @@ class PipelineWorker : public Nan::AsyncWorker {
(image.width() > baton->width || image.height() > baton->height) (image.width() > baton->width || image.height() > baton->height)
) { ) {
// Crop/max/min // Crop/max/min
if (baton->crop < 9) { if (baton->position < 9) {
// Gravity-based crop // Gravity-based crop
int left; int left;
int top; int top;
std::tie(left, top) = sharp::CalculateCrop( std::tie(left, top) = sharp::CalculateCrop(
image.width(), image.height(), baton->width, baton->height, baton->crop); image.width(), image.height(), baton->width, baton->height, baton->position);
int width = std::min(image.width(), baton->width); int width = std::min(image.width(), baton->width);
int height = std::min(image.height(), baton->height); int height = std::min(image.height(), baton->height);
image = image.extract_area(left, top, width, height); image = image.extract_area(left, top, width, height);
@@ -495,7 +470,7 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("access", baton->accessMethod) ->set("access", baton->accessMethod)
->set("threaded", TRUE)); ->set("threaded", TRUE));
image = image.smartcrop(baton->width, baton->height, VImage::option() image = image.smartcrop(baton->width, baton->height, VImage::option()
->set("interesting", baton->crop == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION)); ->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION));
baton->hasCropOffset = true; baton->hasCropOffset = true;
baton->cropOffsetLeft = static_cast<int>(image.xoffset()); baton->cropOffsetLeft = static_cast<int>(image.xoffset());
baton->cropOffsetTop = static_cast<int>(image.yoffset()); baton->cropOffsetTop = static_cast<int>(image.yoffset());
@@ -503,6 +478,13 @@ class PipelineWorker : public Nan::AsyncWorker {
} }
} }
// Rotate by degree
if (baton->rotationAngle != 0.0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->rotationBackground);
image = image.rotate(baton->rotationAngle, VImage::option()->set("background", background));
}
// Post extraction // Post extraction
if (baton->topOffsetPost != -1) { if (baton->topOffsetPost != -1) {
image = image.extract_area( image = image.extract_area(
@@ -511,35 +493,9 @@ class PipelineWorker : public Nan::AsyncWorker {
// Extend edges // Extend edges
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) { if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
// Scale up 8-bit values to match 16-bit input image
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
// Create background colour
std::vector<double> background; std::vector<double> background;
if (image.bands() > 2) { std::tie(image, background) = sharp::ApplyAlpha(image, baton->extendBackground);
background = {
multiplier * baton->background[0],
multiplier * baton->background[1],
multiplier * baton->background[2]
};
} else {
// Convert sRGB to greyscale
background = { multiplier * (
0.2126 * baton->background[0] +
0.7152 * baton->background[1] +
0.0722 * baton->background[2])
};
}
// Add alpha channel to background colour
if (baton->background[3] < 255.0 || HasAlpha(image)) {
background.push_back(baton->background[3] * multiplier);
}
// Ensure background colour uses correct colourspace
background = sharp::GetRgbaAsColourspace(background, image.interpretation());
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
// Embed // Embed
baton->width = image.width() + baton->extendLeft + baton->extendRight; baton->width = image.width() + baton->extendLeft + baton->extendRight;
baton->height = image.height() + baton->extendTop + baton->extendBottom; baton->height = image.height() + baton->extendTop + baton->extendBottom;
@@ -694,10 +650,19 @@ class PipelineWorker : public Nan::AsyncWorker {
(baton->err).append("Cannot extract channel from image. Too few channels in image."); (baton->err).append("Cannot extract channel from image. Too few channels in image.");
return Error(); return Error();
} }
VipsInterpretation const interpretation = sharp::Is16Bit(image.interpretation())
? VIPS_INTERPRETATION_GREY16
: VIPS_INTERPRETATION_B_W;
image = image image = image
.extract_band(baton->extractChannel) .extract_band(baton->extractChannel)
.copy(VImage::option()->set("interpretation", VIPS_INTERPRETATION_B_W)); .copy(VImage::option()->set("interpretation", interpretation));
} }
// Remove alpha channel, if any
if (baton->removeAlpha) {
image = sharp::RemoveAlpha(image);
}
// Convert image to sRGB, if not already // Convert image to sRGB, if not already
if (sharp::Is16Bit(image.interpretation())) { if (sharp::Is16Bit(image.interpretation())) {
image = image.cast(VIPS_FORMAT_USHORT); image = image.cast(VIPS_FORMAT_USHORT);
@@ -733,6 +698,7 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("interlace", baton->jpegProgressive) ->set("interlace", baton->jpegProgressive)
->set("no_subsample", baton->jpegChromaSubsampling == "4:4:4") ->set("no_subsample", baton->jpegChromaSubsampling == "4:4:4")
->set("trellis_quant", baton->jpegTrellisQuantisation) ->set("trellis_quant", baton->jpegTrellisQuantisation)
->set("quant_table", baton->jpegQuantisationTable)
->set("overshoot_deringing", baton->jpegOvershootDeringing) ->set("overshoot_deringing", baton->jpegOvershootDeringing)
->set("optimize_scans", baton->jpegOptimiseScans) ->set("optimize_scans", baton->jpegOptimiseScans)
->set("optimize_coding", baton->jpegOptimiseCoding))); ->set("optimize_coding", baton->jpegOptimiseCoding)));
@@ -848,6 +814,7 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("interlace", baton->jpegProgressive) ->set("interlace", baton->jpegProgressive)
->set("no_subsample", baton->jpegChromaSubsampling == "4:4:4") ->set("no_subsample", baton->jpegChromaSubsampling == "4:4:4")
->set("trellis_quant", baton->jpegTrellisQuantisation) ->set("trellis_quant", baton->jpegTrellisQuantisation)
->set("quant_table", baton->jpegQuantisationTable)
->set("overshoot_deringing", baton->jpegOvershootDeringing) ->set("overshoot_deringing", baton->jpegOvershootDeringing)
->set("optimize_scans", baton->jpegOptimiseScans) ->set("optimize_scans", baton->jpegOptimiseScans)
->set("optimize_coding", baton->jpegOptimiseCoding)); ->set("optimize_coding", baton->jpegOptimiseCoding));
@@ -883,10 +850,6 @@ class PipelineWorker : public Nan::AsyncWorker {
if (baton->tiffCompression == VIPS_FOREIGN_TIFF_COMPRESSION_JPEG) { if (baton->tiffCompression == VIPS_FOREIGN_TIFF_COMPRESSION_JPEG) {
sharp::AssertImageTypeDimensions(image, ImageType::JPEG); sharp::AssertImageTypeDimensions(image, ImageType::JPEG);
} }
// Cast pixel values to float, if required
if (baton->tiffPredictor == VIPS_FOREIGN_TIFF_PREDICTOR_FLOAT) {
image = image.cast(VIPS_FORMAT_FLOAT);
}
image.tiffsave(const_cast<char*>(baton->fileOut.data()), VImage::option() image.tiffsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata) ->set("strip", !baton->withMetadata)
->set("Q", baton->tiffQuality) ->set("Q", baton->tiffQuality)
@@ -927,6 +890,7 @@ class PipelineWorker : public Nan::AsyncWorker {
{"interlace", baton->jpegProgressive ? "TRUE" : "FALSE"}, {"interlace", baton->jpegProgressive ? "TRUE" : "FALSE"},
{"no_subsample", baton->jpegChromaSubsampling == "4:4:4" ? "TRUE": "FALSE"}, {"no_subsample", baton->jpegChromaSubsampling == "4:4:4" ? "TRUE": "FALSE"},
{"trellis_quant", baton->jpegTrellisQuantisation ? "TRUE" : "FALSE"}, {"trellis_quant", baton->jpegTrellisQuantisation ? "TRUE" : "FALSE"},
{"quant_table", std::to_string(baton->jpegQuantisationTable)},
{"overshoot_deringing", baton->jpegOvershootDeringing ? "TRUE": "FALSE"}, {"overshoot_deringing", baton->jpegOvershootDeringing ? "TRUE": "FALSE"},
{"optimize_scans", baton->jpegOptimiseScans ? "TRUE": "FALSE"}, {"optimize_scans", baton->jpegOptimiseScans ? "TRUE": "FALSE"},
{"optimize_coding", baton->jpegOptimiseCoding ? "TRUE": "FALSE"} {"optimize_coding", baton->jpegOptimiseCoding ? "TRUE": "FALSE"}
@@ -934,14 +898,22 @@ class PipelineWorker : public Nan::AsyncWorker {
suffix = AssembleSuffixString(extname, options); suffix = AssembleSuffixString(extname, options);
} }
// Write DZ to file // Write DZ to file
image.dzsave(const_cast<char*>(baton->fileOut.data()), VImage::option() vips::VOption *options = VImage::option()
->set("strip", !baton->withMetadata) ->set("strip", !baton->withMetadata)
->set("tile_size", baton->tileSize) ->set("tile_size", baton->tileSize)
->set("overlap", baton->tileOverlap) ->set("overlap", baton->tileOverlap)
->set("container", baton->tileContainer) ->set("container", baton->tileContainer)
->set("layout", baton->tileLayout) ->set("layout", baton->tileLayout)
->set("suffix", const_cast<char*>(suffix.data())) ->set("suffix", const_cast<char*>(suffix.data()))
->set("angle", CalculateAngleRotation(baton->tileAngle))); ->set("angle", CalculateAngleRotation(baton->tileAngle));
// libvips chooses a default depth based on layout. Instead of replicating that logic here by
// not passing anything - libvips will handle choice
if (baton->tileDepth < VIPS_FOREIGN_DZ_DEPTH_LAST) {
options->set("depth", baton->tileDepth);
}
image.dzsave(const_cast<char*>(baton->fileOut.data()), options);
baton->formatOut = "dz"; baton->formatOut = "dz";
} else if (baton->formatOut == "v" || (mightMatchInput && isV) || } else if (baton->formatOut == "v" || (mightMatchInput && isV) ||
(willMatchInput && inputImageType == ImageType::VIPS)) { (willMatchInput && inputImageType == ImageType::VIPS)) {
@@ -996,6 +968,12 @@ class PipelineWorker : public Nan::AsyncWorker {
Set(info, New("cropOffsetTop").ToLocalChecked(), Set(info, New("cropOffsetTop").ToLocalChecked(),
New<v8::Int32>(static_cast<int32_t>(baton->cropOffsetTop))); New<v8::Int32>(static_cast<int32_t>(baton->cropOffsetTop)));
} }
if (baton->trimThreshold > 0.0) {
Set(info, New("trimOffsetLeft").ToLocalChecked(),
New<v8::Int32>(static_cast<int32_t>(baton->trimOffsetLeft)));
Set(info, New("trimOffsetTop").ToLocalChecked(),
New<v8::Int32>(static_cast<int32_t>(baton->trimOffsetTop)));
}
if (baton->bufferOutLength > 0) { if (baton->bufferOutLength > 0) {
// Pass ownership of output data to Buffer instance // Pass ownership of output data to Buffer instance
@@ -1127,6 +1105,7 @@ NAN_METHOD(pipeline) {
using sharp::AttrTo; using sharp::AttrTo;
using sharp::AttrAs; using sharp::AttrAs;
using sharp::AttrAsStr; using sharp::AttrAsStr;
using sharp::AttrAsRgba;
using sharp::CreateInputDescriptor; using sharp::CreateInputDescriptor;
// Input Buffers must not undergo GC compaction during processing // Input Buffers must not undergo GC compaction during processing
@@ -1170,11 +1149,6 @@ NAN_METHOD(pipeline) {
} else if (canvas == "ignore_aspect") { } else if (canvas == "ignore_aspect") {
baton->canvas = Canvas::IGNORE_ASPECT; baton->canvas = Canvas::IGNORE_ASPECT;
} }
// Background colour
v8::Local<v8::Object> background = AttrAs<v8::Object>(options, "background");
for (unsigned int i = 0; i < 4; i++) {
baton->background[i] = AttrTo<double>(background, i);
}
// Tint chroma // Tint chroma
baton->tintA = AttrTo<double>(options, "tintA"); baton->tintA = AttrTo<double>(options, "tintA");
baton->tintB = AttrTo<double>(options, "tintB"); baton->tintB = AttrTo<double>(options, "tintB");
@@ -1189,8 +1163,8 @@ NAN_METHOD(pipeline) {
} }
// Resize options // Resize options
baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement"); baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement");
baton->crop = AttrTo<int32_t>(options, "crop"); baton->position = AttrTo<int32_t>(options, "position");
baton->embed = AttrTo<int32_t>(options, "embed"); baton->resizeBackground = AttrAsRgba(options, "resizeBackground");
baton->kernel = AttrAsStr(options, "kernel"); baton->kernel = AttrAsStr(options, "kernel");
baton->fastShrinkOnLoad = AttrTo<bool>(options, "fastShrinkOnLoad"); baton->fastShrinkOnLoad = AttrTo<bool>(options, "fastShrinkOnLoad");
// Join Channel Options // Join Channel Options
@@ -1208,6 +1182,7 @@ NAN_METHOD(pipeline) {
} }
// Operators // Operators
baton->flatten = AttrTo<bool>(options, "flatten"); baton->flatten = AttrTo<bool>(options, "flatten");
baton->flattenBackground = AttrAsRgba(options, "flattenBackground");
baton->negate = AttrTo<bool>(options, "negate"); baton->negate = AttrTo<bool>(options, "negate");
baton->blurSigma = AttrTo<double>(options, "blurSigma"); baton->blurSigma = AttrTo<double>(options, "blurSigma");
baton->medianSize = AttrTo<uint32_t>(options, "medianSize"); baton->medianSize = AttrTo<uint32_t>(options, "medianSize");
@@ -1216,7 +1191,7 @@ NAN_METHOD(pipeline) {
baton->sharpenJagged = AttrTo<double>(options, "sharpenJagged"); baton->sharpenJagged = AttrTo<double>(options, "sharpenJagged");
baton->threshold = AttrTo<int32_t>(options, "threshold"); baton->threshold = AttrTo<int32_t>(options, "threshold");
baton->thresholdGrayscale = AttrTo<bool>(options, "thresholdGrayscale"); baton->thresholdGrayscale = AttrTo<bool>(options, "thresholdGrayscale");
baton->trimTolerance = AttrTo<int32_t>(options, "trimTolerance"); baton->trimThreshold = AttrTo<double>(options, "trimThreshold");
baton->gamma = AttrTo<double>(options, "gamma"); baton->gamma = AttrTo<double>(options, "gamma");
baton->linearA = AttrTo<double>(options, "linearA"); baton->linearA = AttrTo<double>(options, "linearA");
baton->linearB = AttrTo<double>(options, "linearB"); baton->linearB = AttrTo<double>(options, "linearB");
@@ -1224,6 +1199,8 @@ NAN_METHOD(pipeline) {
baton->normalise = AttrTo<bool>(options, "normalise"); baton->normalise = AttrTo<bool>(options, "normalise");
baton->useExifOrientation = AttrTo<bool>(options, "useExifOrientation"); baton->useExifOrientation = AttrTo<bool>(options, "useExifOrientation");
baton->angle = AttrTo<int32_t>(options, "angle"); baton->angle = AttrTo<int32_t>(options, "angle");
baton->rotationAngle = AttrTo<double>(options, "rotationAngle");
baton->rotationBackground = AttrAsRgba(options, "rotationBackground");
baton->rotateBeforePreExtract = AttrTo<bool>(options, "rotateBeforePreExtract"); baton->rotateBeforePreExtract = AttrTo<bool>(options, "rotateBeforePreExtract");
baton->flip = AttrTo<bool>(options, "flip"); baton->flip = AttrTo<bool>(options, "flip");
baton->flop = AttrTo<bool>(options, "flop"); baton->flop = AttrTo<bool>(options, "flop");
@@ -1231,7 +1208,10 @@ NAN_METHOD(pipeline) {
baton->extendBottom = AttrTo<int32_t>(options, "extendBottom"); baton->extendBottom = AttrTo<int32_t>(options, "extendBottom");
baton->extendLeft = AttrTo<int32_t>(options, "extendLeft"); baton->extendLeft = AttrTo<int32_t>(options, "extendLeft");
baton->extendRight = AttrTo<int32_t>(options, "extendRight"); baton->extendRight = AttrTo<int32_t>(options, "extendRight");
baton->extendBackground = AttrAsRgba(options, "extendBackground");
baton->extractChannel = AttrTo<int32_t>(options, "extractChannel"); baton->extractChannel = AttrTo<int32_t>(options, "extractChannel");
baton->removeAlpha = AttrTo<bool>(options, "removeAlpha");
if (HasAttr(options, "boolean")) { if (HasAttr(options, "boolean")) {
baton->boolean = CreateInputDescriptor(AttrAs<v8::Object>(options, "boolean"), buffersToPersist); baton->boolean = CreateInputDescriptor(AttrAs<v8::Object>(options, "boolean"), buffersToPersist);
baton->booleanOp = sharp::GetBooleanOperation(AttrAsStr(options, "booleanOp")); baton->booleanOp = sharp::GetBooleanOperation(AttrAsStr(options, "booleanOp"));
@@ -1266,6 +1246,7 @@ NAN_METHOD(pipeline) {
baton->jpegProgressive = AttrTo<bool>(options, "jpegProgressive"); baton->jpegProgressive = AttrTo<bool>(options, "jpegProgressive");
baton->jpegChromaSubsampling = AttrAsStr(options, "jpegChromaSubsampling"); baton->jpegChromaSubsampling = AttrAsStr(options, "jpegChromaSubsampling");
baton->jpegTrellisQuantisation = AttrTo<bool>(options, "jpegTrellisQuantisation"); baton->jpegTrellisQuantisation = AttrTo<bool>(options, "jpegTrellisQuantisation");
baton->jpegQuantisationTable = AttrTo<uint32_t>(options, "jpegQuantisationTable");
baton->jpegOvershootDeringing = AttrTo<bool>(options, "jpegOvershootDeringing"); baton->jpegOvershootDeringing = AttrTo<bool>(options, "jpegOvershootDeringing");
baton->jpegOptimiseScans = AttrTo<bool>(options, "jpegOptimiseScans"); baton->jpegOptimiseScans = AttrTo<bool>(options, "jpegOptimiseScans");
baton->jpegOptimiseCoding = AttrTo<bool>(options, "jpegOptimiseCoding"); baton->jpegOptimiseCoding = AttrTo<bool>(options, "jpegOptimiseCoding");
@@ -1307,10 +1288,21 @@ NAN_METHOD(pipeline) {
baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_DZ; baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_DZ;
} }
baton->tileFormat = AttrAsStr(options, "tileFormat"); baton->tileFormat = AttrAsStr(options, "tileFormat");
std::string tileDepth = AttrAsStr(options, "tileDepth");
if (tileDepth == "onetile") {
baton->tileDepth = VIPS_FOREIGN_DZ_DEPTH_ONETILE;
} else if (tileDepth == "one") {
baton->tileDepth = VIPS_FOREIGN_DZ_DEPTH_ONE;
} else if (tileDepth == "onepixel") {
baton->tileDepth = VIPS_FOREIGN_DZ_DEPTH_ONEPIXEL;
} else {
// signal that we do not want to pass any value to dzSave
baton->tileDepth = VIPS_FOREIGN_DZ_DEPTH_LAST;
}
// Force random access for certain operations // Force random access for certain operations
if (baton->accessMethod == VIPS_ACCESS_SEQUENTIAL && ( if (baton->accessMethod == VIPS_ACCESS_SEQUENTIAL && (
baton->trimTolerance != 0 || baton->normalise || baton->trimThreshold > 0.0 || baton->normalise ||
baton->crop == 16 || baton->crop == 17)) { baton->position == 16 || baton->position == 17)) {
baton->accessMethod = VIPS_ACCESS_RANDOM; baton->accessMethod = VIPS_ACCESS_RANDOM;
} }

View File

@@ -61,18 +61,18 @@ struct PipelineBaton {
int height; int height;
int channels; int channels;
Canvas canvas; Canvas canvas;
int crop; int position;
int embed; std::vector<double> resizeBackground;
bool hasCropOffset; bool hasCropOffset;
int cropOffsetLeft; int cropOffsetLeft;
int cropOffsetTop; int cropOffsetTop;
bool premultiplied; bool premultiplied;
std::string kernel; std::string kernel;
bool fastShrinkOnLoad; bool fastShrinkOnLoad;
double background[4];
double tintA; double tintA;
double tintB; double tintB;
bool flatten; bool flatten;
std::vector<double> flattenBackground;
bool negate; bool negate;
double blurSigma; double blurSigma;
int medianSize; int medianSize;
@@ -81,7 +81,9 @@ struct PipelineBaton {
double sharpenJagged; double sharpenJagged;
int threshold; int threshold;
bool thresholdGrayscale; bool thresholdGrayscale;
int trimTolerance; double trimThreshold;
int trimOffsetLeft;
int trimOffsetTop;
double linearA; double linearA;
double linearB; double linearB;
double gamma; double gamma;
@@ -89,6 +91,8 @@ struct PipelineBaton {
bool normalise; bool normalise;
bool useExifOrientation; bool useExifOrientation;
int angle; int angle;
double rotationAngle;
std::vector<double> rotationBackground;
bool rotateBeforePreExtract; bool rotateBeforePreExtract;
bool flip; bool flip;
bool flop; bool flop;
@@ -96,12 +100,14 @@ struct PipelineBaton {
int extendBottom; int extendBottom;
int extendLeft; int extendLeft;
int extendRight; int extendRight;
std::vector<double> extendBackground;
bool withoutEnlargement; bool withoutEnlargement;
VipsAccess accessMethod; VipsAccess accessMethod;
int jpegQuality; int jpegQuality;
bool jpegProgressive; bool jpegProgressive;
std::string jpegChromaSubsampling; std::string jpegChromaSubsampling;
bool jpegTrellisQuantisation; bool jpegTrellisQuantisation;
int jpegQuantisationTable;
bool jpegOvershootDeringing; bool jpegOvershootDeringing;
bool jpegOptimiseScans; bool jpegOptimiseScans;
bool jpegOptimiseCoding; bool jpegOptimiseCoding;
@@ -130,6 +136,7 @@ struct PipelineBaton {
VipsOperationBoolean booleanOp; VipsOperationBoolean booleanOp;
VipsOperationBoolean bandBoolOp; VipsOperationBoolean bandBoolOp;
int extractChannel; int extractChannel;
bool removeAlpha;
VipsInterpretation colourspace; VipsInterpretation colourspace;
int tileSize; int tileSize;
int tileOverlap; int tileOverlap;
@@ -137,6 +144,7 @@ struct PipelineBaton {
VipsForeignDzLayout tileLayout; VipsForeignDzLayout tileLayout;
std::string tileFormat; std::string tileFormat;
int tileAngle; int tileAngle;
VipsForeignDzDepth tileDepth;
PipelineBaton(): PipelineBaton():
input(nullptr), input(nullptr),
@@ -152,8 +160,8 @@ struct PipelineBaton {
topOffsetPost(-1), topOffsetPost(-1),
channels(0), channels(0),
canvas(Canvas::CROP), canvas(Canvas::CROP),
crop(0), position(0),
embed(0), resizeBackground{ 0.0, 0.0, 0.0, 255.0 },
hasCropOffset(false), hasCropOffset(false),
cropOffsetLeft(0), cropOffsetLeft(0),
cropOffsetTop(0), cropOffsetTop(0),
@@ -161,6 +169,7 @@ struct PipelineBaton {
tintA(128.0), tintA(128.0),
tintB(128.0), tintB(128.0),
flatten(false), flatten(false),
flattenBackground{ 0.0, 0.0, 0.0 },
negate(false), negate(false),
blurSigma(0.0), blurSigma(0.0),
medianSize(0), medianSize(0),
@@ -169,7 +178,9 @@ struct PipelineBaton {
sharpenJagged(2.0), sharpenJagged(2.0),
threshold(0), threshold(0),
thresholdGrayscale(true), thresholdGrayscale(true),
trimTolerance(0), trimThreshold(0.0),
trimOffsetLeft(0),
trimOffsetTop(0),
linearA(1.0), linearA(1.0),
linearB(0.0), linearB(0.0),
gamma(0.0), gamma(0.0),
@@ -177,17 +188,21 @@ struct PipelineBaton {
normalise(false), normalise(false),
useExifOrientation(false), useExifOrientation(false),
angle(0), angle(0),
rotationAngle(0.0),
rotationBackground{ 0.0, 0.0, 0.0, 255.0 },
flip(false), flip(false),
flop(false), flop(false),
extendTop(0), extendTop(0),
extendBottom(0), extendBottom(0),
extendLeft(0), extendLeft(0),
extendRight(0), extendRight(0),
extendBackground{ 0.0, 0.0, 0.0, 255.0 },
withoutEnlargement(false), withoutEnlargement(false),
jpegQuality(80), jpegQuality(80),
jpegProgressive(false), jpegProgressive(false),
jpegChromaSubsampling("4:2:0"), jpegChromaSubsampling("4:2:0"),
jpegTrellisQuantisation(false), jpegTrellisQuantisation(false),
jpegQuantisationTable(0),
jpegOvershootDeringing(false), jpegOvershootDeringing(false),
jpegOptimiseScans(false), jpegOptimiseScans(false),
jpegOptimiseCoding(true), jpegOptimiseCoding(true),
@@ -211,17 +226,14 @@ struct PipelineBaton {
booleanOp(VIPS_OPERATION_BOOLEAN_LAST), booleanOp(VIPS_OPERATION_BOOLEAN_LAST),
bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST), bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST),
extractChannel(-1), extractChannel(-1),
removeAlpha(false),
colourspace(VIPS_INTERPRETATION_LAST), colourspace(VIPS_INTERPRETATION_LAST),
tileSize(256), tileSize(256),
tileOverlap(0), tileOverlap(0),
tileContainer(VIPS_FOREIGN_DZ_CONTAINER_FS), tileContainer(VIPS_FOREIGN_DZ_CONTAINER_FS),
tileLayout(VIPS_FOREIGN_DZ_LAYOUT_DZ), tileLayout(VIPS_FOREIGN_DZ_LAYOUT_DZ),
tileAngle(0){ tileAngle(0),
background[0] = 0.0; tileDepth(VIPS_FOREIGN_DZ_DEPTH_LAST) {}
background[1] = 0.0;
background[2] = 0.0;
background[3] = 255.0;
}
}; };
#endif // SRC_PIPELINE_H_ #endif // SRC_PIPELINE_H_

View File

@@ -59,7 +59,6 @@ class StatsWorker : public Nan::AsyncWorker {
using sharp::MaximumImageAlpha; using sharp::MaximumImageAlpha;
vips::VImage image; vips::VImage image;
vips::VImage stats;
sharp::ImageType imageType = sharp::ImageType::UNKNOWN; sharp::ImageType imageType = sharp::ImageType::UNKNOWN;
try { try {
@@ -69,9 +68,8 @@ class StatsWorker : public Nan::AsyncWorker {
} }
if (imageType != sharp::ImageType::UNKNOWN) { if (imageType != sharp::ImageType::UNKNOWN) {
try { try {
stats = image.stats(); vips::VImage stats = image.stats();
int bands = image.bands(); int const bands = image.bands();
double const max = MaximumImageAlpha(image.interpretation());
for (int b = 1; b <= bands; b++) { for (int b = 1; b <= bands; b++) {
ChannelStats cStats(static_cast<int>(stats.getpoint(STAT_MIN_INDEX, b).front()), ChannelStats cStats(static_cast<int>(stats.getpoint(STAT_MIN_INDEX, b).front()),
static_cast<int>(stats.getpoint(STAT_MAX_INDEX, b).front()), static_cast<int>(stats.getpoint(STAT_MAX_INDEX, b).front()),
@@ -83,11 +81,15 @@ class StatsWorker : public Nan::AsyncWorker {
static_cast<int>(stats.getpoint(STAT_MAXY_INDEX, b).front())); static_cast<int>(stats.getpoint(STAT_MAXY_INDEX, b).front()));
baton->channelStats.push_back(cStats); baton->channelStats.push_back(cStats);
} }
// Image is not opaque when alpha layer is present and contains a non-mamixa value
// alpha layer is there and the last band i.e. alpha has its max value greater than 0) if (sharp::HasAlpha(image)) {
if (sharp::HasAlpha(image) && stats.getpoint(STAT_MIN_INDEX, bands).front() != max) { double const minAlpha = static_cast<double>(stats.getpoint(STAT_MIN_INDEX, bands).front());
baton->isOpaque = false; if (minAlpha != MaximumImageAlpha(image.interpretation())) {
baton->isOpaque = false;
}
} }
// Estimate entropy via histogram of greyscale value frequency
baton->entropy = std::abs(image.colourspace(VIPS_INTERPRETATION_B_W)[0].hist_find().hist_entropy());
} catch (vips::VError const &err) { } catch (vips::VError const &err) {
(baton->err).append(err.what()); (baton->err).append(err.what());
} }
@@ -130,6 +132,7 @@ class StatsWorker : public Nan::AsyncWorker {
Set(info, New("channels").ToLocalChecked(), channels); Set(info, New("channels").ToLocalChecked(), channels);
Set(info, New("isOpaque").ToLocalChecked(), New<v8::Boolean>(baton->isOpaque)); Set(info, New("isOpaque").ToLocalChecked(), New<v8::Boolean>(baton->isOpaque));
Set(info, New("entropy").ToLocalChecked(), New<v8::Number>(baton->entropy));
argv[1] = info; argv[1] = info;
} }

View File

@@ -51,12 +51,14 @@ struct StatsBaton {
// Output // Output
std::vector<ChannelStats> channelStats; std::vector<ChannelStats> channelStats;
bool isOpaque; bool isOpaque;
double entropy;
std::string err; std::string err;
StatsBaton(): StatsBaton():
input(nullptr), input(nullptr),
isOpaque(true) isOpaque(true),
entropy(0.0)
{} {}
}; };

View File

@@ -259,7 +259,7 @@ NAN_METHOD(_maxColourDistance) {
} }
// Calculate colour distance // Calculate colour distance
maxColourDistance = image1.dE00(image2).max(); maxColourDistance = image1.dE00(image2).max();
} catch (VError err) { } catch (VError const &err) {
return ThrowError(err.what()); return ThrowError(err.what());
} }

View File

@@ -8,19 +8,17 @@
"test": "node perf && node random && node parallel" "test": "node perf && node random && node parallel"
}, },
"devDependencies": { "devDependencies": {
"async": "^2.6.0", "async": "^2.6.1",
"benchmark": "^2.1.4", "benchmark": "^2.1.4",
"gm": "^1.23.1", "gm": "^1.23.1",
"imagemagick": "^0.1.3", "imagemagick": "^0.1.3",
"imagemagick-native": "^1.9.3", "imagemagick-native": "^1.9.3",
"images": "^3.0.1", "jimp": "^0.5.3",
"jimp": "^0.2.28", "mapnik": "^4.0.1",
"mapnik": "^3.6.2", "semver": "^5.5.1"
"pajk-lwip": "^0.2.0",
"semver": "^5.4.1"
}, },
"license": "Apache-2.0", "license": "Apache-2.0",
"engines": { "engines": {
"node": ">=4" "node": ">=6"
} }
} }

View File

@@ -12,24 +12,12 @@ const gm = require('gm');
const imagemagick = require('imagemagick'); const imagemagick = require('imagemagick');
const mapnik = require('mapnik'); const mapnik = require('mapnik');
const jimp = require('jimp'); const jimp = require('jimp');
let images;
try {
images = require('images');
} catch (err) {
console.log('Excluding node-images');
}
let imagemagickNative; let imagemagickNative;
try { try {
imagemagickNative = require('imagemagick-native'); imagemagickNative = require('imagemagick-native');
} catch (err) { } catch (err) {
console.log('Excluding imagemagick-native'); console.log('Excluding imagemagick-native');
} }
let lwip;
try {
lwip = require('pajk-lwip');
} catch (err) {
console.log('Excluding lwip');
}
const fixtures = require('../fixtures'); const fixtures = require('../fixtures');
@@ -38,8 +26,6 @@ const height = 588;
// Disable libvips cache to ensure tests are as fair as they can be // Disable libvips cache to ensure tests are as fair as they can be
sharp.cache(false); sharp.cache(false);
// Enable use of SIMD
sharp.simd(true);
async.series({ async.series({
'jpeg': function (callback) { 'jpeg': function (callback) {
@@ -54,7 +40,7 @@ async.series({
throw err; throw err;
} else { } else {
image image
.resize(width, height) .resize(width, height, jimp.RESIZE_BICUBIC)
.quality(80) .quality(80)
.getBuffer(jimp.MIME_JPEG, function (err) { .getBuffer(jimp.MIME_JPEG, function (err) {
if (err) { if (err) {
@@ -74,7 +60,7 @@ async.series({
throw err; throw err;
} else { } else {
image image
.resize(width, height) .resize(width, height, jimp.RESIZE_BICUBIC)
.quality(80) .quality(80)
.write(fixtures.outputJpg, function (err) { .write(fixtures.outputJpg, function (err) {
if (err) { if (err) {
@@ -87,51 +73,6 @@ async.series({
}); });
} }
}); });
// lwip
if (typeof lwip !== 'undefined') {
jpegSuite.add('lwip-file-file', {
defer: true,
fn: function (deferred) {
lwip.open(fixtures.inputJpg, function (err, image) {
if (err) {
throw err;
}
image.resize(width, height, 'lanczos', function (err, image) {
if (err) {
throw err;
}
image.writeFile(fixtures.outputJpg, {quality: 80}, function (err) {
if (err) {
throw err;
}
deferred.resolve();
});
});
});
}
}).add('lwip-buffer-buffer', {
defer: true,
fn: function (deferred) {
lwip.open(inputJpgBuffer, 'jpg', function (err, image) {
if (err) {
throw err;
}
image.resize(width, height, 'lanczos', function (err, image) {
if (err) {
throw err;
}
image.toBuffer('jpg', {quality: 80}, function (err, buffer) {
if (err) {
throw err;
}
assert.notStrictEqual(null, buffer);
deferred.resolve();
});
});
});
}
});
}
// mapnik // mapnik
jpegSuite.add('mapnik-file-file', { jpegSuite.add('mapnik-file-file', {
defer: true, defer: true,
@@ -272,14 +213,6 @@ async.series({
}); });
} }
}); });
// images
if (typeof images !== 'undefined') {
jpegSuite.add('images-file-file', function () {
images(fixtures.inputJpg)
.resize(width, height)
.save(fixtures.outputJpg, { quality: 80 });
});
}
// sharp // sharp
jpegSuite.add('sharp-buffer-file', { jpegSuite.add('sharp-buffer-file', {
defer: true, defer: true,
@@ -569,8 +502,10 @@ async.series({
defer: true, defer: true,
fn: function (deferred) { fn: function (deferred) {
sharp(inputJpgBuffer) sharp(inputJpgBuffer)
.resize(width, height) .resize(width, height, {
.crop(sharp.strategy.entropy) fit: 'cover',
position: sharp.strategy.entropy
})
.toBuffer(function (err, buffer) { .toBuffer(function (err, buffer) {
if (err) { if (err) {
throw err; throw err;
@@ -584,8 +519,10 @@ async.series({
defer: true, defer: true,
fn: function (deferred) { fn: function (deferred) {
sharp(inputJpgBuffer) sharp(inputJpgBuffer)
.resize(width, height) .resize(width, height, {
.crop(sharp.strategy.attention) fit: 'cover',
position: sharp.strategy.attention
})
.toBuffer(function (err, buffer) { .toBuffer(function (err, buffer) {
if (err) { if (err) {
throw err; throw err;
@@ -696,31 +633,6 @@ async.series({
}); });
} }
}); });
// lwip
if (typeof lwip !== 'undefined') {
pngSuite.add('lwip-buffer-buffer', {
defer: true,
fn: function (deferred) {
lwip.open(inputPngBuffer, 'png', function (err, image) {
if (err) {
throw err;
}
image.resize(width, height, 'lanczos', function (err, image) {
if (err) {
throw err;
}
image.toBuffer('png', function (err, buffer) {
if (err) {
throw err;
}
assert.notStrictEqual(null, buffer);
deferred.resolve();
});
});
});
}
});
}
// mapnik // mapnik
pngSuite.add('mapnik-file-file', { pngSuite.add('mapnik-file-file', {
defer: true, defer: true,
@@ -833,14 +745,6 @@ async.series({
}); });
} }
}); });
// images
if (typeof images !== 'undefined') {
pngSuite.add('images-file-file', function () {
images(fixtures.inputPng)
.resize(width, height)
.save(fixtures.outputPng);
});
}
// sharp // sharp
pngSuite.add('sharp-buffer-file', { pngSuite.add('sharp-buffer-file', {
defer: true, defer: true,

Binary file not shown.

After

Width:  |  Height:  |  Size: 685 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 258 B

After

Width:  |  Height:  |  Size: 270 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 263 B

After

Width:  |  Height:  |  Size: 265 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 238 KiB

BIN
test/fixtures/expected/svg14.4.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 340 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 26 KiB

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

After

Width:  |  Height:  |  Size: 14 KiB

View File

@@ -15,8 +15,7 @@ const fingerprint = function (image, callback) {
sharp(image) sharp(image)
.greyscale() .greyscale()
.normalise() .normalise()
.resize(9, 8) .resize(9, 8, { fit: sharp.fit.fill })
.ignoreAspectRatio()
.raw() .raw()
.toBuffer(function (err, data) { .toBuffer(function (err, data) {
if (err) { if (err) {

View File

@@ -5,7 +5,7 @@ if ! type valgrind >/dev/null; then
exit 1 exit 1
fi fi
curl -o ./test/leak/libvips.supp https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp curl -s -o ./test/leak/libvips.supp https://raw.githubusercontent.com/libvips/libvips/master/libvips.supp
for test in ./test/unit/*.js; do for test in ./test/unit/*.js; do
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind \ G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind \
@@ -16,5 +16,5 @@ for test in ./test/unit/*.js; do
--show-leak-kinds=definite,indirect,possible \ --show-leak-kinds=definite,indirect,possible \
--num-callers=20 \ --num-callers=20 \
--trace-children=yes \ --trace-children=yes \
mocha --slow=60000 --timeout=120000 "$test"; node node_modules/.bin/mocha --slow=60000 --timeout=120000 --file test/unit/beforeEach.js "$test";
done done

View File

@@ -233,7 +233,13 @@
... ...
fun:uv__work_done fun:uv__work_done
} }
{
leak_libuv_FlushForegroundTasks
Memcheck:Leak
match-leak-kinds: possible
...
fun:_ZN4node12NodePlatform28FlushForegroundTasksInternalEv
}
# nodejs warnings # nodejs warnings
{ {
param_nodejs_write_buffer param_nodejs_write_buffer
@@ -360,6 +366,17 @@
... ...
fun:_ZN2v84base6Thread5StartEv fun:_ZN2v84base6Thread5StartEv
} }
{
leak_nodejs_thread_TracingController
Memcheck:Leak
match-leak-kinds: possible
fun:calloc
fun:allocate_dtv
fun:_dl_allocate_tls
fun:allocate_stack
...
fun:_ZN4node12NodePlatformC1EiPN2v817TracingControllerE
}
{ {
leak_nan_FunctionCallbackInfo leak_nan_FunctionCallbackInfo
Memcheck:Leak Memcheck:Leak

View File

@@ -7,7 +7,6 @@ const async = require('async');
const sharp = require('../../'); const sharp = require('../../');
const crops = { const crops = {
centre: sharp.gravity.centre,
entropy: sharp.strategy.entropy, entropy: sharp.strategy.entropy,
attention: sharp.strategy.attention attention: sharp.strategy.attention
}; };
@@ -34,23 +33,35 @@ async.eachLimit(files, concurrency, function (file, done) {
const salientHeight = userData[file].bottom - userData[file].top; const salientHeight = userData[file].bottom - userData[file].top;
sharp(filename).metadata(function (err, metadata) { sharp(filename).metadata(function (err, metadata) {
if (err) console.log(err); if (err) console.log(err);
const marginWidth = metadata.width - salientWidth;
const marginHeight = metadata.height - salientHeight;
async.each(Object.keys(crops), function (crop, done) { async.each(Object.keys(crops), function (crop, done) {
async.parallel([ async.parallel([
// Left edge accuracy // Left edge accuracy
function (done) { function (done) {
sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function (err, data, info) { if (marginWidth) {
const accuracy = Math.round(Math.abs(userData[file].left - info.cropCalcLeft) / (metadata.width - salientWidth) * 100); sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function (err, data, info) {
incrementScore(accuracy, crop); const delta = Math.abs(userData[file].left + info.cropOffsetLeft);
done(err); const accuracy = Math.round(marginWidth / (marginWidth + delta) * 100);
}); incrementScore(accuracy, crop);
done(err);
});
} else {
done();
}
}, },
// Top edge accuracy // Top edge accuracy
function (done) { function (done) {
sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function (err, data, info) { if (marginHeight) {
const accuracy = Math.round(Math.abs(userData[file].top - info.cropCalcTop) / (metadata.height - salientHeight) * 100); sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function (err, data, info) {
incrementScore(accuracy, crop); const delta = Math.abs(userData[file].top + info.cropOffsetTop);
done(err); const accuracy = Math.round(marginHeight / (marginHeight + delta) * 100);
}); incrementScore(accuracy, crop);
done(err);
});
} else {
done();
}
} }
], done); ], done);
}, done); }, done);
@@ -60,7 +71,7 @@ async.eachLimit(files, concurrency, function (file, done) {
Object.keys(scores).forEach(function (accuracy) { Object.keys(scores).forEach(function (accuracy) {
report.push( report.push(
Object.assign({ Object.assign({
accuracy: parseInt(accuracy, 10) accuracy: Number(accuracy)
}, scores[accuracy]) }, scores[accuracy])
); );
}); });

View File

@@ -19,9 +19,10 @@ describe('Alpha transparency', function () {
it('Flatten to RGB orange', function (done) { it('Flatten to RGB orange', function (done) {
sharp(fixtures.inputPngWithTransparency) sharp(fixtures.inputPngWithTransparency)
.flatten()
.background({r: 255, g: 102, b: 0})
.resize(400, 300) .resize(400, 300)
.flatten({
background: { r: 255, g: 102, b: 0 }
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(400, info.width); assert.strictEqual(400, info.width);
@@ -32,9 +33,8 @@ describe('Alpha transparency', function () {
it('Flatten to CSS/hex orange', function (done) { it('Flatten to CSS/hex orange', function (done) {
sharp(fixtures.inputPngWithTransparency) sharp(fixtures.inputPngWithTransparency)
.flatten()
.background('#ff6600')
.resize(400, 300) .resize(400, 300)
.flatten({ background: '#ff6600' })
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(400, info.width); assert.strictEqual(400, info.width);
@@ -46,8 +46,9 @@ describe('Alpha transparency', function () {
it('Flatten 16-bit PNG with transparency to orange', function (done) { it('Flatten 16-bit PNG with transparency to orange', function (done) {
const output = fixtures.path('output.flatten-rgb16-orange.jpg'); const output = fixtures.path('output.flatten-rgb16-orange.jpg');
sharp(fixtures.inputPngWithTransparency16bit) sharp(fixtures.inputPngWithTransparency16bit)
.flatten() .flatten({
.background({r: 255, g: 102, b: 0}) background: { r: 255, g: 102, b: 0 }
})
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0); assert.strictEqual(true, info.size > 0);
@@ -71,8 +72,7 @@ describe('Alpha transparency', function () {
it('Ignored for JPEG', function (done) { it('Ignored for JPEG', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.background('#ff0000') .flatten({ background: '#ff0000' })
.flatten()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
@@ -81,35 +81,45 @@ describe('Alpha transparency', function () {
}); });
}); });
it('Enlargement with non-nearest neighbor interpolation shouldnt cause dark edges', function (done) { it('Enlargement with non-nearest neighbor interpolation shouldnt cause dark edges', function () {
const base = 'alpha-premultiply-enlargement-2048x1536-paper.png'; const base = 'alpha-premultiply-enlargement-2048x1536-paper.png';
const actual = fixtures.path('output.' + base); const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base); const expected = fixtures.expected(base);
sharp(fixtures.inputPngAlphaPremultiplicationSmall) return sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.resize(2048, 1536) .resize(2048, 1536)
.toFile(actual, function (err) { .toFile(actual)
if (err) { .then(function () {
done(err); fixtures.assertMaxColourDistance(actual, expected, 102);
} else {
fixtures.assertMaxColourDistance(actual, expected, 102);
done();
}
}); });
}); });
it('Reduction with non-nearest neighbor interpolation shouldnt cause dark edges', function (done) { it('Reduction with non-nearest neighbor interpolation shouldnt cause dark edges', function () {
const base = 'alpha-premultiply-reduction-1024x768-paper.png'; const base = 'alpha-premultiply-reduction-1024x768-paper.png';
const actual = fixtures.path('output.' + base); const actual = fixtures.path('output.' + base);
const expected = fixtures.expected(base); const expected = fixtures.expected(base);
sharp(fixtures.inputPngAlphaPremultiplicationLarge) return sharp(fixtures.inputPngAlphaPremultiplicationLarge)
.resize(1024, 768) .resize(1024, 768)
.toFile(actual, function (err) { .toFile(actual)
if (err) { .then(function () {
done(err); fixtures.assertMaxColourDistance(actual, expected, 102);
} else {
fixtures.assertMaxColourDistance(actual, expected, 102);
done();
}
}); });
}); });
it('Removes alpha from fixtures with transparency, ignores those without', function () {
return Promise.all([
fixtures.inputPngWithTransparency,
fixtures.inputPngWithTransparency16bit,
fixtures.inputWebPWithTransparency,
fixtures.inputJpg,
fixtures.inputPng,
fixtures.inputWebP
].map(function (input) {
return sharp(input)
.removeAlpha()
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(3, result.info.channels);
});
}));
});
}); });

12
test/unit/beforeEach.js Normal file
View File

@@ -0,0 +1,12 @@
'use strict';
const detectLibc = require('detect-libc');
const sharp = require('../../');
const usingCache = detectLibc.family !== detectLibc.MUSL;
const usingSimd = !process.env.G_DEBUG;
beforeEach(function () {
sharp.cache(usingCache);
sharp.simd(usingSimd);
});

View File

@@ -1,9 +0,0 @@
'use strict';
const sharp = require('../../');
// Define SHARP_TEST_WITHOUT_CACHE environment variable to prevent use of libvips' cache
beforeEach(function () {
sharp.cache(!process.env.SHARP_TEST_WITHOUT_CACHE);
});

View File

@@ -69,9 +69,10 @@ describe('Colour space conversion', function () {
it('From CMYK to sRGB with white background, not yellow', function (done) { it('From CMYK to sRGB with white background, not yellow', function (done) {
sharp(fixtures.inputJpgWithCmykProfile) sharp(fixtures.inputJpgWithCmykProfile)
.resize(320, 240) .resize(320, 240, {
.background('white') fit: sharp.fit.contain,
.embed() background: 'white'
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);

View File

@@ -0,0 +1,73 @@
'use strict';
const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');
describe('Deprecated background', function () {
it('Flatten to RGB orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background({r: 255, g: 102, b: 0})
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten to CSS/hex orange', function (done) {
sharp(fixtures.inputPngWithTransparency)
.flatten()
.background('#ff6600')
.resize(400, 300)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(400, info.width);
assert.strictEqual(300, info.height);
fixtures.assertSimilar(fixtures.expected('flatten-orange.jpg'), data, done);
});
});
it('Flatten 16-bit PNG with transparency to orange', function (done) {
const output = fixtures.path('output.flatten-rgb16-orange.jpg');
sharp(fixtures.inputPngWithTransparency16bit)
.flatten()
.background({r: 255, g: 102, b: 0})
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual(32, info.width);
assert.strictEqual(32, info.height);
fixtures.assertMaxColourDistance(output, fixtures.expected('flatten-rgb16-orange.jpg'), 25);
done();
});
});
it('Ignored for JPEG', function (done) {
sharp(fixtures.inputJpg)
.background('#ff0000')
.flatten()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
done();
});
});
it('extend all sides equally with RGB', function (done) {
sharp(fixtures.inputJpg)
.resize(120)
.background({r: 255, g: 0, b: 0})
.extend(10)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(140, info.width);
assert.strictEqual(118, info.height);
fixtures.assertSimilar(fixtures.expected('extend-equal.jpg'), data, done);
});
});
});

View File

@@ -5,7 +5,7 @@ const assert = require('assert');
const sharp = require('../../'); const sharp = require('../../');
const fixtures = require('../fixtures'); const fixtures = require('../fixtures');
describe('Crop', function () { describe('Deprecated crop', function () {
[ [
{ {
name: 'North', name: 'North',

View File

@@ -5,7 +5,7 @@ const assert = require('assert');
const sharp = require('../../'); const sharp = require('../../');
const fixtures = require('../fixtures'); const fixtures = require('../fixtures');
describe('Embed', function () { describe('Deprecated embed', function () {
it('Allows specifying the gravity as a string', function (done) { it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240)
@@ -114,23 +114,6 @@ describe('Embed', function () {
}); });
}); });
it.skip('embed TIFF in LAB colourspace onto RGBA background', function (done) {
sharp(fixtures.inputTiffCielab)
.resize(64, 128)
.embed()
.background({r: 255, g: 102, b: 0, alpha: 0.5})
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(64, info.width);
assert.strictEqual(128, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('embed-lab-into-rgba.png'), data, done);
});
});
it('Enlarge and embed', function (done) { it('Enlarge and embed', function (done) {
sharp(fixtures.inputPngWithOneColor) sharp(fixtures.inputPngWithOneColor)
.embed() .embed()

View File

@@ -0,0 +1,261 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Deprecated resize-related functions', function () {
  // Every test below performs the same verification on the toBuffer result:
  // a non-empty JPEG buffer with specific dimensions. This factory builds
  // that callback, preserving the exact assertion order used throughout.
  const expectJpegOfSize = function (width, height, done) {
    return function (err, data, info) {
      if (err) throw err;
      assert.strictEqual(true, data.length > 0);
      assert.strictEqual('jpeg', info.format);
      assert.strictEqual(width, info.width);
      assert.strictEqual(height, info.height);
      done();
    };
  };

  it('Max width or height considering ratio (portrait)', function (done) {
    sharp(fixtures.inputTiff)
      .resize(320, 320)
      .max()
      .jpeg()
      .toBuffer(expectJpegOfSize(243, 320, done));
  });

  it('Min width or height considering ratio (portrait)', function (done) {
    sharp(fixtures.inputTiff)
      .resize(320, 320)
      .min()
      .jpeg()
      .toBuffer(expectJpegOfSize(320, 422, done));
  });

  it('Max width or height considering ratio (landscape)', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320, 320)
      .max()
      .toBuffer(expectJpegOfSize(320, 261, done));
  });

  it('Provide only one dimension with max, should default to crop', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320)
      .max()
      .toBuffer(expectJpegOfSize(320, 261, done));
  });

  it('Min width or height considering ratio (landscape)', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320, 320)
      .min()
      .toBuffer(expectJpegOfSize(392, 320, done));
  });

  it('Provide only one dimension with min, should default to crop', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320)
      .min()
      .toBuffer(expectJpegOfSize(320, 261, done));
  });

  it('Do not enlarge when input width is already less than output width', function (done) {
    sharp(fixtures.inputJpg)
      .resize(2800)
      .withoutEnlargement()
      .toBuffer(expectJpegOfSize(2725, 2225, done));
  });

  it('Do not enlarge when input height is already less than output height', function (done) {
    sharp(fixtures.inputJpg)
      .resize(null, 2300)
      .withoutEnlargement()
      .toBuffer(expectJpegOfSize(2725, 2225, done));
  });

  it('Do enlarge when input width is less than output width', function (done) {
    sharp(fixtures.inputJpg)
      .resize(2800)
      .withoutEnlargement(false)
      .toBuffer(expectJpegOfSize(2800, 2286, done));
  });

  it('Downscale width and height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320, 320)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(320, 320, done));
  });

  it('Downscale width, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(320, 2225, done));
  });

  it('Downscale height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(null, 320)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(2725, 320, done));
  });

  it('Upscale width and height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(3000, 3000)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(3000, 3000, done));
  });

  it('Upscale width, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(3000)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(3000, 2225, done));
  });

  it('Upscale height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(null, 3000)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(2725, 3000, done));
  });

  it('Downscale width, upscale height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(320, 3000)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(320, 3000, done));
  });

  it('Upscale width, downscale height, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .resize(3000, 320)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(3000, 320, done));
  });

  it('Identity transform, ignoring aspect ratio', function (done) {
    sharp(fixtures.inputJpg)
      .ignoreAspectRatio()
      .toBuffer(expectJpegOfSize(2725, 2225, done));
  });
});

View File

@@ -9,8 +9,13 @@ describe('Extend', function () {
it('extend all sides equally with RGB', function (done) { it('extend all sides equally with RGB', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(120) .resize(120)
.background({r: 255, g: 0, b: 0}) .extend({
.extend(10) top: 10,
bottom: 10,
left: 10,
right: 10,
background: { r: 255, g: 0, b: 0 }
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(140, info.width); assert.strictEqual(140, info.width);
@@ -22,8 +27,13 @@ describe('Extend', function () {
it('extend sides unequally with RGBA', function (done) { it('extend sides unequally with RGBA', function (done) {
sharp(fixtures.inputPngWithTransparency16bit) sharp(fixtures.inputPngWithTransparency16bit)
.resize(120) .resize(120)
.background({r: 0, g: 0, b: 0, alpha: 0}) .extend({
.extend({top: 50, bottom: 0, left: 10, right: 35}) top: 50,
bottom: 0,
left: 10,
right: 35,
background: { r: 0, g: 0, b: 0, alpha: 0 }
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(165, info.width); assert.strictEqual(165, info.width);
@@ -50,9 +60,14 @@ describe('Extend', function () {
it('should add alpha channel before extending with a transparent Background', function (done) { it('should add alpha channel before extending with a transparent Background', function (done) {
sharp(fixtures.inputJpgWithLandscapeExif1) sharp(fixtures.inputJpgWithLandscapeExif1)
.background({r: 0, g: 0, b: 0, alpha: 0}) .extend({
top: 0,
bottom: 10,
left: 0,
right: 10,
background: { r: 0, g: 0, b: 0, alpha: 0 }
})
.toFormat(sharp.format.png) .toFormat(sharp.format.png)
.extend({top: 0, bottom: 10, left: 0, right: 10})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(610, info.width); assert.strictEqual(610, info.width);
@@ -63,8 +78,13 @@ describe('Extend', function () {
it('PNG with 2 channels', function (done) { it('PNG with 2 channels', function (done) {
sharp(fixtures.inputPngWithGreyAlpha) sharp(fixtures.inputPngWithGreyAlpha)
.background('transparent') .extend({
.extend({top: 0, bottom: 20, left: 0, right: 20}) top: 0,
bottom: 20,
left: 0,
right: 20,
background: 'transparent'
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);

View File

@@ -69,8 +69,9 @@ describe('Partial image extraction', function () {
it('After resize and crop', function (done) { it('After resize and crop', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(500, 500) .resize(500, 500, {
.crop(sharp.gravity.north) position: sharp.gravity.north
})
.extract({ left: 10, top: 10, width: 100, height: 100 }) .extract({ left: 10, top: 10, width: 100, height: 100 })
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -83,8 +84,9 @@ describe('Partial image extraction', function () {
it('Before and after resize and crop', function (done) { it('Before and after resize and crop', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.extract({ left: 0, top: 0, width: 700, height: 700 }) .extract({ left: 0, top: 0, width: 700, height: 700 })
.resize(500, 500) .resize(500, 500, {
.crop(sharp.gravity.north) position: sharp.gravity.north
})
.extract({ left: 10, top: 10, width: 100, height: 100 }) .extract({ left: 10, top: 10, width: 100, height: 100 })
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -115,7 +117,7 @@ describe('Partial image extraction', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(280, info.width); assert.strictEqual(280, info.width);
assert.strictEqual(380, info.height); assert.strictEqual(380, info.height);
fixtures.assertSimilar(fixtures.expected('rotate-extract.jpg'), data, { threshold: 6 }, done); fixtures.assertSimilar(fixtures.expected('rotate-extract.jpg'), data, { threshold: 7 }, done);
}); });
}); });

View File

@@ -59,12 +59,23 @@ describe('Image channel extraction', function () {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.toColourspace('lch') .toColourspace('lch')
.extractChannel(1) .extractChannel(1)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fixtures.assertMaxColourDistance(output, fixtures.expected('extract-lch.jpg')); fixtures.assertMaxColourDistance(output, fixtures.expected('extract-lch.jpg'), 9);
done();
});
});
it('Alpha from 16-bit PNG', function (done) {
const output = fixtures.path('output.extract-alpha-16bit.jpg');
sharp(fixtures.inputPngWithTransparency16bit)
.extractChannel(3)
.toFile(output, function (err, info) {
if (err) throw err;
fixtures.assertMaxColourDistance(output, fixtures.expected('extract-alpha-16bit.jpg'));
done(); done();
}); });
}); });

View File

@@ -2,6 +2,7 @@
const fs = require('fs'); const fs = require('fs');
const assert = require('assert'); const assert = require('assert');
const rimraf = require('rimraf');
const sharp = require('../../'); const sharp = require('../../');
const fixtures = require('../fixtures'); const fixtures = require('../fixtures');
@@ -16,7 +17,7 @@ describe('Input/output', function () {
it('Read from File and write to Stream', function (done) { it('Read from File and write to Stream', function (done) {
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
writable.on('finish', function () { writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) { sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -24,8 +25,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
}); });
sharp(fixtures.inputJpg).resize(320, 240).pipe(writable); sharp(fixtures.inputJpg).resize(320, 240).pipe(writable);
@@ -34,7 +34,7 @@ describe('Input/output', function () {
it('Read from Buffer and write to Stream', function (done) { it('Read from Buffer and write to Stream', function (done) {
const inputJpgBuffer = fs.readFileSync(fixtures.inputJpg); const inputJpgBuffer = fs.readFileSync(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
writable.on('finish', function () { writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) { sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -42,8 +42,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
}); });
sharp(inputJpgBuffer).resize(320, 240).pipe(writable); sharp(inputJpgBuffer).resize(320, 240).pipe(writable);
@@ -57,8 +56,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
readable.pipe(pipeline); readable.pipe(pipeline);
}); });
@@ -134,7 +132,7 @@ describe('Input/output', function () {
it('Read from Stream and write to Stream', function (done) { it('Read from Stream and write to Stream', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg); const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
writable.on('finish', function () { writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) { sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -142,8 +140,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
}); });
const pipeline = sharp().resize(320, 240); const pipeline = sharp().resize(320, 240);
@@ -162,10 +159,9 @@ describe('Input/output', function () {
assert.strictEqual(3, info.channels); assert.strictEqual(3, info.channels);
infoEventEmitted = true; infoEventEmitted = true;
}); });
writable.on('finish', function () { writable.on('close', function () {
assert.strictEqual(true, infoEventEmitted); assert.strictEqual(true, infoEventEmitted);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
readable.pipe(pipeline).pipe(writable); readable.pipe(pipeline).pipe(writable);
}); });
@@ -177,8 +173,7 @@ describe('Input/output', function () {
anErrorWasEmitted = !!err; anErrorWasEmitted = !!err;
}).on('end', function () { }).on('end', function () {
assert(anErrorWasEmitted); assert(anErrorWasEmitted);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
const readableButNotAnImage = fs.createReadStream(__filename); const readableButNotAnImage = fs.createReadStream(__filename);
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
@@ -192,8 +187,7 @@ describe('Input/output', function () {
anErrorWasEmitted = !!err; anErrorWasEmitted = !!err;
}).on('end', function () { }).on('end', function () {
assert(anErrorWasEmitted); assert(anErrorWasEmitted);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
readableButNotAnImage.pipe(writable); readableButNotAnImage.pipe(writable);
@@ -202,7 +196,7 @@ describe('Input/output', function () {
it('Readable side of Stream can start flowing after Writable side has finished', function (done) { it('Readable side of Stream can start flowing after Writable side has finished', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg); const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg); const writable = fs.createWriteStream(fixtures.outputJpg);
writable.on('finish', function () { writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) { sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -210,8 +204,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg); rimraf(fixtures.outputJpg, done);
done();
}); });
}); });
const pipeline = sharp().resize(320, 240); const pipeline = sharp().resize(320, 240);
@@ -389,6 +382,16 @@ describe('Input/output', function () {
}); });
}); });
describe('Invalid JPEG quantisation table', function () {
[-1, 88.2, 'test'].forEach(function (table) {
it(table.toString(), function () {
assert.throws(function () {
sharp().jpeg({ quantisationTable: table });
});
});
});
});
it('Progressive JPEG image', function (done) { it('Progressive JPEG image', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240)
@@ -554,8 +557,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
@@ -568,8 +570,7 @@ describe('Input/output', function () {
assert.strictEqual('png', info.format); assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
@@ -582,8 +583,7 @@ describe('Input/output', function () {
assert.strictEqual('webp', info.format); assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
@@ -596,8 +596,7 @@ describe('Input/output', function () {
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
@@ -610,8 +609,7 @@ describe('Input/output', function () {
assert.strictEqual('png', info.format); assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
@@ -625,8 +623,7 @@ describe('Input/output', function () {
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height); assert.strictEqual(80, info.height);
fs.unlinkSync(fixtures.outputZoinks); rimraf(fixtures.outputZoinks, done);
done();
}); });
}); });
}); });
@@ -856,6 +853,37 @@ describe('Input/output', function () {
}); });
}); });
it('Specifying quantisation table provides different JPEG', function (done) {
// First generate with default quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false })
.toBuffer(function (err, withDefaultQuantisationTable, withInfo) {
if (err) throw err;
assert.strictEqual(true, withDefaultQuantisationTable.length > 0);
assert.strictEqual(withDefaultQuantisationTable.length, withInfo.size);
assert.strictEqual('jpeg', withInfo.format);
assert.strictEqual(320, withInfo.width);
assert.strictEqual(240, withInfo.height);
// Then generate with different quantisation table
sharp(fixtures.inputJpg)
.resize(320, 240)
.jpeg({ optimiseCoding: false, quantisationTable: 3 })
.toBuffer(function (err, withQuantTable3, withoutInfo) {
if (err) throw err;
assert.strictEqual(true, withQuantTable3.length > 0);
assert.strictEqual(withQuantTable3.length, withoutInfo.size);
assert.strictEqual('jpeg', withoutInfo.format);
assert.strictEqual(320, withoutInfo.width);
assert.strictEqual(240, withoutInfo.height);
// Verify the image is the same size or smaller (mozjpeg may not be present)
assert.strictEqual(true, withQuantTable3.length <= withDefaultQuantisationTable.length);
done();
});
});
});
it('Convert SVG to PNG at default 72DPI', function (done) { it('Convert SVG to PNG at default 72DPI', function (done) {
sharp(fixtures.inputSvg) sharp(fixtures.inputSvg)
.resize(1024) .resize(1024)
@@ -898,6 +926,21 @@ describe('Input/output', function () {
}); });
}); });
it('Convert SVG to PNG at 14.4DPI', function (done) {
sharp(fixtures.inputSvg, { density: 14.4 })
.toFormat('png')
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(20, info.width);
assert.strictEqual(20, info.height);
fixtures.assertSimilar(fixtures.expected('svg14.4.png'), data, function (err) {
if (err) throw err;
done();
});
});
});
it('Convert SVG with embedded images to PNG, respecting dimensions, autoconvert to PNG', function (done) { it('Convert SVG with embedded images to PNG, respecting dimensions, autoconvert to PNG', function (done) {
sharp(fixtures.inputSvgWithEmbeddedImages) sharp(fixtures.inputSvgWithEmbeddedImages)
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
@@ -1024,7 +1067,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size === startSize); assert(info.size === startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1041,7 +1084,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < (startSize / 2)); assert(info.size < (startSize / 2));
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1064,7 +1107,7 @@ describe('Input/output', function () {
sharp(fixtures.outputTiff).metadata(function (err, metadata) { sharp(fixtures.outputTiff).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual(metadata.density, res * 2.54); // convert to dpi assert.strictEqual(metadata.density, res * 2.54); // convert to dpi
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
}); });
@@ -1109,7 +1152,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1125,7 +1168,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1140,7 +1183,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1155,7 +1198,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1170,7 +1213,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1184,7 +1227,7 @@ describe('Input/output', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', info.format); assert.strictEqual('tiff', info.format);
assert(info.size < startSize); assert(info.size < startSize);
fs.unlink(fixtures.outputTiff, done); rimraf(fixtures.outputTiff, done);
}); });
}); });
@@ -1328,8 +1371,7 @@ describe('Input/output', function () {
assert.strictEqual('v', info.format); assert.strictEqual('v', info.format);
assert.strictEqual(70, info.width); assert.strictEqual(70, info.width);
assert.strictEqual(60, info.height); assert.strictEqual(60, info.height);
fs.unlinkSync(fixtures.outputV); rimraf(fixtures.outputV, done);
done();
}); });
}); });
@@ -1465,11 +1507,6 @@ describe('Input/output', function () {
sharp(null, { density: 'zoinks' }); sharp(null, { density: 'zoinks' });
}); });
}); });
it('Invalid density: float', function () {
assert.throws(function () {
sharp(null, { density: 0.5 });
});
});
it('Ignore unknown attribute', function () { it('Ignore unknown attribute', function () {
sharp(null, { unknown: true }); sharp(null, { unknown: true });
}); });

View File

@@ -1,8 +1,10 @@
'use strict'; 'use strict';
const assert = require('assert'); const assert = require('assert');
const fs = require('fs');
const semver = require('semver'); const semver = require('semver');
const libvips = require('../../lib/libvips'); const libvips = require('../../lib/libvips');
const mockFS = require('mock-fs');
const originalPlatform = process.platform; const originalPlatform = process.platform;
@@ -66,5 +68,41 @@ describe('libvips binaries', function () {
delete process.env.SHARP_IGNORE_GLOBAL_LIBVIPS; delete process.env.SHARP_IGNORE_GLOBAL_LIBVIPS;
}); });
it('cachePath returns a valid path ending with _libvips', function () {
const cachePath = libvips.cachePath();
assert.strictEqual('string', typeof cachePath);
assert.strictEqual('_libvips', cachePath.substr(-8));
assert.strictEqual(true, fs.existsSync(cachePath));
});
});
describe('safe directory creation', function () {
before(function () {
mockFS({
exampleDirA: {
exampleDirB: {
exampleFile: 'Example test file'
}
}
});
});
after(function () { mockFS.restore(); });
it('mkdirSync creates a directory', function () {
const dirPath = 'createdDir';
libvips.mkdirSync(dirPath);
assert.strictEqual(true, fs.existsSync(dirPath));
});
it('mkdirSync does not throw error or overwrite an existing dir', function () {
const dirPath = 'exampleDirA';
const nestedDirPath = 'exampleDirA/exampleDirB';
assert.strictEqual(true, fs.existsSync(dirPath));
libvips.mkdirSync(dirPath);
assert.strictEqual(true, fs.existsSync(dirPath));
assert.strictEqual(true, fs.existsSync(nestedDirPath));
});
}); });
}); });

View File

@@ -13,12 +13,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputJpg).metadata(function (err, metadata) { sharp(fixtures.inputJpg).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2725, metadata.width); assert.strictEqual(2725, metadata.width);
assert.strictEqual(2225, metadata.height); assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -32,12 +35,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputJpgWithExif).metadata(function (err, metadata) { sharp(fixtures.inputJpgWithExif).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(450, metadata.width); assert.strictEqual(450, metadata.width);
assert.strictEqual(600, metadata.height); assert.strictEqual(600, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(72, metadata.density); assert.strictEqual(72, metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(true, metadata.hasProfile); assert.strictEqual(true, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual(8, metadata.orientation); assert.strictEqual(8, metadata.orientation);
@@ -79,12 +85,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputTiff).metadata(function (err, metadata) { sharp(fixtures.inputTiff).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('tiff', metadata.format); assert.strictEqual('tiff', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2464, metadata.width); assert.strictEqual(2464, metadata.width);
assert.strictEqual(3248, metadata.height); assert.strictEqual(3248, metadata.height);
assert.strictEqual('b-w', metadata.space); assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels); assert.strictEqual(1, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(300, metadata.density); assert.strictEqual(300, metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual(1, metadata.orientation); assert.strictEqual(1, metadata.orientation);
@@ -98,12 +107,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputPng).metadata(function (err, metadata) { sharp(fixtures.inputPng).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('png', metadata.format); assert.strictEqual('png', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2809, metadata.width); assert.strictEqual(2809, metadata.width);
assert.strictEqual(2074, metadata.height); assert.strictEqual(2074, metadata.height);
assert.strictEqual('b-w', metadata.space); assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels); assert.strictEqual(1, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(300, metadata.density); assert.strictEqual(300, metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -117,12 +129,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputPngWithTransparency).metadata(function (err, metadata) { sharp(fixtures.inputPngWithTransparency).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('png', metadata.format); assert.strictEqual('png', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2048, metadata.width); assert.strictEqual(2048, metadata.width);
assert.strictEqual(1536, metadata.height); assert.strictEqual(1536, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(4, metadata.channels); assert.strictEqual(4, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual(72, metadata.density); assert.strictEqual(72, metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(true, metadata.hasAlpha); assert.strictEqual(true, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -136,12 +151,15 @@ describe('Image metadata', function () {
sharp(fixtures.inputWebP).metadata(function (err, metadata) { sharp(fixtures.inputWebP).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('webp', metadata.format); assert.strictEqual('webp', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(1024, metadata.width); assert.strictEqual(1024, metadata.width);
assert.strictEqual(772, metadata.height); assert.strictEqual(772, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -155,11 +173,14 @@ describe('Image metadata', function () {
sharp(fixtures.inputGif).metadata(function (err, metadata) { sharp(fixtures.inputGif).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('gif', metadata.format); assert.strictEqual('gif', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(800, metadata.width); assert.strictEqual(800, metadata.width);
assert.strictEqual(533, metadata.height); assert.strictEqual(533, metadata.height);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -172,11 +193,14 @@ describe('Image metadata', function () {
sharp(fixtures.inputGifGreyPlusAlpha).metadata(function (err, metadata) { sharp(fixtures.inputGifGreyPlusAlpha).metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('gif', metadata.format); assert.strictEqual('gif', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2, metadata.width); assert.strictEqual(2, metadata.width);
assert.strictEqual(1, metadata.height); assert.strictEqual(1, metadata.height);
assert.strictEqual(2, metadata.channels); assert.strictEqual(2, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('undefined', typeof metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(true, metadata.hasAlpha); assert.strictEqual(true, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -189,12 +213,15 @@ describe('Image metadata', function () {
it('File in, Promise out', function (done) { it('File in, Promise out', function (done) {
sharp(fixtures.inputJpg).metadata().then(function (metadata) { sharp(fixtures.inputJpg).metadata().then(function (metadata) {
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2725, metadata.width); assert.strictEqual(2725, metadata.width);
assert.strictEqual(2225, metadata.height); assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -218,21 +245,22 @@ describe('Image metadata', function () {
const pipeline = sharp(); const pipeline = sharp();
pipeline.metadata().then(function (metadata) { pipeline.metadata().then(function (metadata) {
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual(829183, metadata.size);
assert.strictEqual(2725, metadata.width); assert.strictEqual(2725, metadata.width);
assert.strictEqual(2225, metadata.height); assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
assert.strictEqual('undefined', typeof metadata.exif); assert.strictEqual('undefined', typeof metadata.exif);
assert.strictEqual('undefined', typeof metadata.icc); assert.strictEqual('undefined', typeof metadata.icc);
done(); done();
}).catch(function (err) { }).catch(done);
throw err;
});
readable.pipe(pipeline); readable.pipe(pipeline);
}); });
@@ -241,12 +269,15 @@ describe('Image metadata', function () {
const pipeline = sharp().metadata(function (err, metadata) { const pipeline = sharp().metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual(829183, metadata.size);
assert.strictEqual(2725, metadata.width); assert.strictEqual(2725, metadata.width);
assert.strictEqual(2225, metadata.height); assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -262,12 +293,15 @@ describe('Image metadata', function () {
image.metadata(function (err, metadata) { image.metadata(function (err, metadata) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', metadata.format); assert.strictEqual('jpeg', metadata.format);
assert.strictEqual('undefined', typeof metadata.size);
assert.strictEqual(2725, metadata.width); assert.strictEqual(2725, metadata.width);
assert.strictEqual(2225, metadata.height); assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space); assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels); assert.strictEqual(3, metadata.channels);
assert.strictEqual('uchar', metadata.depth); assert.strictEqual('uchar', metadata.depth);
assert.strictEqual('undefined', typeof metadata.density); assert.strictEqual('undefined', typeof metadata.density);
assert.strictEqual('4:2:0', metadata.chromaSubsampling);
assert.strictEqual(false, metadata.isProgressive);
assert.strictEqual(false, metadata.hasProfile); assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha); assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation); assert.strictEqual('undefined', typeof metadata.orientation);
@@ -346,6 +380,56 @@ describe('Image metadata', function () {
}); });
}); });
it('chromaSubsampling 4:4:4:4 CMYK JPEG', function () {
return sharp(fixtures.inputJpgWithCmykProfile)
.metadata()
.then(function (metadata) {
assert.strictEqual('4:4:4:4', metadata.chromaSubsampling);
});
});
it('chromaSubsampling 4:4:4 RGB JPEG', function () {
return sharp(fixtures.inputJpg)
.resize(10, 10)
.jpeg({ chromaSubsampling: '4:4:4' })
.toBuffer()
.then(function (data) {
return sharp(data)
.metadata()
.then(function (metadata) {
assert.strictEqual('4:4:4', metadata.chromaSubsampling);
});
});
});
it('isProgressive JPEG', function () {
return sharp(fixtures.inputJpg)
.resize(10, 10)
.jpeg({ progressive: true })
.toBuffer()
.then(function (data) {
return sharp(data)
.metadata()
.then(function (metadata) {
assert.strictEqual(true, metadata.isProgressive);
});
});
});
it('isProgressive PNG', function () {
return sharp(fixtures.inputJpg)
.resize(10, 10)
.png({ progressive: true })
.toBuffer()
.then(function (data) {
return sharp(data)
.metadata()
.then(function (metadata) {
assert.strictEqual(true, metadata.isProgressive);
});
});
});
it('File input with corrupt header fails gracefully', function (done) { it('File input with corrupt header fails gracefully', function (done) {
sharp(fixtures.inputJpgWithCorruptHeader) sharp(fixtures.inputJpgWithCorruptHeader)
.metadata(function (err) { .metadata(function (err) {

770
test/unit/resize-contain.js Normal file
View File

@@ -0,0 +1,770 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Resize fit=contain', function () {
// Position may be supplied as a string; 'center' matches the default output.
it('Allows specifying the position as a string', function (done) {
  const options = { fit: 'contain', position: 'center' };
  sharp(fixtures.inputJpg)
    .resize(320, 240, options)
    .png()
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(320, info.width);
      assert.strictEqual(240, info.height);
      fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), output, done);
    });
});
// Containing a JPEG inside a PNG keeps three channels when no alpha is requested.
it('JPEG within PNG, no alpha channel', function (done) {
  sharp(fixtures.inputJpg)
    .resize(320, 240, { fit: 'contain' })
    .png()
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(320, info.width);
      assert.strictEqual(240, info.height);
      assert.strictEqual(3, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-3-into-3.png'), output, done);
    });
});
// A transparent background adds an alpha channel to the WebP output.
it('JPEG within WebP, to include alpha channel', function (done) {
  const options = {
    fit: 'contain',
    background: { r: 0, g: 0, b: 0, alpha: 0 }
  };
  sharp(fixtures.inputJpg)
    .resize(320, 240, options)
    .webp()
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('webp', info.format);
      assert.strictEqual(320, info.width);
      assert.strictEqual(240, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-3-into-4.webp'), output, done);
    });
});
// Existing alpha in the source survives a contain fit.
it('PNG with alpha channel', function (done) {
  sharp(fixtures.inputPngWithTransparency)
    .resize(50, 50, { fit: 'contain' })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(50, info.width);
      assert.strictEqual(50, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-4-into-4.png'), output, done);
    });
});
// 16-bit source with alpha, default background.
it('16-bit PNG with alpha channel', function (done) {
  sharp(fixtures.inputPngWithTransparency16bit)
    .resize(32, 16, { fit: 'contain' })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(32, info.width);
      assert.strictEqual(16, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-16bit.png'), output, done);
    });
});
// 16-bit source with alpha over an explicitly transparent background.
it('16-bit PNG with alpha channel onto RGBA', function (done) {
  const options = {
    fit: 'contain',
    background: { r: 0, g: 0, b: 0, alpha: 0 }
  };
  sharp(fixtures.inputPngWithTransparency16bit)
    .resize(32, 16, options)
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(32, info.width);
      assert.strictEqual(16, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-16bit-rgba.png'), output, done);
    });
});
// Grey+alpha (2 channel) input is promoted to 4 channels.
it('PNG with 2 channels', function (done) {
  const options = {
    fit: 'contain',
    background: { r: 0, g: 0, b: 0, alpha: 0 }
  };
  sharp(fixtures.inputPngWithGreyAlpha)
    .resize(32, 16, options)
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(32, info.width);
      assert.strictEqual(16, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-2channel.png'), output, done);
    });
});
// Currently skipped: LAB colourspace embedding onto an RGBA background.
it.skip('TIFF in LAB colourspace onto RGBA background', function (done) {
  const options = {
    fit: 'contain',
    background: { r: 255, g: 102, b: 0, alpha: 0.5 }
  };
  sharp(fixtures.inputTiffCielab)
    .resize(64, 128, options)
    .png()
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(64, info.width);
      assert.strictEqual(128, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-lab-into-rgba.png'), output, done);
    });
});
// Contain may enlarge a source smaller than the target box.
it('Enlarge', function (done) {
  sharp(fixtures.inputPngWithOneColor)
    .resize(320, 240, { fit: 'contain' })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(320, info.width);
      assert.strictEqual(240, info.height);
      assert.strictEqual(3, info.channels);
      fixtures.assertSimilar(fixtures.expected('embed-enlarge.png'), output, done);
    });
});
// Out-of-range numbers and non-position values must be rejected.
it('Invalid position values should fail', function () {
  const invalidPositions = [-1, 8.1, 9, 1000000, false, 'vallejo'];
  invalidPositions.forEach(function (position) {
    assert.throws(function () {
      sharp().resize(null, null, { fit: 'contain', position });
    });
  });
});
// Shared driver for the horizontal (200x100) string-position tests: contains
// the embed fixture over a transparent background at the given position and
// compares the output with the expected fixture image.
const assertHorizontalPosition = function (position, expected, done) {
  sharp(fixtures.inputPngEmbed)
    .resize(200, 100, {
      fit: sharp.fit.contain,
      background: { r: 0, g: 0, b: 0, alpha: 0 },
      position
    })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(200, info.width);
      assert.strictEqual(100, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected(expected), output, done);
    });
};
it('Position horizontal top', function (done) {
  assertHorizontalPosition('top', './embedgravitybird/a2-n.png', done);
});
it('Position horizontal right top', function (done) {
  assertHorizontalPosition('right top', './embedgravitybird/a3-ne.png', done);
});
it('Position horizontal right', function (done) {
  assertHorizontalPosition('right', './embedgravitybird/a4-e.png', done);
});
it('Position horizontal right bottom', function (done) {
  assertHorizontalPosition('right bottom', './embedgravitybird/a5-se.png', done);
});
it('Position horizontal bottom', function (done) {
  assertHorizontalPosition('bottom', './embedgravitybird/a6-s.png', done);
});
it('Position horizontal left bottom', function (done) {
  assertHorizontalPosition('left bottom', './embedgravitybird/a7-sw.png', done);
});
it('Position horizontal left', function (done) {
  assertHorizontalPosition('left', './embedgravitybird/a8-w.png', done);
});
it('Position horizontal left top', function (done) {
  assertHorizontalPosition('left top', './embedgravitybird/a1-nw.png', done);
});
// Shared driver for the gravity-based (200x100) position tests: contains the
// embed fixture over a transparent background using a sharp.gravity value and
// compares the output with the expected fixture image.
const assertGravityPosition = function (gravity, expected, done) {
  sharp(fixtures.inputPngEmbed)
    .resize(200, 100, {
      fit: sharp.fit.contain,
      background: { r: 0, g: 0, b: 0, alpha: 0 },
      position: gravity
    })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(200, info.width);
      assert.strictEqual(100, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected(expected), output, done);
    });
};
it('Position horizontal north', function (done) {
  assertGravityPosition(sharp.gravity.north, './embedgravitybird/a2-n.png', done);
});
it('Position horizontal northeast', function (done) {
  assertGravityPosition(sharp.gravity.northeast, './embedgravitybird/a3-ne.png', done);
});
it('Position horizontal east', function (done) {
  assertGravityPosition(sharp.gravity.east, './embedgravitybird/a4-e.png', done);
});
it('Position horizontal southeast', function (done) {
  assertGravityPosition(sharp.gravity.southeast, './embedgravitybird/a5-se.png', done);
});
it('Position horizontal south', function (done) {
  assertGravityPosition(sharp.gravity.south, './embedgravitybird/a6-s.png', done);
});
it('Position horizontal southwest', function (done) {
  assertGravityPosition(sharp.gravity.southwest, './embedgravitybird/a7-sw.png', done);
});
it('Position horizontal west', function (done) {
  assertGravityPosition(sharp.gravity.west, './embedgravitybird/a8-w.png', done);
});
it('Position horizontal northwest', function (done) {
  assertGravityPosition(sharp.gravity.northwest, './embedgravitybird/a1-nw.png', done);
});
it('Position horizontal center', function (done) {
  assertGravityPosition(sharp.gravity.center, './embedgravitybird/a9-c.png', done);
});
// Shared driver for the vertical (200x200) string-position tests: contains
// the embed fixture over a transparent background at the given position and
// compares the output with the expected fixture image.
const assertVerticalPosition = function (position, expected, done) {
  sharp(fixtures.inputPngEmbed)
    .resize(200, 200, {
      fit: sharp.fit.contain,
      background: { r: 0, g: 0, b: 0, alpha: 0 },
      position
    })
    .toBuffer(function (error, output, info) {
      if (error) throw error;
      assert.strictEqual(true, output.length > 0);
      assert.strictEqual('png', info.format);
      assert.strictEqual(200, info.width);
      assert.strictEqual(200, info.height);
      assert.strictEqual(4, info.channels);
      fixtures.assertSimilar(fixtures.expected(expected), output, done);
    });
};
it('Position vertical top', function (done) {
  assertVerticalPosition('top', './embedgravitybird/2-n.png', done);
});
it('Position vertical right top', function (done) {
  assertVerticalPosition('right top', './embedgravitybird/3-ne.png', done);
});
it('Position vertical right', function (done) {
  assertVerticalPosition('right', './embedgravitybird/4-e.png', done);
});
it('Position vertical right bottom', function (done) {
  assertVerticalPosition('right bottom', './embedgravitybird/5-se.png', done);
});
it('Position vertical bottom', function (done) {
  assertVerticalPosition('bottom', './embedgravitybird/6-s.png', done);
});
it('Position vertical left bottom', function (done) {
  assertVerticalPosition('left bottom', './embedgravitybird/7-sw.png', done);
});
it('Position vertical left', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: 'left'
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/8-w.png'), data, done);
});
});
it('Position vertical left top', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: 'left top'
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/1-nw.png'), data, done);
});
});
it('Position vertical north', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.north
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/2-n.png'), data, done);
});
});
it('Position vertical northeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.northeast
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/3-ne.png'), data, done);
});
});
it('Position vertical east', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.east
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/4-e.png'), data, done);
});
});
it('Position vertical southeast', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.southeast
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/5-se.png'), data, done);
});
});
it('Position vertical south', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.south
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/6-s.png'), data, done);
});
});
it('Position vertical southwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.southwest
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/7-sw.png'), data, done);
});
});
it('Position vertical west', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.west
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/8-w.png'), data, done);
});
});
it('Position vertical northwest', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.northwest
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/1-nw.png'), data, done);
});
});
it('Position vertical center', function (done) {
sharp(fixtures.inputPngEmbed)
.resize(200, 200, {
fit: sharp.fit.contain,
background: { r: 0, g: 0, b: 0, alpha: 0 },
position: sharp.gravity.center
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(200, info.width);
assert.strictEqual(200, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(fixtures.expected('./embedgravitybird/9-c.png'), data, done);
});
});
});

383
test/unit/resize-cover.js Normal file
View File

@@ -0,0 +1,383 @@
// Unit tests for resize with fit=cover: verifies that the position, gravity
// and strategy options select the expected crop region, and that invalid
// values are rejected.
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Resize fit=cover', function () {
// Each entry crops the same input image to a narrow strip (landscape 320x80
// or portrait 80x320); the expected fixture name encodes which edge of the
// image should survive the crop for the given position/gravity.
[
// Position
{
name: 'Position: top',
width: 320,
height: 80,
gravity: sharp.position.top,
fixture: 'gravity-north.jpg'
},
{
name: 'Position: right',
width: 80,
height: 320,
gravity: sharp.position.right,
fixture: 'gravity-east.jpg'
},
{
name: 'Position: bottom',
width: 320,
height: 80,
gravity: sharp.position.bottom,
fixture: 'gravity-south.jpg'
},
{
name: 'Position: left',
width: 80,
height: 320,
gravity: sharp.position.left,
fixture: 'gravity-west.jpg'
},
// Corner positions are exercised twice: once where the vertical component
// matters (landscape strip) and once where the horizontal component does
// (portrait strip).
{
name: 'Position: right top (top)',
width: 320,
height: 80,
gravity: sharp.position['right top'],
fixture: 'gravity-north.jpg'
},
{
name: 'Position: right top (right)',
width: 80,
height: 320,
gravity: sharp.position['right top'],
fixture: 'gravity-east.jpg'
},
{
name: 'Position: right bottom (bottom)',
width: 320,
height: 80,
gravity: sharp.position['right bottom'],
fixture: 'gravity-south.jpg'
},
{
name: 'Position: right bottom (right)',
width: 80,
height: 320,
gravity: sharp.position['right bottom'],
fixture: 'gravity-east.jpg'
},
{
name: 'Position: left bottom (bottom)',
width: 320,
height: 80,
gravity: sharp.position['left bottom'],
fixture: 'gravity-south.jpg'
},
{
name: 'Position: left bottom (left)',
width: 80,
height: 320,
gravity: sharp.position['left bottom'],
fixture: 'gravity-west.jpg'
},
{
name: 'Position: left top (top)',
width: 320,
height: 80,
gravity: sharp.position['left top'],
fixture: 'gravity-north.jpg'
},
{
name: 'Position: left top (left)',
width: 80,
height: 320,
gravity: sharp.position['left top'],
fixture: 'gravity-west.jpg'
},
// Gravity
{
name: 'Gravity: north',
width: 320,
height: 80,
gravity: sharp.gravity.north,
fixture: 'gravity-north.jpg'
},
{
name: 'Gravity: east',
width: 80,
height: 320,
gravity: sharp.gravity.east,
fixture: 'gravity-east.jpg'
},
{
name: 'Gravity: south',
width: 320,
height: 80,
gravity: sharp.gravity.south,
fixture: 'gravity-south.jpg'
},
{
name: 'Gravity: west',
width: 80,
height: 320,
gravity: sharp.gravity.west,
fixture: 'gravity-west.jpg'
},
// Both spellings (center/centre) are accepted, and centre is the default
// when no gravity is supplied.
{
name: 'Gravity: center',
width: 320,
height: 80,
gravity: sharp.gravity.center,
fixture: 'gravity-center.jpg'
},
{
name: 'Gravity: centre',
width: 80,
height: 320,
gravity: sharp.gravity.centre,
fixture: 'gravity-centre.jpg'
},
{
name: 'Default (centre)',
width: 80,
height: 320,
gravity: undefined,
fixture: 'gravity-centre.jpg'
},
// Diagonal gravities: the relevant component depends on the strip
// orientation, so each is exercised with both aspect ratios.
{
name: 'Gravity: northeast (north)',
width: 320,
height: 80,
gravity: sharp.gravity.northeast,
fixture: 'gravity-north.jpg'
},
{
name: 'Gravity: northeast (east)',
width: 80,
height: 320,
gravity: sharp.gravity.northeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Gravity: southeast (south)',
width: 320,
height: 80,
gravity: sharp.gravity.southeast,
fixture: 'gravity-south.jpg'
},
{
name: 'Gravity: southeast (east)',
width: 80,
height: 320,
gravity: sharp.gravity.southeast,
fixture: 'gravity-east.jpg'
},
{
name: 'Gravity: southwest (south)',
width: 320,
height: 80,
gravity: sharp.gravity.southwest,
fixture: 'gravity-south.jpg'
},
{
name: 'Gravity: southwest (west)',
width: 80,
height: 320,
gravity: sharp.gravity.southwest,
fixture: 'gravity-west.jpg'
},
{
name: 'Gravity: northwest (north)',
width: 320,
height: 80,
gravity: sharp.gravity.northwest,
fixture: 'gravity-north.jpg'
},
{
name: 'Gravity: northwest (west)',
width: 80,
height: 320,
gravity: sharp.gravity.northwest,
fixture: 'gravity-west.jpg'
}
].forEach(function (settings) {
it(settings.name, function (done) {
sharp(fixtures.inputJpg)
.resize(settings.width, settings.height, {
fit: sharp.fit.cover,
position: settings.gravity
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(settings.width, info.width);
assert.strictEqual(settings.height, info.height);
fixtures.assertSimilar(fixtures.expected(settings.fixture), data, done);
});
});
});
// Positions may also be given as plain strings rather than sharp constants.
it('Allows specifying the gravity as a string', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320, {
fit: sharp.fit.cover,
position: 'east'
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
fixtures.assertSimilar(fixtures.expected('gravity-east.jpg'), data, done);
});
});
// Out-of-range numbers and unknown strings must be rejected synchronously.
it('Invalid position values fail', function () {
assert.throws(function () {
sharp().resize(null, null, { fit: 'cover', position: 9 });
}, /Expected valid position\/gravity\/strategy for position but received 9 of type number/);
assert.throws(function () {
sharp().resize(null, null, { fit: 'cover', position: 1.1 });
}, /Expected valid position\/gravity\/strategy for position but received 1.1 of type number/);
assert.throws(function () {
sharp().resize(null, null, { fit: 'cover', position: -1 });
}, /Expected valid position\/gravity\/strategy for position but received -1 of type number/);
assert.throws(function () {
sharp().resize(null, null, { fit: 'cover', position: 'zoinks' }).crop();
}, /Expected valid position\/gravity\/strategy for position but received zoinks of type string/);
});
it('Uses default value when none specified', function () {
assert.doesNotThrow(function () {
sharp().resize(null, null, { fit: 'cover' });
});
});
// When the resized dimensions already match the target, no crop should be
// applied, so no cropOffset* properties appear in the info object.
it('Skip crop when post-resize dimensions are at target', function () {
return sharp(fixtures.inputJpg)
.resize(1600, 1200)
.toBuffer()
.then(function (input) {
return sharp(input)
.resize(1110, null, {
fit: sharp.fit.cover,
position: sharp.strategy.attention
})
.toBuffer({ resolveWithObject: true })
.then(function (result) {
assert.strictEqual(1110, result.info.width);
assert.strictEqual(832, result.info.height);
assert.strictEqual(undefined, result.info.cropOffsetLeft);
assert.strictEqual(undefined, result.info.cropOffsetTop);
});
});
});
// Content-aware crop: entropy strategy picks the busiest region and reports
// the chosen offsets via info.cropOffsetLeft/cropOffsetTop.
describe('Entropy-based strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320, {
fit: 'cover',
position: sharp.strategy.entropy
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-117, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-entropy.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80, {
fit: 'cover',
position: sharp.strategy.entropy
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80, {
fit: 'cover',
position: 'entropy'
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-80, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
// Content-aware crop: attention strategy focuses on salient features
// (edges, skin tones, saturated areas).
describe('Attention strategy', function () {
it('JPEG', function (done) {
sharp(fixtures.inputJpg)
.resize(80, 320, {
fit: 'cover',
position: sharp.strategy.attention
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(80, info.width);
assert.strictEqual(320, info.height);
assert.strictEqual(-143, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
});
});
it('PNG', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80, {
fit: 'cover',
position: sharp.strategy.attention
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80, {
fit: 'cover',
position: 'attention'
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(4, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});
});
});

View File

@@ -151,8 +151,7 @@ describe('Resize dimensions', function () {
it('TIFF embed known to cause rounding errors', function (done) { it('TIFF embed known to cause rounding errors', function (done) {
sharp(fixtures.inputTiff) sharp(fixtures.inputTiff)
.resize(240, 320) .resize(240, 320, { fit: sharp.fit.contain })
.embed()
.jpeg() .jpeg()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -178,10 +177,9 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Max width or height considering ratio (portrait)', function (done) { it('fit=inside, portrait', function (done) {
sharp(fixtures.inputTiff) sharp(fixtures.inputTiff)
.resize(320, 320) .resize(320, 320, { fit: sharp.fit.inside })
.max()
.jpeg() .jpeg()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -193,10 +191,9 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Min width or height considering ratio (portrait)', function (done) { it('fit=outside, portrait', function (done) {
sharp(fixtures.inputTiff) sharp(fixtures.inputTiff)
.resize(320, 320) .resize(320, 320, { fit: sharp.fit.outside })
.min()
.jpeg() .jpeg()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -208,10 +205,9 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Max width or height considering ratio (landscape)', function (done) { it('fit=inside, landscape', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 320) .resize(320, 320, { fit: sharp.fit.inside })
.max()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -222,24 +218,9 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Provide only one dimension with max, should default to crop', function (done) { it('fit=outside, landscape', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320) .resize(320, 320, { fit: sharp.fit.outside })
.max()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('Min width or height considering ratio (landscape)', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 320)
.min()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -250,10 +231,28 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Provide only one dimension with min, should default to crop', function (done) { it('fit=inside, provide only one dimension', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320) .resize({
.min() width: 320,
fit: sharp.fit.inside
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(261, info.height);
done();
});
});
it('fit=outside, provide only one dimension', function (done) {
sharp(fixtures.inputJpg)
.resize({
width: 320,
fit: sharp.fit.outside
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -266,8 +265,10 @@ describe('Resize dimensions', function () {
it('Do not enlarge when input width is already less than output width', function (done) { it('Do not enlarge when input width is already less than output width', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(2800) .resize({
.withoutEnlargement() width: 2800,
withoutEnlargement: true
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -280,8 +281,10 @@ describe('Resize dimensions', function () {
it('Do not enlarge when input height is already less than output height', function (done) { it('Do not enlarge when input height is already less than output height', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(null, 2300) .resize({
.withoutEnlargement() height: 2300,
withoutEnlargement: true
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -294,8 +297,10 @@ describe('Resize dimensions', function () {
it('Do enlarge when input width is less than output width', function (done) { it('Do enlarge when input width is less than output width', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(2800) .resize({
.withoutEnlargement(false) width: 2800,
withoutEnlargement: false
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
@@ -306,103 +311,127 @@ describe('Resize dimensions', function () {
}); });
}); });
it('Downscale width and height, ignoring aspect ratio', function (done) { it('fit=fill, downscale width and height', function (done) {
sharp(fixtures.inputJpg).resize(320, 320).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(320, 320, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(320, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(320, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(320, info.width);
}); assert.strictEqual(320, info.height);
done();
});
}); });
it('Downscale width, ignoring aspect ratio', function (done) { it('fit=fill, downscale width', function (done) {
sharp(fixtures.inputJpg).resize(320).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize({
assert.strictEqual(true, data.length > 0); width: 320,
assert.strictEqual('jpeg', info.format); fit: 'fill'
assert.strictEqual(320, info.width); })
assert.strictEqual(2225, info.height); .toBuffer(function (err, data, info) {
done(); if (err) throw err;
}); assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(2225, info.height);
done();
});
}); });
it('Downscale height, ignoring aspect ratio', function (done) { it('fit=fill, downscale height', function (done) {
sharp(fixtures.inputJpg).resize(null, 320).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize({
assert.strictEqual(true, data.length > 0); height: 320,
assert.strictEqual('jpeg', info.format); fit: 'fill'
assert.strictEqual(2725, info.width); })
assert.strictEqual(320, info.height); .toBuffer(function (err, data, info) {
done(); if (err) throw err;
}); assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(320, info.height);
done();
});
}); });
it('Upscale width and height, ignoring aspect ratio', function (done) { it('fit=fill, upscale width and height', function (done) {
sharp(fixtures.inputJpg).resize(3000, 3000).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(3000, 3000, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(3000, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(3000, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(3000, info.width);
}); assert.strictEqual(3000, info.height);
done();
});
}); });
it('Upscale width, ignoring aspect ratio', function (done) { it('fit=fill, upscale width', function (done) {
sharp(fixtures.inputJpg).resize(3000).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(3000, null, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(3000, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(2225, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(3000, info.width);
}); assert.strictEqual(2225, info.height);
done();
});
}); });
it('Upscale height, ignoring aspect ratio', function (done) { it('fit=fill, upscale height', function (done) {
sharp(fixtures.inputJpg).resize(null, 3000).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(null, 3000, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(2725, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(3000, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(2725, info.width);
}); assert.strictEqual(3000, info.height);
done();
});
}); });
it('Downscale width, upscale height, ignoring aspect ratio', function (done) { it('fit=fill, downscale width, upscale height', function (done) {
sharp(fixtures.inputJpg).resize(320, 3000).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(320, 3000, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(320, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(3000, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(320, info.width);
}); assert.strictEqual(3000, info.height);
done();
});
}); });
it('Upscale width, downscale height, ignoring aspect ratio', function (done) { it('fit=fill, upscale width, downscale height', function (done) {
sharp(fixtures.inputJpg).resize(3000, 320).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(3000, 320, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(3000, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(320, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(3000, info.width);
}); assert.strictEqual(320, info.height);
done();
});
}); });
it('Identity transform, ignoring aspect ratio', function (done) { it('fit=fill, identity transform', function (done) {
sharp(fixtures.inputJpg).ignoreAspectRatio().toBuffer(function (err, data, info) { sharp(fixtures.inputJpg)
if (err) throw err; .resize(null, null, { fit: 'fill' })
assert.strictEqual(true, data.length > 0); .toBuffer(function (err, data, info) {
assert.strictEqual('jpeg', info.format); if (err) throw err;
assert.strictEqual(2725, info.width); assert.strictEqual(true, data.length > 0);
assert.strictEqual(2225, info.height); assert.strictEqual('jpeg', info.format);
done(); assert.strictEqual(2725, info.width);
}); assert.strictEqual(2225, info.height);
done();
});
}); });
it('Dimensions that result in differing even shrinks on each axis', function (done) { it('Dimensions that result in differing even shrinks on each axis', function (done) {
@@ -500,4 +529,16 @@ describe('Resize dimensions', function () {
sharp().resize(null, null, { kernel: 'unknown' }); sharp().resize(null, null, { kernel: 'unknown' });
}); });
}); });
it('unknown fit throws', function () {
assert.throws(function () {
sharp().resize(null, null, { fit: 'unknown' });
});
});
it('unknown position throws', function () {
assert.throws(function () {
sharp().resize(null, null, { position: 'unknown' });
});
});
}); });

View File

@@ -23,6 +23,33 @@ describe('Rotation', function () {
}); });
}); });
it('Rotate by 30 degrees with semi-transparent background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, {background: { r: 255, g: 0, b: 0, alpha: 0.5 }})
.resize(320)
.png()
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(408, info.width);
assert.strictEqual(386, info.height);
fixtures.assertSimilar(fixtures.expected('rotate-transparent-bg.png'), data, done);
});
});
it('Rotate by 30 degrees with solid background', function (done) {
sharp(fixtures.inputJpg)
.rotate(30, {background: { r: 255, g: 0, b: 0, alpha: 0.5 }})
.resize(320)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(408, info.width);
assert.strictEqual(386, info.height);
fixtures.assertSimilar(fixtures.expected('rotate-solid-bg.jpg'), data, done);
});
});
it('Rotate by 90 degrees, respecting output input size', function (done) { it('Rotate by 90 degrees, respecting output input size', function (done) {
sharp(fixtures.inputJpg).rotate(90).resize(320, 240).toBuffer(function (err, data, info) { sharp(fixtures.inputJpg).rotate(90).resize(320, 240).toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -34,6 +61,17 @@ describe('Rotation', function () {
}); });
}); });
it('Rotate by 30 degrees, respecting output input size', function (done) {
sharp(fixtures.inputJpg).rotate(30).resize(320, 240).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(397, info.width);
assert.strictEqual(368, info.height);
done();
});
});
[-3690, -450, -90, 90, 450, 3690].forEach(function (angle) { [-3690, -450, -90, 90, 450, 3690].forEach(function (angle) {
it('Rotate by any 90-multiple angle (' + angle + 'deg)', function (done) { it('Rotate by any 90-multiple angle (' + angle + 'deg)', function (done) {
sharp(fixtures.inputJpg320x240).rotate(angle).toBuffer(function (err, data, info) { sharp(fixtures.inputJpg320x240).rotate(angle).toBuffer(function (err, data, info) {
@@ -45,6 +83,17 @@ describe('Rotation', function () {
}); });
}); });
[-3750, -510, -150, 30, 390, 3630].forEach(function (angle) {
it('Rotate by any 30-multiple angle (' + angle + 'deg)', function (done) {
sharp(fixtures.inputJpg320x240).rotate(angle).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(397, info.width);
assert.strictEqual(368, info.height);
done();
});
});
});
[-3780, -540, 0, 180, 540, 3780].forEach(function (angle) { [-3780, -540, 0, 180, 540, 3780].forEach(function (angle) {
it('Rotate by any 180-multiple angle (' + angle + 'deg)', function (done) { it('Rotate by any 180-multiple angle (' + angle + 'deg)', function (done) {
sharp(fixtures.inputJpg320x240).rotate(angle).toBuffer(function (err, data, info) { sharp(fixtures.inputJpg320x240).rotate(angle).toBuffer(function (err, data, info) {
@@ -58,8 +107,7 @@ describe('Rotation', function () {
it('Rotate by 270 degrees, square output ignoring aspect ratio', function (done) { it('Rotate by 270 degrees, square output ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(240, 240) .resize(240, 240, { fit: sharp.fit.fill })
.ignoreAspectRatio()
.rotate(270) .rotate(270)
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -74,10 +122,26 @@ describe('Rotation', function () {
}); });
}); });
it('Rotate by 315 degrees, square output ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(240, 240, { fit: sharp.fit.fill })
.rotate(315)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(339, info.width);
assert.strictEqual(339, info.height);
sharp(data).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(339, metadata.width);
assert.strictEqual(339, metadata.height);
done();
});
});
});
it('Rotate by 270 degrees, rectangular output ignoring aspect ratio', function (done) { it('Rotate by 270 degrees, rectangular output ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fit: sharp.fit.fill })
.ignoreAspectRatio()
.rotate(270) .rotate(270)
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
@@ -92,6 +156,23 @@ describe('Rotation', function () {
}); });
}); });
it('Rotate by 30 degrees, rectangular output ignoring aspect ratio', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 240, { fit: sharp.fit.fill })
.rotate(30)
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(397, info.width);
assert.strictEqual(368, info.height);
sharp(data).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual(397, metadata.width);
assert.strictEqual(368, metadata.height);
done();
});
});
});
it('Input image has Orientation EXIF tag but do not rotate output', function (done) { it('Input image has Orientation EXIF tag but do not rotate output', function (done) {
sharp(fixtures.inputJpgWithExif) sharp(fixtures.inputJpgWithExif)
.resize(320) .resize(320)
@@ -185,9 +266,9 @@ describe('Rotation', function () {
}); });
}); });
it('Rotate to an invalid angle, should fail', function () { it('Rotate with a string argument, should fail', function () {
assert.throws(function () { assert.throws(function () {
sharp(fixtures.inputJpg).rotate(1); sharp(fixtures.inputJpg).rotate('not-a-number');
}); });
}); });

View File

@@ -24,6 +24,7 @@ describe('Image Stats', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 7.319914765248541));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -82,6 +83,7 @@ describe('Image Stats', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 0.3409031108021736));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -105,7 +107,9 @@ describe('Image Stats', function () {
it('PNG with transparency', function (done) { it('PNG with transparency', function (done) {
sharp(fixtures.inputPngWithTransparency).stats(function (err, stats) { sharp(fixtures.inputPngWithTransparency).stats(function (err, stats) {
if (err) throw err; if (err) throw err;
assert.strictEqual(false, stats.isOpaque); assert.strictEqual(false, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 0.06778064835816622));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -180,6 +184,7 @@ describe('Image Stats', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(false, stats.isOpaque); assert.strictEqual(false, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 0));
// alpha channel // alpha channel
assert.strictEqual(0, stats.channels[3]['min']); assert.strictEqual(0, stats.channels[3]['min']);
@@ -204,7 +209,9 @@ describe('Image Stats', function () {
it('Tiff', function (done) { it('Tiff', function (done) {
sharp(fixtures.inputTiff).stats(function (err, stats) { sharp(fixtures.inputTiff).stats(function (err, stats) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 0.3851250782608986));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -231,6 +238,7 @@ describe('Image Stats', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 7.51758075132966));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -289,6 +297,7 @@ describe('Image Stats', function () {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 6.087309412541799));
// red channel // red channel
assert.strictEqual(35, stats.channels[0]['min']); assert.strictEqual(35, stats.channels[0]['min']);
@@ -345,7 +354,9 @@ describe('Image Stats', function () {
it('Grayscale GIF with alpha', function (done) { it('Grayscale GIF with alpha', function (done) {
sharp(fixtures.inputGifGreyPlusAlpha).stats(function (err, stats) { sharp(fixtures.inputGifGreyPlusAlpha).stats(function (err, stats) {
if (err) throw err; if (err) throw err;
assert.strictEqual(false, stats.isOpaque); assert.strictEqual(false, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 1));
// gray channel // gray channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -387,7 +398,9 @@ describe('Image Stats', function () {
const readable = fs.createReadStream(fixtures.inputJpg); const readable = fs.createReadStream(fixtures.inputJpg);
const pipeline = sharp().stats(function (err, stats) { const pipeline = sharp().stats(function (err, stats) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 7.319914765248541));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -449,6 +462,7 @@ describe('Image Stats', function () {
return pipeline.stats().then(function (stats) { return pipeline.stats().then(function (stats) {
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 7.319914765248541));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);
@@ -505,6 +519,7 @@ describe('Image Stats', function () {
it('File in, Promise out', function () { it('File in, Promise out', function () {
return sharp(fixtures.inputJpg).stats().then(function (stats) { return sharp(fixtures.inputJpg).stats().then(function (stats) {
assert.strictEqual(true, stats.isOpaque); assert.strictEqual(true, stats.isOpaque);
assert.strictEqual(true, isInAcceptableRange(stats.entropy, 7.319914765248541));
// red channel // red channel
assert.strictEqual(0, stats.channels[0]['min']); assert.strictEqual(0, stats.channels[0]['min']);

View File

@@ -46,6 +46,51 @@ const assertDeepZoomTiles = function (directory, expectedSize, expectedLevels, d
}, done); }, done);
}; };
const assertZoomifyTiles = function (directory, expectedTileSize, expectedLevels, done) {
fs.stat(path.join(directory, 'ImageProperties.xml'), function (err, stat) {
if (err) throw err;
assert.ok(stat.isFile());
assert.ok(stat.size > 0);
let maxTileLevel = -1;
fs.readdirSync(path.join(directory, 'TileGroup0')).forEach(function (tile) {
// Verify tile file name
assert.ok(/^[0-9]+-[0-9]+-[0-9]+\.jpg$/.test(tile));
let level = parseInt(tile.split('-')[0]);
maxTileLevel = Math.max(maxTileLevel, level);
});
assert.strictEqual(maxTileLevel + 1, expectedLevels); // add one to account for zero level tile
done();
});
};
const assertGoogleTiles = function (directory, expectedTileSize, expectedLevels, done) {
const levels = fs.readdirSync(directory);
assert.strictEqual(expectedLevels, levels.length - 1); // subtract one to account for default blank tile
fs.stat(path.join(directory, 'blank.png'), function (err, stat) {
if (err) throw err;
assert.ok(stat.isFile());
assert.ok(stat.size > 0);
// Basic check to confirm lowest and highest level tiles exist
fs.stat(path.join(directory, '0', '0', '0.jpg'), function (err, stat) {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
fs.stat(path.join(directory, (expectedLevels - 1).toString(), '0', '0.jpg'), function (err, stat) {
if (err) throw err;
assert.strictEqual(true, stat.isFile());
assert.strictEqual(true, stat.size > 0);
done();
});
});
});
};
describe('Tile', function () { describe('Tile', function () {
it('Valid size values pass', function () { it('Valid size values pass', function () {
[1, 8192].forEach(function (size) { [1, 8192].forEach(function (size) {
@@ -144,6 +189,26 @@ describe('Tile', function () {
}); });
}); });
it('Valid depths pass', function () {
['onepixel', 'onetile', 'one'].forEach(function (depth) {
assert.doesNotThrow(function (depth) {
sharp().tile({
depth: depth
});
});
});
});
it('Invalid depths fail', function () {
['depth', 1].forEach(function (depth) {
assert.throws(function () {
sharp().tile({
depth: depth
});
});
});
});
it('Prevent larger overlap than default size', function () { it('Prevent larger overlap than default size', function () {
assert.throws(function () { assert.throws(function () {
sharp().tile({ sharp().tile({
@@ -251,6 +316,54 @@ describe('Tile', function () {
}); });
}); });
it('Deep Zoom layout with depth of one', function (done) {
const directory = fixtures.path('output.512_depth_one.dzi_files');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 512,
depth: 'one'
})
.toFile(fixtures.path('output.512_depth_one.dzi'), function (err, info) {
if (err) throw err;
// Verify only one depth generated
assertDeepZoomTiles(directory, 512, 1, done);
});
});
});
it('Deep Zoom layout with depth of onepixel', function (done) {
const directory = fixtures.path('output.512_depth_onepixel.dzi_files');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 512,
depth: 'onepixel'
})
.toFile(fixtures.path('output.512_depth_onepixel.dzi'), function (err, info) {
if (err) throw err;
// Verify only one depth generated
assertDeepZoomTiles(directory, 512, 13, done);
});
});
});
it('Deep Zoom layout with depth of onetile', function (done) {
const directory = fixtures.path('output.256_depth_onetile.dzi_files');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 256,
depth: 'onetile'
})
.toFile(fixtures.path('output.256_depth_onetile.dzi'), function (err, info) {
if (err) throw err;
// Verify only one depth generated
assertDeepZoomTiles(directory, 256, 5, done);
});
});
});
it('Zoomify layout', function (done) { it('Zoomify layout', function (done) {
const directory = fixtures.path('output.zoomify.dzi'); const directory = fixtures.path('output.zoomify.dzi');
rimraf(directory, function () { rimraf(directory, function () {
@@ -275,6 +388,69 @@ describe('Tile', function () {
}); });
}); });
it('Zoomify layout with depth one', function (done) {
const directory = fixtures.path('output.zoomify.depth_one.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 256,
layout: 'zoomify',
depth: 'one'
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertZoomifyTiles(directory, 256, 1, done);
});
});
});
it('Zoomify layout with depth onetile', function (done) {
const directory = fixtures.path('output.zoomify.depth_onetile.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 256,
layout: 'zoomify',
depth: 'onetile'
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertZoomifyTiles(directory, 256, 5, done);
});
});
});
it('Zoomify layout with depth onepixel', function (done) {
const directory = fixtures.path('output.zoomify.depth_onepixel.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
size: 256,
layout: 'zoomify',
depth: 'onepixel'
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertZoomifyTiles(directory, 256, 13, done);
});
});
});
it('Google layout', function (done) { it('Google layout', function (done) {
const directory = fixtures.path('output.google.dzi'); const directory = fixtures.path('output.google.dzi');
rimraf(directory, function () { rimraf(directory, function () {
@@ -410,6 +586,72 @@ describe('Tile', function () {
}); });
}); });
it('Google layout with depth one', function (done) {
const directory = fixtures.path('output.google_depth_one.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
layout: 'google',
depth: 'one',
size: 256
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertGoogleTiles(directory, 256, 1, done);
});
});
});
it('Google layout with depth onepixel', function (done) {
const directory = fixtures.path('output.google_depth_onepixel.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
layout: 'google',
depth: 'onepixel',
size: 256
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertGoogleTiles(directory, 256, 13, done);
});
});
});
it('Google layout with depth onetile', function (done) {
const directory = fixtures.path('output.google_depth_onetile.dzi');
rimraf(directory, function () {
sharp(fixtures.inputJpg)
.tile({
layout: 'google',
depth: 'onetile',
size: 256
})
.toFile(directory, function (err, info) {
if (err) throw err;
assert.strictEqual('dz', info.format);
assert.strictEqual(2725, info.width);
assert.strictEqual(2225, info.height);
assert.strictEqual(3, info.channels);
assert.strictEqual('number', typeof info.size);
assertGoogleTiles(directory, 256, 5, done);
});
});
});
it('Write to ZIP container using file extension', function (done) { it('Write to ZIP container using file extension', function (done) {
const container = fixtures.path('output.dz.container.zip'); const container = fixtures.path('output.dz.container.zip');
const extractTo = fixtures.path('output.dz.container'); const extractTo = fixtures.path('output.dz.container');

View File

@@ -9,12 +9,12 @@ describe('Tint', function () {
it('tints rgb image red', function (done) { it('tints rgb image red', function (done) {
const output = fixtures.path('output.tint-red.jpg'); const output = fixtures.path('output.tint-red.jpg');
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#FF0000') .tint('#FF0000')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0); assert.strictEqual(true, info.size > 0);
fixtures.assertMaxColourDistance(output, fixtures.expected('tint-red.jpg'), 10); fixtures.assertMaxColourDistance(output, fixtures.expected('tint-red.jpg'), 18);
done(); done();
}); });
}); });
@@ -22,12 +22,12 @@ describe('Tint', function () {
it('tints rgb image green', function (done) { it('tints rgb image green', function (done) {
const output = fixtures.path('output.tint-green.jpg'); const output = fixtures.path('output.tint-green.jpg');
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#00FF00') .tint('#00FF00')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0); assert.strictEqual(true, info.size > 0);
fixtures.assertMaxColourDistance(output, fixtures.expected('tint-green.jpg'), 10); fixtures.assertMaxColourDistance(output, fixtures.expected('tint-green.jpg'), 27);
done(); done();
}); });
}); });
@@ -35,12 +35,12 @@ describe('Tint', function () {
it('tints rgb image blue', function (done) { it('tints rgb image blue', function (done) {
const output = fixtures.path('output.tint-blue.jpg'); const output = fixtures.path('output.tint-blue.jpg');
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#0000FF') .tint('#0000FF')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0); assert.strictEqual(true, info.size > 0);
fixtures.assertMaxColourDistance(output, fixtures.expected('tint-blue.jpg'), 10); fixtures.assertMaxColourDistance(output, fixtures.expected('tint-blue.jpg'), 14);
done(); done();
}); });
}); });
@@ -48,7 +48,7 @@ describe('Tint', function () {
it('tints rgb image with sepia tone', function (done) { it('tints rgb image with sepia tone', function (done) {
const output = fixtures.path('output.tint-sepia.jpg'); const output = fixtures.path('output.tint-sepia.jpg');
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#704214') .tint('#704214')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
@@ -62,7 +62,7 @@ describe('Tint', function () {
it('tints rgb image with sepia tone with rgb colour', function (done) { it('tints rgb image with sepia tone with rgb colour', function (done) {
const output = fixtures.path('output.tint-sepia.jpg'); const output = fixtures.path('output.tint-sepia.jpg');
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint([112, 66, 20]) .tint([112, 66, 20])
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
@@ -76,7 +76,7 @@ describe('Tint', function () {
it('tints rgb image with alpha channel', function (done) { it('tints rgb image with alpha channel', function (done) {
const output = fixtures.path('output.tint-alpha.png'); const output = fixtures.path('output.tint-alpha.png');
sharp(fixtures.inputPngRGBWithAlpha) sharp(fixtures.inputPngRGBWithAlpha)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#704214') .tint('#704214')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
@@ -90,12 +90,12 @@ describe('Tint', function () {
it('tints cmyk image red', function (done) { it('tints cmyk image red', function (done) {
const output = fixtures.path('output.tint-cmyk.jpg'); const output = fixtures.path('output.tint-cmyk.jpg');
sharp(fixtures.inputJpgWithCmykProfile) sharp(fixtures.inputJpgWithCmykProfile)
.resize(320, 240) .resize(320, 240, { fastShrinkOnLoad: false })
.tint('#FF0000') .tint('#FF0000')
.toFile(output, function (err, info) { .toFile(output, function (err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0); assert.strictEqual(true, info.size > 0);
fixtures.assertMaxColourDistance(output, fixtures.expected('tint-cmyk.jpg'), 10); fixtures.assertMaxColourDistance(output, fixtures.expected('tint-cmyk.jpg'), 15);
done(); done();
}); });
}); });

View File

@@ -3,6 +3,7 @@
const assert = require('assert'); const assert = require('assert');
const sharp = require('../../'); const sharp = require('../../');
const inRange = require('../../lib/is').inRange;
const fixtures = require('../fixtures'); const fixtures = require('../fixtures');
describe('Trim borders', function () { describe('Trim borders', function () {
@@ -16,6 +17,8 @@ describe('Trim borders', function () {
assert.strictEqual('png', info.format); assert.strictEqual('png', info.format);
assert.strictEqual(450, info.width); assert.strictEqual(450, info.width);
assert.strictEqual(322, info.height); assert.strictEqual(322, info.height);
assert.strictEqual(-204, info.trimOffsetLeft);
assert.strictEqual(0, info.trimOffsetTop);
fixtures.assertSimilar(expected, data, done); fixtures.assertSimilar(expected, data, done);
}); });
}); });
@@ -24,12 +27,16 @@ describe('Trim borders', function () {
const expected = fixtures.expected('alpha-layer-2-trim-resize.jpg'); const expected = fixtures.expected('alpha-layer-2-trim-resize.jpg');
sharp(fixtures.inputJpgOverlayLayer2) sharp(fixtures.inputJpgOverlayLayer2)
.trim() .trim()
.resize(300) .resize({
width: 300,
fastShrinkOnLoad: false
})
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(300, info.width); assert.strictEqual(300, info.width);
assert.strictEqual(300, info.height); assert.strictEqual(true, inRange(info.trimOffsetLeft, -873, -870));
assert.strictEqual(-554, info.trimOffsetTop);
fixtures.assertSimilar(expected, data, done); fixtures.assertSimilar(expected, data, done);
}); });
}); });
@@ -45,12 +52,14 @@ describe('Trim borders', function () {
assert.strictEqual(32, info.width); assert.strictEqual(32, info.width);
assert.strictEqual(32, info.height); assert.strictEqual(32, info.height);
assert.strictEqual(4, info.channels); assert.strictEqual(4, info.channels);
assert.strictEqual(-2, info.trimOffsetLeft);
assert.strictEqual(-2, info.trimOffsetTop);
fixtures.assertSimilar(fixtures.expected('trim-16bit-rgba.png'), data, done); fixtures.assertSimilar(fixtures.expected('trim-16bit-rgba.png'), data, done);
}); });
}); });
describe('Invalid thresholds', function () { describe('Invalid thresholds', function () {
[-1, 100, 'fail', {}].forEach(function (threshold) { [-1, 'fail', {}].forEach(function (threshold) {
it(JSON.stringify(threshold), function () { it(JSON.stringify(threshold), function () {
assert.throws(function () { assert.throws(function () {
sharp().trim(threshold); sharp().trim(threshold);