Mirror of https://github.com/lovell/sharp.git, synced 2026-02-05 14:16:17 +01:00.

Compare commits (24 commits):

* f4cb577cb4
* 91be57cbce
* 78596545b0
* 9f6cc33858
* d82de45b7e
* b7bbf58624
* 945d941c7b
* 2605bf966f
* 83b72a1ede
* 6190ca4307
* c2fcf7fc4a
* 37cb4339e2
* 46f229e308
* 7f8f38f666
* fb0769a327
* b84cc3d49e
* 0cba506bc4
* 5cdfbba55c
* 6145231936
* 513b07ddcf
* 150971fa92
* ac85d88c9c
* 1c79d6fb5d
* d41321254a
.gitignore (vendored): 3 changes

````diff
@@ -14,3 +14,6 @@ build
 node_modules
 tests/fixtures/output.*
 tests/libvips.supp
+
+# Mac OS X
+.DS_Store
````
README.md: 185 changes

````diff
@@ -45,37 +45,29 @@ The _gettext_ dependency of _libvips_ [can lead](https://github.com/lovell/sharp
 
 brew link gettext --force
 
-### Install libvips on Ubuntu Linux
+### Install libvips on Linux
 
-#### Ubuntu 14.x
+#### Ubuntu 14.04 LTS
 
 sudo apt-get install libvips-dev
 
-#### Ubuntu 13.x
+#### Ubuntu 12.04 LTS
 
-Compiling from source is recommended:
-sudo apt-get install automake build-essential git gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff5-dev libexif-dev libxml2-dev swig libmagickwand-dev
+sudo add-apt-repository -y ppa:lyrasis/precise-backports
+sudo apt-get update
+sudo apt-get install -y automake build-essential git gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff4-dev libexif-dev libxml2-dev swig libmagickwand-dev
 git clone https://github.com/jcupitt/libvips.git
 cd libvips
-git checkout 7.38
+git checkout 7.40
 ./bootstrap.sh
 ./configure --enable-debug=no --enable-cxx=yes --without-python --without-orc --without-fftw
 make
 sudo make install
 sudo ldconfig
 
-#### Ubuntu 12.x
+#### Debian Jessie
 
-Requires `libtiff4-dev` instead of `libtiff5-dev` and has [a bug](https://bugs.launchpad.net/ubuntu/+source/libwebp/+bug/1108731) in the libwebp package. Work around these problems by running these commands first:
+apt-get install libvips-dev
 
-sudo add-apt-repository ppa:lyrasis/precise-backports
-sudo apt-get update
-sudo apt-get install libtiff4-dev
 
-Then follow Ubuntu 13.x instructions.
 
-### Install libvips on Redhat/Centos Linux
 
 #### Centos 6
 
@@ -98,6 +90,10 @@ Then follow Ubuntu 13.x instructions.
 
 [Alessandro Tagliapietra](https://github.com/alex88) maintains an [Heroku buildpack for libvips](https://github.com/alex88/heroku-buildpack-vips) and its dependencies.
 
+### Using with gulp.js
+
+[Mohammad Prabowo](https://github.com/rizalp) maintains a [gulp.js plugin](https://github.com/rizalp/gulp-sharp).
+
 ## Usage examples
 
 ```javascript
@@ -149,14 +145,17 @@ readableStream.pipe(pipeline);
 sharp('input.png')
   .rotate(180)
   .resize(300)
+  .flatten()
+  .background('#ff6600')
   .sharpen()
   .withMetadata()
   .quality(90)
   .webp()
   .toBuffer()
   .then(function(outputBuffer) {
-    // outputBuffer contains 300px wide, upside down, sharpened,
-    // with metadata, 90% quality WebP image data
+    // outputBuffer contains upside down, 300px wide, alpha channel flattened
+    // onto orange background, sharpened, with metadata, 90% quality WebP image
+    // data
   });
 ```
 
@@ -169,11 +168,28 @@ http.createServer(function(request, response) {
 // resized to 200 pixels wide, in WebP format
 ```
 
+```javascript
+sharp(input)
+  .extract(top, left, width, height)
+  .toFile(output);
+// Extract a region of the input image, saving in the same format.
+```
+
+```javascript
+sharp(input)
+  .extract(topOffsetPre, leftOffsetPre, widthPre, heightPre)
+  .resize(width, height)
+  .extract(topOffsetPost, leftOffsetPost, widthPost, heightPost)
+  .toFile(output);
+// Extract a region, resize, then extract from the resized image
+```
+
 ```javascript
 sharp(inputBuffer)
   .resize(200, 300)
   .interpolateWith(sharp.interpolator.nohalo)
-  .embedWhite()
+  .background('white')
+  .embed()
   .toFile('output.tiff')
   .then(function() {
     // output.tiff is a 200 pixels wide and 300 pixels high image
@@ -183,20 +199,29 @@ sharp(inputBuffer)
 ```
 
 ```javascript
-sharp('input.gif').resize(200, 300).embedBlack().webp().toBuffer(function(err, outputBuffer) {
-  if (err) {
-    throw err;
-  }
-  // outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
-  // containing a scaled version, embedded on a black canvas, of input.gif
-});
+sharp('input.gif')
+  .resize(200, 300)
+  .background({r: 0, g: 0, b: 0, a: 0})
+  .embed()
+  .webp()
+  .toBuffer(function(err, outputBuffer) {
+    if (err) {
+      throw err;
+    }
+    // outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
+    // containing a scaled version, embedded on a transparent canvas, of input.gif
+  });
 ```
 
 ```javascript
-sharp(inputBuffer).resize(200, 200).max().jpeg().toBuffer().then(function(outputBuffer) {
-  // outputBuffer contains JPEG image data no wider than 200 pixels and no higher
-  // than 200 pixels regardless of the inputBuffer image dimensions
-});
+sharp(inputBuffer)
+  .resize(200, 200)
+  .max()
+  .jpeg()
+  .toBuffer().then(function(outputBuffer) {
+    // outputBuffer contains JPEG image data no wider than 200 pixels and no higher
+    // than 200 pixels regardless of the inputBuffer image dimensions
+  });
 ```
 
 ## API
 
@@ -227,6 +252,7 @@ Fast access to image metadata without decoding any compressed image data.
 * `height`: Number of pixels high
 * `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L502)
 * `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
+* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
 * `orientation`: Number value of the EXIF Orientation header, if present
 
 A Promises/A+ promise is returned when `callback` is not provided.
````
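The new `hasAlpha` attribute travels with the existing `metadata()` output documented above. A minimal sketch of reading it via the promise form; the file name is illustrative and not part of this change:

```javascript
var sharp = require('sharp');

// Read metadata without decoding the pixel data, then branch on the
// newly exposed hasAlpha flag (true for e.g. RGBA PNG input).
sharp('input.png').metadata().then(function(metadata) {
  if (metadata.hasAlpha) {
    console.log(metadata.width + 'x' + metadata.height + ' with alpha channel');
  } else {
    console.log(metadata.width + 'x' + metadata.height + ', no alpha channel');
  }
});
```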
````diff
@@ -245,6 +271,16 @@ Scale output to `width` x `height`. By default, the resized image is cropped to
 
 `height` is the Number of pixels high the resultant image should be. Use `null` or `undefined` to auto-scale the height to match the width.
 
+#### extract(top, left, width, height)
+
+Extract a region of the image. Can be used with or without a `resize` operation.
+
+`top` and `left` are the offset, in pixels, from the top-left corner.
+
+`width` and `height` are the dimensions of the extracted image.
+
+Use `extract` before `resize` for pre-resize extraction. Use `extract` after `resize` for post-resize extraction. Use `extract` before and after for both.
+
 #### crop([gravity])
 
 Crop the resized image to the exact size specified, the default behaviour.
@@ -255,17 +291,29 @@ Possible values are `north`, `east`, `south`, `west`, `center` and `centre`. The
 
 #### max()
 
-Preserving aspect ratio, resize the image to the maximum width or height specified.
+Preserving aspect ratio, resize the image to the maximum `width` or `height` specified.
 
 Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
 
-#### embedWhite()
+#### background(rgba)
 
-Embed the resized image on a white background of the exact size specified.
+Set the background for the `embed` and `flatten` operations.
 
-#### embedBlack()
+`rgba` is parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
 
-Embed the resized image on a black background of the exact size specified.
+The alpha value is a float between `0` (transparent) and `1` (opaque).
 
+The default background is `{r: 0, g: 0, b: 0, a: 1}`, black without transparency.
+
+#### embed()
+
+Preserving aspect ratio, resize the image to the maximum `width` or `height` specified then embed on a background of the exact `width` and `height` specified.
+
+If the background contains an alpha value then WebP and PNG format output images will contain an alpha channel, even when the input image does not.
+
+#### flatten()
+
+Merge alpha transparency channel, if any, with `background`.
+
 #### rotate([angle])
 
````
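The `background`, `embed` and `flatten` operations documented above are designed to compose. A rough sketch under the assumption of the documented behaviour; file names and colour values are illustrative only:

```javascript
var sharp = require('sharp');

// Embed on a fully transparent background: because the background alpha is
// below 1, the PNG output keeps an alpha channel even for a JPEG input.
sharp('logo.jpg')
  .resize(300, 200)
  .background({r: 0, g: 0, b: 0, a: 0})
  .embed()
  .png()
  .toFile('logo-padded.png');

// Flatten alpha transparency onto an opaque orange background instead.
sharp('logo-padded.png')
  .resize(300, 200)
  .background('#ff6600')
  .flatten()
  .jpeg()
  .toFile('logo-flat.jpg');
```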
````diff
@@ -283,7 +331,7 @@ This is equivalent to GraphicsMagick's `>` geometry option: "change the dimensio
 
 #### sharpen()
 
-Perform a mild sharpen of the resultant image. This typically reduces performance by 30%.
+Perform a mild sharpen of the output image. This typically reduces performance by 10%.
 
 #### interpolateWith(interpolator)
 
@@ -308,6 +356,14 @@ This can improve the perceived brightness of a resized image in non-linear colou
 
 JPEG input images will not take advantage of the shrink-on-load performance optimisation when applying a gamma correction.
 
+#### grayscale() / greyscale()
+
+Convert to 8-bit greyscale; 256 shades of grey.
+
+This is a linear operation. If the input image is in a non-linear colour space such as sRGB, use `gamma()` with `greyscale()` for the best results.
+
+The output image will still be web-friendly sRGB and contain three (identical) channels.
+
 ### Output options
 
 #### jpeg()
````
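The `greyscale()` notes above suggest pairing it with `gamma()` for sRGB input. A minimal sketch of that pairing; the file name and gamma value are illustrative:

```javascript
var sharp = require('sharp');

// Greyscale conversion is linear, so combine it with gamma correction
// when the input is in a non-linear colour space such as sRGB.
sharp('photo.jpg')
  .resize(640)
  .gamma(2.2)
  .greyscale()
  .jpeg()
  .toFile('photo-grey.jpg');
```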
````diff
@@ -332,7 +388,7 @@ The output quality to use for lossy JPEG, WebP and TIFF output formats. The defa
 
 Use progressive (interlace) scan for JPEG and PNG output. This typically reduces compression performance by 30% but results in an image that can be rendered sooner when decompressed.
 
-#### withMetadata([boolean])
+#### withMetadata()
 
 Include all metadata (ICC, EXIF, XMP) from the input image in the output image. The default behaviour is to strip all metadata.
 
@@ -340,7 +396,7 @@ Include all metadata (ICC, EXIF, XMP) from the input image in the output image.
 
 An advanced setting for the _zlib_ compression level of the lossless PNG output format. The default level is `6`.
 
-`compressionLevel` is a Number between -1 and 9.
+`compressionLevel` is a Number between 0 and 9.
 
 ### Output methods
 
@@ -431,7 +487,7 @@ brew install graphicsmagick
 ```
 
 ```
-sudo apt-get install -qq imagemagick graphicsmagick
+sudo apt-get install -qq imagemagick graphicsmagick libmagick++-dev
 ```
 
 ```
@@ -444,16 +500,17 @@ sudo yum install -y --enablerepo=epel GraphicsMagick
 
 ### Test environment
 
-* Intel Xeon [L5520](http://ark.intel.com/products/40201/Intel-Xeon-Processor-L5520-8M-Cache-2_26-GHz-5_86-GTs-Intel-QPI) 2.27GHz 8MB cache
-* Ubuntu 13.10
-* libvips 7.38.5
+* AWS EC2 [c3.xlarge](http://aws.amazon.com/ec2/instance-types/#Compute_Optimized)
+* Ubuntu 14.04
+* libvips 7.40.8
+* liborc 0.4.22
 
 ### The contenders
 
-* [imagemagick-native](https://github.com/mash/node-imagemagick-native) - Supports Buffers only and blocks main V8 thread whilst processing.
-* [imagemagick](https://github.com/rsms/node-imagemagick) - Supports filesystem only and "has been unmaintained for a long time".
-* [gm](https://github.com/aheckmann/gm) - Fully featured wrapper around GraphicsMagick.
-* sharp - Caching within libvips disabled to ensure a fair comparison.
+* [imagemagick-native](https://github.com/mash/node-imagemagick-native) v1.2.2 - Supports Buffers only and blocks main V8 thread whilst processing.
+* [imagemagick](https://github.com/yourdeveloper/node-imagemagick) v0.1.3 - Supports filesystem only and "has been unmaintained for a long time".
+* [gm](https://github.com/aheckmann/gm) v1.16.0 - Fully featured wrapper around GraphicsMagick.
+* sharp v0.6.2 - Caching within libvips disabled to ensure a fair comparison.
 
 ### The task
 
@@ -463,21 +520,23 @@ Decompress a 2725x2225 JPEG image, resize and crop to 720x480, then compress to
 
 | Module | Input | Output | Ops/sec | Speed-up |
 | :-------------------- | :----- | :----- | ------: | -------: |
-| imagemagick-native | buffer | buffer | 0.97 | 1 |
-| imagemagick | file | file | 2.49 | 2.6 |
-| gm | buffer | file | 3.72 | 3.8 |
-| gm | buffer | buffer | 3.80 | 3.9 |
-| gm | file | file | 3.67 | 3.8 |
-| gm | file | buffer | 3.67 | 3.8 |
-| sharp | buffer | file | 13.62 | 14.0 |
-| sharp | buffer | buffer | 12.43 | 12.8 |
-| sharp | file | file | 13.02 | 13.4 |
-| sharp | file | buffer | 11.15 | 11.5 |
-| sharp +sharpen | file | buffer | 10.26 | 10.6 |
-| sharp +progressive | file | buffer | 9.44 | 9.7 |
-| sharp +sequentialRead | file | buffer | 11.94 | 12.3 |
+| imagemagick-native | buffer | buffer | 1.58 | 1 |
+| imagemagick | file | file | 6.23 | 3.9 |
+| gm | buffer | file | 5.32 | 3.4 |
+| gm | buffer | buffer | 5.32 | 3.4 |
+| gm | file | file | 5.36 | 3.4 |
+| gm | file | buffer | 5.36 | 3.4 |
+| sharp | buffer | file | 22.05 | 14.0 |
+| sharp | buffer | buffer | 22.14 | 14.0 |
+| sharp | file | file | 21.79 | 13.8 |
+| sharp | file | buffer | 21.90 | 13.9 |
+| sharp | stream | stream | 20.87 | 13.2 |
+| sharp +promise | file | buffer | 21.89 | 13.9 |
+| sharp +sharpen | file | buffer | 19.69 | 12.5 |
+| sharp +progressive | file | buffer | 16.93 | 10.7 |
+| sharp +sequentialRead | file | buffer | 21.60 | 13.7 |
 
-You can expect much greater performance with caching enabled (default) and using 16+ core machines.
+You can expect greater performance with caching enabled (default) and using 8+ core machines.
 
 ## Thanks
 
@@ -488,6 +547,10 @@ This module would never have been possible without the help and code contributio
 * [Jonathan Ong](https://github.com/jonathanong)
 * [Chanon Sajjamanochai](https://github.com/chanon)
 * [Juliano Julio](https://github.com/julianojulio)
+* [Daniel Gasienica](https://github.com/gasi)
+* [Julian Walker](https://github.com/julianwa)
+* [Amit Pitaru](https://github.com/apitaru)
+* [Brandon Aaron](https://github.com/brandonaaron)
 
 Thank you!
 
````
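A rough usage sketch combining the output options touched in the README hunks above (`quality`, `progressive`, `withMetadata`, and the now 0 to 9 `compressionLevel`); file names are illustrative:

```javascript
var sharp = require('sharp');

// Progressive JPEG at quality 90 with input metadata preserved.
sharp('input.jpg')
  .resize(1024)
  .quality(90)
  .progressive()
  .withMetadata()
  .jpeg()
  .toFile('output.jpg');

// PNG compressed at the maximum zlib level 9 (default is 6).
sharp('input.jpg')
  .resize(1024)
  .compressionLevel(9)
  .png()
  .toFile('output.png');
```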
index.js: 114 changes

````diff
@@ -3,7 +3,10 @@
 
 var util = require('util');
 var stream = require('stream');
 
+var color = require('color');
 var Promise = require('bluebird');
 
 var sharp = require('./build/Release/sharp');
 
 var Sharp = function(input) {
@@ -12,23 +15,38 @@ var Sharp = function(input) {
   }
   stream.Duplex.call(this);
   this.options = {
+    // input options
+    streamIn: false,
+    sequentialRead: false,
+    // resize options
+    topOffsetPre: -1,
+    leftOffsetPre: -1,
+    widthPre: -1,
+    heightPre: -1,
+    topOffsetPost: -1,
+    leftOffsetPost: -1,
+    widthPost: -1,
+    heightPost: -1,
     width: -1,
     height: -1,
     canvas: 'c',
     gravity: 0,
     angle: 0,
     withoutEnlargement: false,
-    sharpen: false,
     interpolator: 'bilinear',
+    // operations
+    background: [0, 0, 0, 255],
+    flatten: false,
+    sharpen: false,
     gamma: 0,
+    greyscale: false,
+    // output options
+    output: '__input',
     progressive: false,
-    sequentialRead: false,
     quality: 80,
     compressionLevel: 6,
-    streamIn: false,
     streamOut: false,
-    withMetadata: false,
-    output: '__input'
+    withMetadata: false
   };
   if (typeof input === 'string') {
     // input=file
@@ -53,6 +71,7 @@ util.inherits(Sharp, stream.Duplex);
   Handle incoming chunk on Writable Stream
 */
 Sharp.prototype._write = function(chunk, encoding, callback) {
+  /*jslint unused: false */
   if (this.options.streamIn) {
     if (typeof chunk === 'object' || chunk instanceof Buffer) {
       if (typeof this.options.bufferIn === 'undefined') {
@@ -91,13 +110,40 @@ Sharp.prototype.crop = function(gravity) {
   return this;
 };
 
-Sharp.prototype.embedWhite = function() {
-  this.options.canvas = 'w';
+Sharp.prototype.extract = function(topOffset, leftOffset, width, height) {
+  /*jslint unused: false */
+  var suffix = this.options.width === -1 && this.options.height === -1 ? 'Pre' : 'Post';
+  var values = arguments;
+  ['topOffset', 'leftOffset', 'width', 'height'].forEach(function(name, index) {
+    this.options[name + suffix] = values[index];
+  }.bind(this));
   return this;
 };
 
-Sharp.prototype.embedBlack = function() {
-  this.options.canvas = 'b';
+/*
+  Deprecated embed* methods, to be removed in v0.8.0
+*/
+Sharp.prototype.embedWhite = util.deprecate(function() {
+  return this.background('white').embed();
+}, "embedWhite() is deprecated, use background('white').embed() instead");
+Sharp.prototype.embedBlack = util.deprecate(function() {
+  return this.background('black').embed();
+}, "embedBlack() is deprecated, use background('black').embed() instead");
+
+/*
+  Set the background colour for embed and flatten operations.
+  Delegates to the 'Color' module, which can throw an Error
+  but is liberal in what it accepts, clamping values to sensible min/max.
+*/
+Sharp.prototype.background = function(rgba) {
+  var colour = color(rgba);
+  this.options.background = colour.rgbArray();
+  this.options.background.push(colour.alpha() * 255);
+  return this;
+};
+
+Sharp.prototype.embed = function() {
+  this.options.canvas = 'e';
   return this;
 };
 
````
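The new `extract()` above decides between the `*Pre` and `*Post` options by checking whether `resize()` has already set `width` and `height`. A sketch of the two call orders; file names and offsets are illustrative:

```javascript
var sharp = require('sharp');

// extract() before resize(): width/height are still -1, so the arguments
// land in topOffsetPre/leftOffsetPre/widthPre/heightPre.
sharp('input.jpg')
  .extract(20, 20, 400, 300)
  .resize(200, 150)
  .toFile('pre-extract.jpg');

// extract() after resize(): width/height are already set, so the same
// arguments land in the *Post options and apply to the resized image.
sharp('input.jpg')
  .resize(400, 300)
  .extract(20, 20, 200, 150)
  .toFile('post-extract.jpg');
```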
````diff
@@ -106,6 +152,11 @@ Sharp.prototype.max = function() {
   return this;
 };
 
+Sharp.prototype.flatten = function(flatten) {
+  this.options.flatten = (typeof flatten === 'boolean') ? flatten : true;
+  return this;
+};
+
 /*
   Rotate output image by 0, 90, 180 or 270 degrees
   Auto-rotation based on the EXIF Orientation tag is represented by an angle of -1
@@ -152,19 +203,6 @@ Sharp.prototype.interpolateWith = function(interpolator) {
   return this;
 };
 
-/*
-  Deprecated interpolation methods, to be removed in v0.7.0
-*/
-Sharp.prototype.bilinearInterpolation = util.deprecate(function() {
-  return this.interpolateWith(module.exports.interpolator.bilinear);
-}, 'bilinearInterpolation() is deprecated, use interpolateWith(sharp.interpolator.bilinear) instead');
-Sharp.prototype.bicubicInterpolation = util.deprecate(function() {
-  return this.interpolateWith(module.exports.interpolator.bicubic);
-}, 'bicubicInterpolation() is deprecated, use interpolateWith(sharp.interpolator.bicubic) instead');
-Sharp.prototype.nohaloInterpolation = util.deprecate(function() {
-  return this.interpolateWith(module.exports.interpolator.nohalo);
-}, 'nohaloInterpolation() is deprecated, use interpolateWith(sharp.interpolator.nohalo) instead');
-
 /*
   Darken image pre-resize (1/gamma) and brighten post-resize (gamma).
   Improves brightness of resized image in non-linear colour spaces.
@@ -181,6 +219,15 @@ Sharp.prototype.gamma = function(gamma) {
   return this;
 };
 
+/*
+  Convert to greyscale
+*/
+Sharp.prototype.greyscale = function(greyscale) {
+  this.options.greyscale = (typeof greyscale === 'boolean') ? greyscale : true;
+  return this;
+};
+Sharp.prototype.grayscale = Sharp.prototype.greyscale;
+
 Sharp.prototype.progressive = function(progressive) {
   this.options.progressive = (typeof progressive === 'boolean') ? progressive : true;
   return this;
@@ -201,17 +248,17 @@ Sharp.prototype.quality = function(quality) {
 };
 
 Sharp.prototype.compressionLevel = function(compressionLevel) {
-  if (!Number.isNaN(compressionLevel) && compressionLevel >= -1 && compressionLevel <= 9) {
+  if (!Number.isNaN(compressionLevel) && compressionLevel >= 0 && compressionLevel <= 9) {
     this.options.compressionLevel = compressionLevel;
   } else {
-    throw new Error('Invalid compressionLevel (-1 to 9) ' + compressionLevel);
+    throw new Error('Invalid compressionLevel (0 to 9) ' + compressionLevel);
   }
   return this;
 };
 
 Sharp.prototype.withMetadata = function(withMetadata) {
   this.options.withMetadata = (typeof withMetadata === 'boolean') ? withMetadata : true;
   return this;
 };
 
 Sharp.prototype.resize = function(width, height) {
@@ -269,31 +316,16 @@ Sharp.prototype.toBuffer = function(callback) {
 
 Sharp.prototype.jpeg = function() {
   this.options.output = '__jpeg';
-  if (arguments.length > 0) {
-    console.error('Use of the jpeg() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
-    console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for JPEG output');
-    this._sharp(arguments);
-  }
   return this;
 };
 
 Sharp.prototype.png = function() {
   this.options.output = '__png';
-  if (arguments.length > 0) {
-    console.error('Use of the png() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
-    console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for PNG output');
-    this._sharp(arguments);
-  }
   return this;
 };
 
 Sharp.prototype.webp = function() {
   this.options.output = '__webp';
-  if (arguments.length > 0) {
-    console.error('Use of the webp() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
-    console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for WebP output');
-    this._sharp(arguments);
-  }
   return this;
 };
 
````
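The deprecation shims above keep `embedWhite()` and `embedBlack()` working by delegating to the new API; per their warning messages the following two calls should be equivalent (file names illustrative):

```javascript
var sharp = require('sharp');

// Old helper, now wrapped in util.deprecate() and due for removal in v0.8.0.
sharp('input.png').resize(200, 300).embedWhite().toFile('old-style.png');

// Replacement spelled out by the deprecation message.
sharp('input.png').resize(200, 300).background('white').embed().toFile('new-style.png');
```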
package.json: 16 changes

````diff
@@ -1,12 +1,16 @@
 {
   "name": "sharp",
-  "version": "0.6.2",
+  "version": "0.7.0",
   "author": "Lovell Fuller <npm@lovell.info>",
   "contributors": [
     "Pierre Inglebert <pierre.inglebert@gmail.com>",
     "Jonathan Ong <jonathanrichardong@gmail.com>",
     "Chanon Sajjamanochai <chanon.s@gmail.com>",
-    "Juliano Julio <julianojulio@gmail.com>"
+    "Juliano Julio <julianojulio@gmail.com>",
+    "Daniel Gasienica <daniel@gasienica.ch>",
+    "Julian Walker <julian@fiftythree.com>",
+    "Amit Pitaru <pitaru.amit@gmail.com>",
+    "Brandon Aaron <hello.brandon@aaron.sh>"
   ],
   "description": "High performance Node.js module to resize JPEG, PNG and WebP images using the libvips library",
   "scripts": {
@@ -27,6 +31,7 @@
     "thumbnail",
     "sharpen",
     "crop",
+    "extract",
     "embed",
     "libvips",
     "vips",
@@ -35,12 +40,13 @@
     "stream"
   ],
   "dependencies": {
-    "nan": "^1.3.0",
-    "bluebird": "^2.3.2"
+    "bluebird": "^2.3.5",
+    "color": "^0.7.1",
+    "nan": "^1.3.0"
   },
   "devDependencies": {
     "imagemagick": "^0.1.3",
-    "imagemagick-native": "^1.2.2",
+    "imagemagick-native": "^1.4.0",
     "gm": "^1.16.0",
     "async": "^0.9.0",
     "benchmark": "^1.0.0"
````
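The new `color` dependency backs the `background()` method added in index.js above. A small sketch of the conversion that method performs, assuming the `rgbArray()` and `alpha()` behaviour used in that diff; the input colour is illustrative:

```javascript
var color = require('color');

// background() stores [r, g, b] plus alpha scaled to 0-255,
// mirroring the index.js implementation in this comparison.
var colour = color({r: 255, g: 102, b: 0, a: 0.5});
var background = colour.rgbArray();     // [255, 102, 0]
background.push(colour.alpha() * 255);  // [255, 102, 0, 127.5]
console.log(background);
```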
src/sharp.cc: 504 changes

````diff
@@ -11,6 +11,12 @@
 using namespace v8;
 using namespace node;
 
+typedef enum {
+CROP,
+MAX,
+EMBED
+} Canvas;
+
 struct resize_baton {
 std::string file_in;
 void* buffer_in;
@@ -19,36 +25,49 @@ struct resize_baton {
 std::string output_format;
 void* buffer_out;
 size_t buffer_out_len;
+int topOffsetPre;
+int leftOffsetPre;
+int widthPre;
+int heightPre;
+int topOffsetPost;
+int leftOffsetPost;
+int widthPost;
+int heightPost;
 int width;
 int height;
-bool crop;
+Canvas canvas;
 int gravity;
-bool max;
-VipsExtend extend;
-bool sharpen;
 std::string interpolator;
+double background[4];
+bool flatten;
+bool sharpen;
 double gamma;
+bool greyscale;
 bool progressive;
 bool without_enlargement;
 VipsAccess access_method;
 int quality;
-int compressionLevel;
+int compression_level;
 int angle;
 std::string err;
-bool withMetadata;
+bool with_metadata;
 
 resize_baton():
 buffer_in_len(0),
 output_format(""),
 buffer_out_len(0),
-crop(false),
+topOffsetPre(-1),
+topOffsetPost(-1),
+canvas(CROP),
 gravity(0),
-max(false),
+background{0.0, 0.0, 0.0, 255.0},
+flatten(false),
 sharpen(false),
 gamma(0.0),
+greyscale(false),
 progressive(false),
 without_enlargement(false),
-withMetadata(false) {}
+with_metadata(false) {}
 };
 
 typedef enum {
@@ -89,14 +108,22 @@ static bool is_tiff(std::string const &str) {
 return ends_with(str, ".tif") || ends_with(str, ".tiff") || ends_with(str, ".TIF") || ends_with(str, ".TIFF");
 }
 
-static void resize_error(resize_baton *baton, VipsImage *unref) {
+static void resize_error(resize_baton *baton, VipsObject *hook) {
 (baton->err).append(vips_error_buffer());
 vips_error_clear();
-g_object_unref(unref);
+g_object_unref(hook);
 vips_thread_shutdown();
 return;
 }
 
+typedef enum {
+ANGLE_0,
+ANGLE_90,
+ANGLE_180,
+ANGLE_270,
+ANGLE_LAST
+} Angle;
+
 /*
 Calculate the angle of rotation for the output image.
 In order of priority:
@@ -104,27 +131,27 @@ static void resize_error(resize_baton *baton, VipsImage *unref) {
 2. Use input image EXIF Orientation header (does not support mirroring)
 3. Otherwise default to zero, i.e. no rotation
 */
-static VipsAngle
+static Angle
 sharp_calc_rotation(int const angle, VipsImage const *input) {
-VipsAngle rotate = VIPS_ANGLE_0;
+Angle rotate = ANGLE_0;
 if (angle == -1) {
 const char *exif;
 if (!vips_image_get_string(input, "exif-ifd0-Orientation", &exif)) {
 if (exif[0] == 0x36) { // "6"
-rotate = VIPS_ANGLE_90;
+rotate = ANGLE_90;
 } else if (exif[0] == 0x33) { // "3"
-rotate = VIPS_ANGLE_180;
+rotate = ANGLE_180;
 } else if (exif[0] == 0x38) { // "8"
-rotate = VIPS_ANGLE_270;
+rotate = ANGLE_270;
 }
 }
 } else {
 if (angle == 90) {
-rotate = VIPS_ANGLE_90;
+rotate = ANGLE_90;
 } else if (angle == 180) {
-rotate = VIPS_ANGLE_180;
+rotate = ANGLE_180;
 } else if (angle == 270) {
-rotate = VIPS_ANGLE_270;
+rotate = ANGLE_270;
 }
 }
 return rotate;
@@ -160,6 +187,19 @@ sharp_calc_crop(int const inWidth, int const inHeight, int const outWidth, int c
 return std::make_tuple(left, top);
 }
 
+/*
+Does this image have an alpha channel?
+Uses colour space interpretation with number of channels to guess this.
+*/
+static bool
+sharp_image_has_alpha(VipsImage *image) {
+return (
+(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) ||
+(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) ||
+(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK)
+);
+}
+
 /*
 Initialise a VipsImage from a buffer. Supports JPEG, PNG and WebP.
 Returns the ImageType detected, if any.
````
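The `Canvas` enum introduced at the top of this file (CROP, MAX, EMBED) corresponds to the crop/max/embed choices exposed by the JavaScript API. A sketch of the three from the caller's side; file names are illustrative:

```javascript
var sharp = require('sharp');

// CROP (the default canvas): exactly 200x200, cropped to fit.
sharp('input.jpg').resize(200, 200).crop().toFile('cropped.jpg');

// MAX: no wider and no higher than 200x200, aspect ratio preserved.
sharp('input.jpg').resize(200, 200).max().toFile('max.jpg');

// EMBED: exactly 200x200, letterboxed onto the configured background.
sharp('input.jpg').resize(200, 200).background('white').embed().toFile('embedded.jpg');
```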
````diff
@@ -227,6 +267,7 @@ struct metadata_baton {
 int height;
 std::string space;
 int channels;
+bool has_alpha;
 int orientation;
 std::string err;
 
@@ -246,7 +287,7 @@ class MetadataWorker : public NanAsyncWorker {
 g_atomic_int_dec_and_test(&counter_queue);
 
 ImageType imageType = UNKNOWN;
-VipsImage *image = vips_image_new();
+VipsImage *image;
 if (baton->buffer_in_len > 1) {
 // From buffer
 imageType = sharp_init_image_from_buffer(&image, baton->buffer_in, baton->buffer_in_len, VIPS_ACCESS_RANDOM);
@@ -275,6 +316,7 @@ class MetadataWorker : public NanAsyncWorker {
 baton->height = image->Ysize;
 baton->space = vips_enum_nick(VIPS_TYPE_INTERPRETATION, image->Type);
 baton->channels = image->Bands;
+baton->has_alpha = sharp_image_has_alpha(image);
 // EXIF Orientation
 const char *exif;
 if (!vips_image_get_string(image, "exif-ifd0-Orientation", &exif)) {
@@ -302,6 +344,7 @@ class MetadataWorker : public NanAsyncWorker {
 info->Set(NanNew<String>("height"), NanNew<Number>(baton->height));
 info->Set(NanNew<String>("space"), NanNew<String>(baton->space));
 info->Set(NanNew<String>("channels"), NanNew<Number>(baton->channels));
+info->Set(NanNew<String>("hasAlpha"), NanNew<Boolean>(baton->has_alpha));
 if (baton->orientation > 0) {
 info->Set(NanNew<String>("orientation"), NanNew<Number>(baton->orientation));
 }
````
````diff
@@ -359,33 +402,49 @@ class ResizeWorker : public NanAsyncWorker {
 // Increment processing task counter
 g_atomic_int_inc(&counter_process);
 
+// Hang image references from this hook object
+VipsObject *hook = reinterpret_cast<VipsObject*>(vips_image_new());
+
 // Input
 ImageType inputImageType = UNKNOWN;
-VipsImage *in = vips_image_new();
+VipsImage *image = vips_image_new();
+vips_object_local(hook, image);
+
 if (baton->buffer_in_len > 1) {
 // From buffer
-inputImageType = sharp_init_image_from_buffer(&in, baton->buffer_in, baton->buffer_in_len, baton->access_method);
+inputImageType = sharp_init_image_from_buffer(&image, baton->buffer_in, baton->buffer_in_len, baton->access_method);
 if (inputImageType == UNKNOWN) {
 (baton->err).append("Input buffer contains unsupported image format");
 }
 } else {
 // From file
-inputImageType = sharp_init_image_from_file(&in, baton->file_in.c_str(), baton->access_method);
+inputImageType = sharp_init_image_from_file(&image, baton->file_in.c_str(), baton->access_method);
 if (inputImageType == UNKNOWN) {
 (baton->err).append("File is of an unsupported image format");
 }
 }
 if (inputImageType == UNKNOWN) {
-return resize_error(baton, in);
+return resize_error(baton, hook);
+}
+
+// Pre extraction
+if (baton->topOffsetPre != -1) {
+VipsImage *extractedPre = vips_image_new();
+vips_object_local(hook, extractedPre);
+if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = extractedPre;
 }
 
 // Get input image width and height
-int inputWidth = in->Xsize;
-int inputHeight = in->Ysize;
+int inputWidth = image->Xsize;
+int inputHeight = image->Ysize;
 
 // Calculate angle of rotation, to be carried out later
-VipsAngle rotation = sharp_calc_rotation(baton->angle, in);
-if (rotation == VIPS_ANGLE_90 || rotation == VIPS_ANGLE_270) {
+Angle rotation = sharp_calc_rotation(baton->angle, image);
+if (rotation == ANGLE_90 || rotation == ANGLE_270) {
 // Swap input output width and height when rotating by 90 or 270 degrees
 int swap = inputWidth;
 inputWidth = inputHeight;
@@ -398,9 +457,9 @@ class ResizeWorker : public NanAsyncWorker {
 // Fixed width and height
 double xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
 double yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
-factor = baton->crop ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
+factor = (baton->canvas == CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
 // if max is set, we need to compute the real size of the thumb image
-if (baton->max) {
+if (baton->canvas == MAX) {
 if (xfactor > yfactor) {
 baton->height = round(static_cast<double>(inputHeight) / xfactor);
 } else {
@@ -438,9 +497,9 @@ class ResizeWorker : public NanAsyncWorker {
 }
 }
 
-// Try to use libjpeg shrink-on-load, but not when applying gamma correction
+// Try to use libjpeg shrink-on-load, but not when applying gamma correction or pre-resize extract
 int shrink_on_load = 1;
-if (inputImageType == JPEG && baton->gamma == 0) {
+if (inputImageType == JPEG && baton->gamma == 0 && baton->topOffsetPre == -1) {
 if (shrink >= 8) {
 factor = factor / 8;
 shrink_on_load = 8;
````
````diff
@@ -452,47 +511,99 @@ class ResizeWorker : public NanAsyncWorker {
 shrink_on_load = 2;
 }
 }
-VipsImage *shrunk_on_load = vips_image_new();
 if (shrink_on_load > 1) {
 // Recalculate integral shrink and double residual
 factor = std::max(factor, 1.0);
 shrink = floor(factor);
 residual = static_cast<double>(shrink) / factor;
 // Reload input using shrink-on-load
+g_object_unref(image);
 if (baton->buffer_in_len > 1) {
-if (vips_jpegload_buffer(baton->buffer_in, baton->buffer_in_len, &shrunk_on_load, "shrink", shrink_on_load, NULL)) {
-return resize_error(baton, in);
+if (vips_jpegload_buffer(baton->buffer_in, baton->buffer_in_len, &image, "shrink", shrink_on_load, NULL)) {
+return resize_error(baton, hook);
 }
 } else {
-if (vips_jpegload((baton->file_in).c_str(), &shrunk_on_load, "shrink", shrink_on_load, NULL)) {
-return resize_error(baton, in);
+if (vips_jpegload((baton->file_in).c_str(), &image, "shrink", shrink_on_load, NULL)) {
+return resize_error(baton, hook);
 }
 }
-} else {
-vips_copy(in, &shrunk_on_load, NULL);
 }
-g_object_unref(in);
+
+// Handle colour profile, if any, for non sRGB images
+if (image->Type != VIPS_INTERPRETATION_sRGB && vips_image_get_typeof(image, VIPS_META_ICC_NAME)) {
+// Import embedded profile
+VipsImage *profile = vips_image_new();
+vips_object_local(hook, profile);
+if (vips_icc_import(image, &profile, NULL, "embedded", TRUE, "pcs", VIPS_PCS_XYZ, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = profile;
+// Convert to sRGB colour space
+VipsImage *colourspaced = vips_image_new();
+vips_object_local(hook, colourspaced);
+if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = colourspaced;
+}
+
+// Flatten image to remove alpha channel
+if (baton->flatten && sharp_image_has_alpha(image)) {
+// Background colour
+VipsArrayDouble *background = vips_array_double_newv(
+3, // Ignore alpha channel as we're about to remove it
+baton->background[0],
+baton->background[1],
+baton->background[2]
+);
+VipsImage *flattened = vips_image_new();
+vips_object_local(hook, flattened);
+if (vips_flatten(image, &flattened, "background", background, NULL)) {
+vips_area_unref(reinterpret_cast<VipsArea*>(background));
+return resize_error(baton, hook);
+};
+vips_area_unref(reinterpret_cast<VipsArea*>(background));
+g_object_unref(image);
+image = flattened;
+}
+
 // Gamma encoding (darken)
 if (baton->gamma >= 1 && baton->gamma <= 3) {
 VipsImage *gamma_encoded = vips_image_new();
-if (vips_gamma(shrunk_on_load, &gamma_encoded, "exponent", 1.0 / baton->gamma, NULL)) {
-return resize_error(baton, shrunk_on_load);
+vips_object_local(hook, gamma_encoded);
+if (vips_gamma(image, &gamma_encoded, "exponent", 1.0 / baton->gamma, NULL)) {
+return resize_error(baton, hook);
 }
-g_object_unref(shrunk_on_load);
-shrunk_on_load = gamma_encoded;
+g_object_unref(image);
+image = gamma_encoded;
 }
 
-VipsImage *shrunk = vips_image_new();
-if (shrink > 1) {
-// Use vips_shrink with the integral reduction
-if (vips_shrink(shrunk_on_load, &shrunk, shrink, shrink, NULL)) {
-return resize_error(baton, shrunk_on_load);
+// Convert to greyscale (linear, therefore after gamma encoding, if any)
+if (baton->greyscale) {
+VipsImage *greyscale = vips_image_new();
+vips_object_local(hook, greyscale);
+if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
+return resize_error(baton, hook);
 }
+g_object_unref(image);
+image = greyscale;
+}
+
+if (shrink > 1) {
+VipsImage *shrunk = vips_image_new();
+vips_object_local(hook, shrunk);
+// Use vips_shrink with the integral reduction
+if (vips_shrink(image, &shrunk, shrink, shrink, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = shrunk;
 // Recalculate residual float based on dimensions of required vs shrunk images
 double shrunkWidth = shrunk->Xsize;
 double shrunkHeight = shrunk->Ysize;
-if (rotation == VIPS_ANGLE_90 || rotation == VIPS_ANGLE_270) {
+if (rotation == ANGLE_90 || rotation == ANGLE_270) {
 // Swap input output width and height when rotating by 90 or 270 degrees
 int swap = shrunkWidth;
 shrunkWidth = shrunkHeight;
````
````diff
@@ -500,134 +611,202 @@ class ResizeWorker : public NanAsyncWorker {
 }
 double residualx = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth);
 double residualy = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight);
-if (baton->crop || baton->max) {
-residual = std::max(residualx, residualy);
-} else {
+if (baton->canvas == EMBED) {
 residual = std::min(residualx, residualy);
+} else {
+residual = std::max(residualx, residualy);
 }
-} else {
-vips_copy(shrunk_on_load, &shrunk, NULL);
 }
-g_object_unref(shrunk_on_load);
 
 // Use vips_affine with the remaining float part
-VipsImage *affined = vips_image_new();
 if (residual != 0) {
+VipsImage *affined = vips_image_new();
+vips_object_local(hook, affined);
 // Create interpolator - "bilinear" (default), "bicubic" or "nohalo"
 VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.c_str());
 // Perform affine transformation
-if (vips_affine(shrunk, &affined, residual, 0, 0, residual, "interpolate", interpolator, NULL)) {
+if (vips_affine(image, &affined, residual, 0, 0, residual, "interpolate", interpolator, NULL)) {
 g_object_unref(interpolator);
-return resize_error(baton, shrunk);
+return resize_error(baton, hook);
 }
 g_object_unref(interpolator);
-} else {
-vips_copy(shrunk, &affined, NULL);
+g_object_unref(image);
+image = affined;
 }
-g_object_unref(shrunk);
 
 // Rotate
-VipsImage *rotated = vips_image_new();
-if (rotation != VIPS_ANGLE_0) {
-if (vips_rot(affined, &rotated, rotation, NULL)) {
-return resize_error(baton, affined);
+if (rotation != ANGLE_0) {
+VipsImage *rotated = vips_image_new();
+vips_object_local(hook, rotated);
+if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) {
+return resize_error(baton, hook);
 }
-} else {
-vips_copy(affined, &rotated, NULL);
+g_object_unref(image);
+image = rotated;
 }
-g_object_unref(affined);
 
 // Crop/embed
-VipsImage *canvased = vips_image_new();
-if (rotated->Xsize != baton->width || rotated->Ysize != baton->height) {
-if (baton->crop || baton->max) {
+if (image->Xsize != baton->width || image->Ysize != baton->height) {
+if (baton->canvas == EMBED) {
+// Match background colour space, namely sRGB
+if (image->Type != VIPS_INTERPRETATION_sRGB) {
+// Convert to sRGB colour space
+VipsImage *colourspaced = vips_image_new();
+vips_object_local(hook, colourspaced);
+if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = colourspaced;
+}
+// Add non-transparent alpha channel, if required
+if (baton->background[3] < 255.0 && !sharp_image_has_alpha(image)) {
+// Create single-channel transparency
+VipsImage *black = vips_image_new();
+vips_object_local(hook, black);
+if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, NULL)) {
+return resize_error(baton, hook);
+}
+// Invert to become non-transparent
+VipsImage *alpha = vips_image_new();
+vips_object_local(hook, alpha);
+if (vips_invert(black, &alpha, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(black);
+// Append alpha channel to existing image
+VipsImage *joined = vips_image_new();
+vips_object_local(hook, joined);
+if (vips_bandjoin2(image, alpha, &joined, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(alpha);
+g_object_unref(image);
+image = joined;
+}
+// Create background
+VipsArrayDouble *background;
+if (baton->background[3] < 255.0) {
+background = vips_array_double_newv(
+4, baton->background[0], baton->background[1], baton->background[2], baton->background[3]
+);
+} else {
+background = vips_array_double_newv(
+3, baton->background[0], baton->background[1], baton->background[2]
+);
+}
+// Embed
+int left = (baton->width - image->Xsize) / 2;
+int top = (baton->height - image->Ysize) / 2;
+VipsImage *embedded = vips_image_new();
+vips_object_local(hook, embedded);
+if (vips_embed(image, &embedded, left, top, baton->width, baton->height,
+"extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL
+)) {
+vips_area_unref(reinterpret_cast<VipsArea*>(background));
+return resize_error(baton, hook);
+}
+vips_area_unref(reinterpret_cast<VipsArea*>(background));
+g_object_unref(image);
+image = embedded;
+} else {
 // Crop/max
 int left;
 int top;
-std::tie(left, top) = sharp_calc_crop(rotated->Xsize, rotated->Ysize, baton->width, baton->height, baton->gravity);
-int width = std::min(rotated->Xsize, baton->width);
-int height = std::min(rotated->Ysize, baton->height);
-if (vips_extract_area(rotated, &canvased, left, top, width, height, NULL)) {
-return resize_error(baton, rotated);
-}
-} else {
-// Embed
-int left = (baton->width - rotated->Xsize) / 2;
-int top = (baton->height - rotated->Ysize) / 2;
-if (vips_embed(rotated, &canvased, left, top, baton->width, baton->height, "extend", baton->extend, NULL)) {
-return resize_error(baton, rotated);
+std::tie(left, top) = sharp_calc_crop(image->Xsize, image->Ysize, baton->width, baton->height, baton->gravity);
+int width = std::min(image->Xsize, baton->width);
+int height = std::min(image->Ysize, baton->height);
+VipsImage *extracted = vips_image_new();
+vips_object_local(hook, extracted);
+if (vips_extract_area(image, &extracted, left, top, width, height, NULL)) {
+return resize_error(baton, hook);
 }
+g_object_unref(image);
+image = extracted;
 }
-} else {
-vips_copy(rotated, &canvased, NULL);
 }
-g_object_unref(rotated);
+
+// Post extraction
+if (baton->topOffsetPost != -1) {
+VipsImage *extractedPost = vips_image_new();
+vips_object_local(hook, extractedPost);
+if (vips_extract_area(image, &extractedPost, baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, NULL)) {
+return resize_error(baton, hook);
+}
+g_object_unref(image);
+image = extractedPost;
+}
+
 // Mild sharpen
-VipsImage *sharpened = vips_image_new();
 if (baton->sharpen) {
+VipsImage *sharpened = vips_image_new();
+vips_object_local(hook, sharpened);
 VipsImage *sharpen = vips_image_new_matrixv(3, 3,
 -1.0, -1.0, -1.0,
 -1.0, 32.0, -1.0,
 -1.0, -1.0, -1.0);
 vips_image_set_double(sharpen, "scale", 24);
-if (vips_conv(canvased, &sharpened, sharpen, NULL)) {
-g_object_unref(sharpen);
-return resize_error(baton, canvased);
+vips_object_local(hook, sharpen);
+if (vips_conv(image, &sharpened, sharpen, NULL)) {
+return resize_error(baton, hook);
 }
-g_object_unref(sharpen);
-} else {
-vips_copy(canvased, &sharpened, NULL);
+g_object_unref(image);
+image = sharpened;
````
|
|
||||||
}
|
}
|
||||||
g_object_unref(canvased);
|
|
||||||
|
|
||||||
// Gamma decoding (brighten)
|
// Gamma decoding (brighten)
|
||||||
if (baton->gamma >= 1 && baton->gamma <= 3) {
|
if (baton->gamma >= 1 && baton->gamma <= 3) {
|
||||||
VipsImage *gamma_decoded = vips_image_new();
|
VipsImage *gamma_decoded = vips_image_new();
|
||||||
if (vips_gamma(sharpened, &gamma_decoded, "exponent", baton->gamma, NULL)) {
|
vips_object_local(hook, gamma_decoded);
|
||||||
return resize_error(baton, sharpened);
|
if (vips_gamma(image, &gamma_decoded, "exponent", baton->gamma, NULL)) {
|
||||||
|
return resize_error(baton, hook);
|
||||||
}
|
}
|
||||||
g_object_unref(sharpened);
|
g_object_unref(image);
|
||||||
sharpened = gamma_decoded;
|
image = gamma_decoded;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Always convert to sRGB colour space
|
// Convert to sRGB colour space, if not already
|
||||||
VipsImage *colourspaced = vips_image_new();
|
if (image->Type != VIPS_INTERPRETATION_sRGB) {
|
||||||
vips_colourspace(sharpened, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL);
|
VipsImage *colourspaced = vips_image_new();
|
||||||
g_object_unref(sharpened);
|
vips_object_local(hook, colourspaced);
|
||||||
|
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
|
||||||
|
return resize_error(baton, hook);
|
||||||
|
}
|
||||||
|
g_object_unref(image);
|
||||||
|
image = colourspaced;
|
||||||
|
}
|
||||||
|
|
||||||
// Generate image tile cache when interlace output is required
|
// Generate image tile cache when interlace output is required
|
||||||
VipsImage *cached = vips_image_new();
|
|
||||||
if (baton->progressive) {
|
if (baton->progressive) {
|
||||||
if (vips_tilecache(colourspaced, &cached, "threaded", TRUE, "persistent", TRUE, "max_tiles", -1, NULL)) {
|
VipsImage *cached = vips_image_new();
|
||||||
return resize_error(baton, colourspaced);
|
vips_object_local(hook, cached);
|
||||||
|
if (vips_tilecache(image, &cached, "threaded", TRUE, "persistent", TRUE, "max_tiles", -1, NULL)) {
|
||||||
|
return resize_error(baton, hook);
|
||||||
}
|
}
|
||||||
} else {
|
g_object_unref(image);
|
||||||
vips_copy(colourspaced, &cached, NULL);
|
image = cached;
|
||||||
}
|
}
|
||||||
g_object_unref(colourspaced);
|
|
||||||
|
|
||||||
// Output
|
// Output
|
||||||
VipsImage *output = cached;
|
|
||||||
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == JPEG)) {
|
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == JPEG)) {
|
||||||
// Write JPEG to buffer
|
// Write JPEG to buffer
|
||||||
if (vips_jpegsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
|
if (vips_jpegsave_buffer(image, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
|
||||||
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
|
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
|
||||||
return resize_error(baton, output);
|
return resize_error(baton, hook);
|
||||||
}
|
}
|
||||||
baton->output_format = "jpeg";
|
baton->output_format = "jpeg";
|
||||||
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == PNG)) {
|
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == PNG)) {
|
||||||
// Write PNG to buffer
|
// Write PNG to buffer
|
||||||
if (vips_pngsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
|
if (vips_pngsave_buffer(image, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
|
||||||
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
|
"compression", baton->compression_level, "interlace", baton->progressive, NULL)) {
|
||||||
return resize_error(baton, output);
|
return resize_error(baton, hook);
|
||||||
}
|
}
|
||||||
baton->output_format = "png";
|
baton->output_format = "png";
|
||||||
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == WEBP)) {
|
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == WEBP)) {
|
||||||
// Write WEBP to buffer
|
// Write WEBP to buffer
|
||||||
if (vips_webpsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
|
if (vips_webpsave_buffer(image, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
|
||||||
"Q", baton->quality, NULL)) {
|
"Q", baton->quality, NULL)) {
|
||||||
return resize_error(baton, output);
|
return resize_error(baton, hook);
|
||||||
}
|
}
|
||||||
baton->output_format = "webp";
|
baton->output_format = "webp";
|
||||||
} else {
|
} else {
|
||||||
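The embed branch above first normalises the working image to sRGB and, whenever the requested background colour carries an alpha value below 255, appends a non-transparent alpha channel before calling vips_embed with VIPS_EXTEND_BACKGROUND. From JavaScript this path is driven by background() and embed(); a minimal sketch (assuming a local input.jpg) that mirrors the WebP unit test added later in this comparison:

    var sharp = require('sharp');

    // Embed a JPEG onto a fully transparent background; because the
    // background alpha is below 255, the pipeline appends an alpha channel.
    sharp('input.jpg')
      .resize(320, 240)
      .background({r: 0, g: 0, b: 0, a: 0})
      .embed()
      .webp()
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        // info.format is 'webp' and the buffer holds a 4-channel image
      });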
@@ -638,39 +817,41 @@ class ResizeWorker : public NanAsyncWorker {
 bool match_input = !(output_jpeg || output_png || output_webp || output_tiff);
 if (output_jpeg || (match_input && inputImageType == JPEG)) {
 // Write JPEG to file
-if (vips_jpegsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
+if (vips_jpegsave(image, baton->output.c_str(), "strip", !baton->with_metadata,
 "Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
-return resize_error(baton, output);
+return resize_error(baton, hook);
 }
 baton->output_format = "jpeg";
 } else if (output_png || (match_input && inputImageType == PNG)) {
 // Write PNG to file
-if (vips_pngsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
-"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
-return resize_error(baton, output);
+if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->with_metadata,
+"compression", baton->compression_level, "interlace", baton->progressive, NULL)) {
+return resize_error(baton, hook);
 }
 baton->output_format = "png";
 } else if (output_webp || (match_input && inputImageType == WEBP)) {
 // Write WEBP to file
-if (vips_webpsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
+if (vips_webpsave(image, baton->output.c_str(), "strip", !baton->with_metadata,
 "Q", baton->quality, NULL)) {
-return resize_error(baton, output);
+return resize_error(baton, hook);
 }
 baton->output_format = "webp";
 } else if (output_tiff || (match_input && inputImageType == TIFF)) {
 // Write TIFF to file
-if (vips_tiffsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
+if (vips_tiffsave(image, baton->output.c_str(), "strip", !baton->with_metadata,
 "compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) {
-return resize_error(baton, output);
+return resize_error(baton, hook);
 }
 baton->output_format = "tiff";
 } else {
 (baton->err).append("Unsupported output " + baton->output);
-return resize_error(baton, output);
+g_object_unref(image);
+return resize_error(baton, hook);
 }
 }
-g_object_unref(output);
+// Clean up any dangling image references
+g_object_unref(image);
+g_object_unref(hook);
 // Clean up libvips' per-request data and threads
 vips_error_clear();
 vips_thread_shutdown();
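This hunk selects the file writer (JPEG, PNG, WebP or TIFF) from the requested output format, falling back to the input format when nothing explicit matches, and now reads the renamed with_metadata and compression_level baton fields. A minimal sketch of the corresponding JavaScript call, using placeholder paths; with an unrecognised extension the output mirrors the input format:

    var sharp = require('sharp');

    sharp('input.jpg')
      .resize(320, 80)
      .quality(80)          // JPEG quality, the "Q" option above
      .withMetadata()       // retain metadata instead of passing "strip"
      .toFile('output.zoinks', function(err, info) {
        if (err) throw err;
        console.log(info.format); // 'jpeg', mirroring the JPEG input
      });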
@@ -684,11 +865,21 @@ class ResizeWorker : public NanAsyncWorker {
 // Error
 argv[0] = NanNew<String>(baton->err.data(), baton->err.size());
 } else {
+int width = baton->width;
+int height = baton->height;
+if (baton->topOffsetPre != -1 && (baton->width == -1 || baton->height == -1)) {
+width = baton->widthPre;
+height = baton->heightPre;
+}
+if (baton->topOffsetPost != -1) {
+width = baton->widthPost;
+height = baton->heightPost;
+}
 // Info Object
 Local<Object> info = NanNew<Object>();
 info->Set(NanNew<String>("format"), NanNew<String>(baton->output_format));
-info->Set(NanNew<String>("width"), NanNew<Number>(baton->width));
-info->Set(NanNew<String>("height"), NanNew<Number>(baton->height));
+info->Set(NanNew<String>("width"), NanNew<Number>(width));
+info->Set(NanNew<String>("height"), NanNew<Number>(height));

 if (baton->buffer_out_len > 0) {
 // Buffer
@@ -713,6 +904,7 @@ class ResizeWorker : public NanAsyncWorker {
 resize_baton* baton;
 };


 /*
 resize(options, output, callback)
 */
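With this change the info object handed to the callback reports the dimensions that were actually produced: the pre-extract size when an extract-before-resize is used without an explicit width and height, and the post-extract size when an extract-after-resize is applied, rather than always echoing the requested dimensions. A minimal sketch of reading it, assuming a placeholder input path:

    var sharp = require('sharp');

    sharp('input.jpg')
      .resize(320, 240)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        // format, width and height describe the image that was written
        console.log(info.format, info.width, info.height);
      });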
@@ -725,38 +917,54 @@ NAN_METHOD(resize) {

 // Input filename
 baton->file_in = *String::Utf8Value(options->Get(NanNew<String>("fileIn"))->ToString());
+baton->access_method = options->Get(NanNew<String>("sequentialRead"))->BooleanValue() ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
 // Input Buffer object
 if (options->Get(NanNew<String>("bufferIn"))->IsObject()) {
 Local<Object> buffer = options->Get(NanNew<String>("bufferIn"))->ToObject();
 baton->buffer_in_len = Buffer::Length(buffer);
 baton->buffer_in = Buffer::Data(buffer);
 }
+// Extract image options
+baton->topOffsetPre = options->Get(NanNew<String>("topOffsetPre"))->Int32Value();
+baton->leftOffsetPre = options->Get(NanNew<String>("leftOffsetPre"))->Int32Value();
+baton->widthPre = options->Get(NanNew<String>("widthPre"))->Int32Value();
+baton->heightPre = options->Get(NanNew<String>("heightPre"))->Int32Value();
+baton->topOffsetPost = options->Get(NanNew<String>("topOffsetPost"))->Int32Value();
+baton->leftOffsetPost = options->Get(NanNew<String>("leftOffsetPost"))->Int32Value();
+baton->widthPost = options->Get(NanNew<String>("widthPost"))->Int32Value();
+baton->heightPost = options->Get(NanNew<String>("heightPost"))->Int32Value();
 // Output image dimensions
 baton->width = options->Get(NanNew<String>("width"))->Int32Value();
 baton->height = options->Get(NanNew<String>("height"))->Int32Value();
-// Canvas options
+// Canvas option
 Local<String> canvas = options->Get(NanNew<String>("canvas"))->ToString();
 if (canvas->Equals(NanNew<String>("c"))) {
-baton->crop = true;
-} else if (canvas->Equals(NanNew<String>("w"))) {
-baton->extend = VIPS_EXTEND_WHITE;
-} else if (canvas->Equals(NanNew<String>("b"))) {
-baton->extend = VIPS_EXTEND_BLACK;
+baton->canvas = CROP;
 } else if (canvas->Equals(NanNew<String>("m"))) {
-baton->max = true;
+baton->canvas = MAX;
+} else if (canvas->Equals(NanNew<String>("e"))) {
+baton->canvas = EMBED;
 }
-// Other options
-baton->gravity = options->Get(NanNew<String>("gravity"))->Int32Value();
-baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
-baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString());
-baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
-baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
+// Background colour
+Local<Array> background = Local<Array>::Cast(options->Get(NanNew<String>("background")));
+for (int i = 0; i < 4; i++) {
+baton->background[i] = background->Get(i)->NumberValue();
+}
+// Resize options
 baton->without_enlargement = options->Get(NanNew<String>("withoutEnlargement"))->BooleanValue();
-baton->access_method = options->Get(NanNew<String>("sequentialRead"))->BooleanValue() ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
-baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
-baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
+baton->gravity = options->Get(NanNew<String>("gravity"))->Int32Value();
+baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString());
+// Operators
+baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
+baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
+baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
+baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue();
 baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
-baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
+// Output options
+baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
+baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
+baton->compression_level = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
+baton->with_metadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
 // Output filename or __format for Buffer
 baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString());

|
|||||||
BIN
tests/fixtures/Channel_digital_image_CMYK_color.jpg
vendored
Normal file
BIN
tests/fixtures/Channel_digital_image_CMYK_color.jpg
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 714 KiB |
BIN
tests/fixtures/blackbug.png
vendored
Normal file
BIN
tests/fixtures/blackbug.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 105 KiB |
@@ -4,4 +4,4 @@ if ! type valgrind >/dev/null; then
 fi

 curl -O https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp
-G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=libvips.supp --suppressions=sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible node unit.js
+G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=libvips.supp --suppressions=sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 node unit.js
@@ -1,10 +1,12 @@
-var sharp = require("../index");
-var fs = require("fs");
-var path = require("path");
-var assert = require("assert");
-var async = require("async");
+/*jslint node: true */
+'use strict';

-var inputJpg = path.join(__dirname, "fixtures/2569067123_aca715a2ee_o.jpg"); // http://www.flickr.com/photos/grizdave/2569067123/
+var sharp = require('../index');
+var path = require('path');
+var assert = require('assert');
+var async = require('async');

+var inputJpg = path.join(__dirname, 'fixtures/2569067123_aca715a2ee_o.jpg'); // http://www.flickr.com/photos/grizdave/2569067123/
 var width = 720;
 var height = 480;

@@ -18,6 +20,7 @@ async.mapSeries([1, 1, 2, 4, 8, 16, 32, 64, 128], function(parallelism, next) {
 var start = new Date().getTime();
 async.times(parallelism,
 function(id, callback) {
+/*jslint unused: false */
 sharp(inputJpg).resize(width, height).toBuffer(function(err, buffer) {
 buffer = null;
 callback(err, new Date().getTime() - start);
@@ -29,7 +32,7 @@ async.mapSeries([1, 1, 2, 4, 8, 16, 32, 64, 128], function(parallelism, next) {
 var mean = ids.reduce(function(a, b) {
 return a + b;
 }) / ids.length;
-console.log(parallelism + " parallel calls: fastest=" + ids[0] + "ms slowest=" + ids[ids.length - 1] + "ms mean=" + mean + "ms");
+console.log(parallelism + ' parallel calls: fastest=' + ids[0] + 'ms slowest=' + ids[ids.length - 1] + 'ms mean=' + mean + 'ms');
 next();
 }
 );
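The benchmark above drives libvips through sharp with the operation cache left at its defaults; the cache can be inspected or disabled between runs, as the test files in this comparison do. A minimal sketch:

    var sharp = require('sharp');

    console.dir(sharp.cache());  // current cache statistics
    sharp.cache(0);              // disable caching, as tests/unit.js does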
1173 tests/perf.js (file diff suppressed because it is too large)
137 tests/random.js
@@ -1,68 +1,69 @@
-var sharp = require("../index");
-var fs = require("fs");
-var path = require("path");
-var imagemagick = require("imagemagick");
-var gm = require("gm");
-var async = require("async");
-var assert = require("assert");
-var Benchmark = require("benchmark");
-var fixturesPath = path.join(__dirname, "fixtures");
-var inputJpg = path.join(fixturesPath, "2569067123_aca715a2ee_o.jpg"); // http://www.flickr.com/photos/grizdave/2569067123/
-var outputJpg = path.join(fixturesPath, "output.jpg");
-var min = 320;
-var max = 960;
-var randomDimension = function() {
-return Math.random() * (max - min) + min;
-};
-new Benchmark.Suite("random").add("imagemagick", {
-defer: true,
-fn: function(deferred) {
-imagemagick.resize({
-srcPath: inputJpg,
-dstPath: outputJpg,
-quality: 0.8,
-width: randomDimension(),
-height: randomDimension()
-}, function(err) {
-if (err) {
-throw err;
-} else {
-deferred.resolve();
-}
-});
-}
-}).add("gm", {
-defer: true,
-fn: function(deferred) {
-gm(inputJpg).resize(randomDimension(), randomDimension()).quality(80).toBuffer(function (err, buffer) {
-if (err) {
-throw err;
-} else {
-assert.notStrictEqual(null, buffer);
-deferred.resolve();
-}
-});
-}
-}).add("sharp", {
-defer: true,
-fn: function(deferred) {
-sharp(inputJpg).resize(randomDimension(), randomDimension()).toBuffer(function(err, buffer) {
-if (err) {
-throw err;
-} else {
-assert.notStrictEqual(null, buffer);
-deferred.resolve();
-}
-});
-}
-}).on("cycle", function(event) {
-console.log(String(event.target));
-}).on("complete", function() {
-var winner = this.filter("fastest").pluck("name");
-assert.strictEqual("sharp", String(winner), "sharp was slower than " + winner);
-console.dir(sharp.cache());
-}).run();
+/*jslint node: true */
+'use strict';
+var sharp = require('../index');
+var path = require('path');
+var imagemagick = require('imagemagick');
+var gm = require('gm');
+var assert = require('assert');
+var Benchmark = require('benchmark');
+var fixturesPath = path.join(__dirname, 'fixtures');
+var inputJpg = path.join(fixturesPath, '2569067123_aca715a2ee_o.jpg'); // http://www.flickr.com/photos/grizdave/2569067123/
+var outputJpg = path.join(fixturesPath, 'output.jpg');
+var min = 320;
+var max = 960;
+var randomDimension = function() {
+return Math.random() * (max - min) + min;
+};
+new Benchmark.Suite('random').add('imagemagick', {
+defer: true,
+fn: function(deferred) {
+imagemagick.resize({
+srcPath: inputJpg,
+dstPath: outputJpg,
+quality: 0.8,
+width: randomDimension(),
+height: randomDimension()
+}, function(err) {
+if (err) {
+throw err;
+} else {
+deferred.resolve();
+}
+});
+}
+}).add('gm', {
+defer: true,
+fn: function(deferred) {
+gm(inputJpg).resize(randomDimension(), randomDimension()).quality(80).toBuffer(function (err, buffer) {
+if (err) {
+throw err;
+} else {
+assert.notStrictEqual(null, buffer);
+deferred.resolve();
+}
+});
+}
+}).add('sharp', {
+defer: true,
+fn: function(deferred) {
+sharp(inputJpg).resize(randomDimension(), randomDimension()).toBuffer(function(err, buffer) {
+if (err) {
+throw err;
+} else {
+assert.notStrictEqual(null, buffer);
+deferred.resolve();
+}
+});
+}
+}).on('cycle', function(event) {
+console.log(String(event.target));
+}).on('complete', function() {
+var winner = this.filter('fastest').pluck('name');
+assert.strictEqual('sharp', String(winner), 'sharp was slower than ' + winner);
+console.dir(sharp.cache());
+}).run();
270 tests/unit.js
@@ -2,27 +2,28 @@
 /*jslint es5: true */
 'use strict';

-var sharp = require("../index");
-var fs = require("fs");
-var path = require("path");
-var assert = require("assert");
-var async = require("async");
+var sharp = require('../index');
+var fs = require('fs');
+var path = require('path');
+var assert = require('assert');
+var async = require('async');

-var fixturesPath = path.join(__dirname, "fixtures");
+var fixturesPath = path.join(__dirname, 'fixtures');

-var inputJpg = path.join(fixturesPath, "2569067123_aca715a2ee_o.jpg"); // http://www.flickr.com/photos/grizdave/2569067123/
-var outputJpg = path.join(fixturesPath, "output.jpg");
+var inputJpg = path.join(fixturesPath, '2569067123_aca715a2ee_o.jpg'); // http://www.flickr.com/photos/grizdave/2569067123/
+var inputJpgWithExif = path.join(fixturesPath, 'Landscape_8.jpg'); // https://github.com/recurser/exif-orientation-examples/blob/master/Landscape_8.jpg
+var inputJpgWithGammaHoliness = path.join(fixturesPath, 'gamma_dalai_lama_gray.jpg'); // http://www.4p8.com/eric.brasseur/gamma.html
+var inputJpgWithCmykProfile = path.join(fixturesPath, 'Channel_digital_image_CMYK_color.jpg'); // http://en.wikipedia.org/wiki/File:Channel_digital_image_CMYK_color.jpg

-var inputTiff = path.join(fixturesPath, "G31D.TIF"); // http://www.fileformat.info/format/tiff/sample/e6c9a6e5253348f4aef6d17b534360ab/index.htm
-var outputTiff = path.join(fixturesPath, "output.tiff");
+var inputPng = path.join(fixturesPath, '50020484-00001.png'); // http://c.searspartsdirect.com/lis_png/PLDM/50020484-00001.png
+var inputPngWithTransparency = path.join(fixturesPath, 'blackbug.png'); // public domain

-var inputJpgWithExif = path.join(fixturesPath, "Landscape_8.jpg"); // https://github.com/recurser/exif-orientation-examples/blob/master/Landscape_8.jpg
+var inputWebP = path.join(fixturesPath, '4.webp'); // http://www.gstatic.com/webp/gallery/4.webp
+var inputTiff = path.join(fixturesPath, 'G31D.TIF'); // http://www.fileformat.info/format/tiff/sample/e6c9a6e5253348f4aef6d17b534360ab/index.htm
+var inputGif = path.join(fixturesPath, 'Crash_test.gif'); // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif

-var inputJpgWithGammaHoliness = path.join(fixturesPath, "gamma_dalai_lama_gray.jpg"); // http://www.4p8.com/eric.brasseur/gamma.html
-var inputPng = path.join(fixturesPath, "50020484-00001.png"); // http://c.searspartsdirect.com/lis_png/PLDM/50020484-00001.png
-var inputWebP = path.join(fixturesPath, "4.webp"); // http://www.gstatic.com/webp/gallery/4.webp
-var inputGif = path.join(fixturesPath, "Crash_test.gif"); // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
+var outputJpg = path.join(fixturesPath, 'output.jpg');
+var outputZoinks = path.join(fixturesPath, 'output.zoinks'); // an 'unknown' file extension

 // Ensure cache limits can be set
 sharp.cache(0); // Disable
@@ -91,6 +92,45 @@ async.series([
 done();
 });
 },
+// Embed - JPEG within PNG, no alpha channel
+function(done) {
+sharp(inputJpg)
+.embed()
+.resize(320, 240)
+.png()
+.toBuffer(function(err, data, info) {
+if (err) throw err;
+assert.strictEqual(true, data.length > 0);
+assert.strictEqual('png', info.format);
+assert.strictEqual(320, info.width);
+assert.strictEqual(240, info.height);
+sharp(data).metadata(function(err, metadata) {
+if (err) throw err;
+assert.strictEqual(3, metadata.channels);
+done();
+});
+});
+},
+// Embed - JPEG within WebP, to include alpha channel
+function(done) {
+sharp(inputJpg)
+.resize(320, 240)
+.background({r: 0, g: 0, b: 0, a: 0})
+.embed()
+.webp()
+.toBuffer(function(err, data, info) {
+if (err) throw err;
+assert.strictEqual(true, data.length > 0);
+assert.strictEqual('webp', info.format);
+assert.strictEqual(320, info.width);
+assert.strictEqual(240, info.height);
+sharp(data).metadata(function(err, metadata) {
+if (err) throw err;
+assert.strictEqual(4, metadata.channels);
+done();
+});
+});
+},
 // Quality
 function(done) {
 sharp(inputJpg).resize(320, 240).quality(70).toBuffer(function(err, buffer70) {
@@ -98,6 +138,7 @@ async.series([
 sharp(inputJpg).resize(320, 240).toBuffer(function(err, buffer80) {
 if (err) throw err;
 sharp(inputJpg).resize(320, 240).quality(90).toBuffer(function(err, buffer90) {
+if (err) throw err;
 assert(buffer70.length < buffer80.length);
 assert(buffer80.length < buffer90.length);
 done();
@@ -107,7 +148,7 @@ async.series([
 },
 // TIFF with dimensions known to cause rounding errors
 function(done) {
-sharp(inputTiff).resize(240, 320).embedBlack().jpeg().toBuffer(function(err, data, info) {
+sharp(inputTiff).resize(240, 320).embed().jpeg().toBuffer(function(err, data, info) {
 if (err) throw err;
 assert.strictEqual(true, data.length > 0);
 assert.strictEqual('jpeg', info.format);
@@ -271,10 +312,31 @@ async.series([
 function(done) {
 sharp(inputTiff).webp().toBuffer(function(err, data, info) {
 if (err) throw err;
+assert.strictEqual(true, data.length > 0);
 assert.strictEqual('webp', info.format);
 done();
 });
 },
+// Check colour space conversion from CMYK to sRGB
+function(done) {
+sharp(inputJpgWithCmykProfile).resize(320).toBuffer(function(err, data, info) {
+if (err) throw err;
+assert.strictEqual(true, data.length > 0);
+assert.strictEqual('jpeg', info.format);
+assert.strictEqual(320, info.width);
+done();
+});
+},
+// Check colour space conversion from CMYK to sRGB works with background colour (yellow=fail)
+function(done) {
+sharp(inputJpgWithCmykProfile).resize(320, 240).background('white').embed().toFile(path.join(fixturesPath, 'output.cmyk2srgb.jpg'), function(err, info) {
+if (err) throw err;
+assert.strictEqual('jpeg', info.format);
+assert.strictEqual(320, info.width);
+assert.strictEqual(240, info.height);
+done();
+});
+},
 // Interpolation: nearest neighbour
 function(done) {
 sharp(inputJpg).resize(320, 240).interpolateWith(sharp.interpolator.nearest).toBuffer(function(err, data, info) {
@@ -392,6 +454,7 @@ async.series([
 var readable = fs.createReadStream(inputJpg);
 var pipeline = sharp().resize(320, 240).toBuffer(function(err, data, info) {
 if (err) throw err;
+assert.strictEqual(true, data.length > 0);
 assert.strictEqual('jpeg', info.format);
 assert.strictEqual(320, info.width);
 assert.strictEqual(240, info.height);
@@ -425,6 +488,7 @@ async.series([
 anErrorWasEmitted = !!err;
 }).on('end', function() {
 assert(anErrorWasEmitted);
+fs.unlinkSync(outputJpg);
 done();
 });
 var readableButNotAnImage = fs.createReadStream(__filename);
@@ -439,6 +503,7 @@ async.series([
 anErrorWasEmitted = !!err;
 }).on('end', function() {
 assert(anErrorWasEmitted);
+fs.unlinkSync(outputJpg);
 done();
 });
 var writable = fs.createWriteStream(outputJpg);
@@ -498,6 +563,69 @@ async.series([
 done();
 });
 },
+// Extract jpg
+function(done) {
+sharp(inputJpg).extract(2,2,20,20).toFile(path.join(fixturesPath, 'output.extract.jpg'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(20, info.width);
+assert.strictEqual(20, info.height);
+done();
+});
+},
+// Extract png
+function(done) {
+sharp(inputPng).extract(300,200,400,200).toFile(path.join(fixturesPath, 'output.extract.png'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(400, info.width);
+assert.strictEqual(200, info.height);
+done();
+});
+},
+// Extract webp
+function(done) {
+sharp(inputWebP).extract(50, 100, 125, 200).toFile(path.join(fixturesPath, 'output.extract.webp'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(125, info.width);
+assert.strictEqual(200, info.height);
+done();
+});
+},
+// Extract tiff
+function(done) {
+sharp(inputTiff).extract(63, 34, 341, 529).toFile(path.join(fixturesPath, 'output.extract.tiff'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(341, info.width);
+assert.strictEqual(529, info.height);
+done();
+});
+},
+// Extract before resize
+function(done) {
+sharp(inputJpg).extract(10, 10, 10, 500, 500).resize(100, 100).toFile(path.join(fixturesPath, 'output.extract.resize.jpg'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(100, info.width);
+assert.strictEqual(100, info.height);
+done();
+});
+},
+// Extract after resize and crop
+function(done) {
+sharp(inputJpg).resize(500, 500).crop(sharp.gravity.north).extract(10, 10, 100, 100).toFile(path.join(fixturesPath, 'output.resize.crop.extract.jpg'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(100, info.width);
+assert.strictEqual(100, info.height);
+done();
+});
+},
+// Extract before and after resize and crop
+function(done) {
+sharp(inputJpg).extract(0, 0, 700, 700).resize(500, 500).crop(sharp.gravity.north).extract(10, 10, 100, 100).toFile(path.join(fixturesPath, 'output.extract.resize.crop.extract.jpg'), function(err, info) {
+if (err) throw err;
+assert.strictEqual(100, info.width);
+assert.strictEqual(100, info.height);
+done();
+});
+},
 // Keeps Metadata after a resize
 function(done) {
 sharp(inputJpgWithExif).resize(320, 240).withMetadata().toBuffer(function(err, buffer) {
@@ -522,16 +650,27 @@ async.series([
 },
 // Output filename without extension should mirror input format
 function(done) {
-sharp(inputJpg).resize(320, 80).toFile(path.join(fixturesPath, 'output.zoinks'), function(err, info) {
+sharp(inputJpg).resize(320, 80).toFile(outputZoinks, function(err, info) {
 if (err) throw err;
 assert.strictEqual('jpeg', info.format);
 assert.strictEqual(320, info.width);
 assert.strictEqual(80, info.height);
+fs.unlinkSync(outputZoinks);
 done();
 });
 },
 function(done) {
-sharp(inputPng).resize(320, 80).toFile(path.join(fixturesPath, 'output.zoinks'), function(err, info) {
+sharp(inputPng).resize(320, 80).toFile(outputZoinks, function(err, info) {
+if (err) throw err;
+assert.strictEqual('png', info.format);
+assert.strictEqual(320, info.width);
+assert.strictEqual(80, info.height);
+fs.unlinkSync(outputZoinks);
+done();
+});
+},
+function(done) {
+sharp(inputPngWithTransparency).resize(320, 80).toFile(outputZoinks, function(err, info) {
 if (err) throw err;
 assert.strictEqual('png', info.format);
 assert.strictEqual(320, info.width);
@@ -540,26 +679,29 @@ async.series([
 });
 },
 function(done) {
-sharp(inputWebP).resize(320, 80).toFile(path.join(fixturesPath, 'output.zoinks'), function(err, info) {
+sharp(inputWebP).resize(320, 80).toFile(outputZoinks, function(err, info) {
 if (err) throw err;
 assert.strictEqual('webp', info.format);
 assert.strictEqual(320, info.width);
 assert.strictEqual(80, info.height);
+fs.unlinkSync(outputZoinks);
 done();
 });
 },
 function(done) {
-sharp(inputTiff).resize(320, 80).toFile(path.join(fixturesPath, 'output.zoinks'), function(err, info) {
+sharp(inputTiff).resize(320, 80).toFile(outputZoinks, function(err, info) {
 if (err) throw err;
 assert.strictEqual('tiff', info.format);
 assert.strictEqual(320, info.width);
 assert.strictEqual(80, info.height);
+fs.unlinkSync(outputZoinks);
 done();
 });
 },
 function(done) {
-sharp(inputGif).resize(320, 80).toFile(path.join(fixturesPath, 'output.zoinks'), function(err, info) {
+sharp(inputGif).resize(320, 80).toFile(outputZoinks, function(err) {
 assert(!!err);
+done();
 });
 },
 // Metadata - JPEG
@@ -584,6 +726,7 @@ async.series([
 assert.strictEqual(600, metadata.height);
 assert.strictEqual('srgb', metadata.space);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 assert.strictEqual(8, metadata.orientation);
 done();
 });
@@ -597,6 +740,7 @@ async.series([
 assert.strictEqual(3248, metadata.height);
 assert.strictEqual('b-w', metadata.space);
 assert.strictEqual(1, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 done();
 });
 },
@@ -609,6 +753,20 @@ async.series([
 assert.strictEqual(2074, metadata.height);
 assert.strictEqual('b-w', metadata.space);
 assert.strictEqual(1, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
+done();
+});
+},
+// Metadata - Transparent PNG
+function(done) {
+sharp(inputPngWithTransparency).metadata(function(err, metadata) {
+if (err) throw err;
+assert.strictEqual('png', metadata.format);
+assert.strictEqual(2048, metadata.width);
+assert.strictEqual(1536, metadata.height);
+assert.strictEqual('srgb', metadata.space);
+assert.strictEqual(4, metadata.channels);
+assert.strictEqual(true, metadata.hasAlpha);
 done();
 });
 },
@@ -621,6 +779,7 @@ async.series([
 assert.strictEqual(772, metadata.height);
 assert.strictEqual('srgb', metadata.space);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 done();
 });
 },
@@ -632,6 +791,7 @@ async.series([
 assert.strictEqual(800, metadata.width);
 assert.strictEqual(533, metadata.height);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 done();
 });
 },
@@ -643,6 +803,7 @@ async.series([
 assert.strictEqual(2225, metadata.height);
 assert.strictEqual('srgb', metadata.space);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 done();
 });
 },
@@ -656,6 +817,7 @@ async.series([
 assert.strictEqual(2225, metadata.height);
 assert.strictEqual('srgb', metadata.space);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 done();
 });
 readable.pipe(pipeline);
@@ -670,6 +832,7 @@ async.series([
 assert.strictEqual(2225, metadata.height);
 assert.strictEqual('srgb', metadata.space);
 assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasAlpha);
 image.resize(metadata.width / 2).toBuffer(function(err, data, info) {
 if (err) throw err;
 assert.strictEqual(true, data.length > 0);
@@ -698,11 +861,74 @@ async.series([
 done();
 });
 },
+// Greyscale conversion
+function(done) {
+sharp(inputJpg).resize(320, 240).greyscale().toFile(path.join(fixturesPath, 'output.greyscale-gamma-0.0.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+function(done) {
+sharp(inputJpg).resize(320, 240).gamma().greyscale().toFile(path.join(fixturesPath, 'output.greyscale-gamma-2.2.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+// Flattening
+function(done) {
+sharp(inputPngWithTransparency).flatten().resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-black.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+function(done) {
+sharp(inputPngWithTransparency).flatten().background({r: 255, g: 102, b: 0}).resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-rgb-orange.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+function(done) {
+sharp(inputPngWithTransparency).flatten().background('#ff6600').resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-hex-orange.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+function(done) {
+sharp(inputJpg).background('#ff0000').flatten().resize(500, 400).toFile(path.join(fixturesPath, 'output.flatten-input-jpg.jpg'), function(err) {
+if (err) throw err;
+done();
+});
+},
+// PNG compression level - valid
+function(done) {
+var isValid = false;
+try {
+sharp().compressionLevel(0);
+isValid = true;
+} catch (e) {}
+assert(isValid);
+done();
+},
+// PNG compression level - invalid
+function(done) {
+var isValid = false;
+try {
+sharp().compressionLevel(-1);
+isValid = true;
+} catch (e) {}
+assert(!isValid);
+done();
+},
 // Verify internal counters
 function(done) {
 var counters = sharp.counters();
 assert.strictEqual(0, counters.queue);
 assert.strictEqual(0, counters.process);
 done();
+},
+// Empty cache
+function(done) {
+sharp.cache(0);
+done();
 }
 ]);
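The metadata assertions added above introduce a hasAlpha property alongside format, dimensions, colour space and channel count. A minimal sketch of querying it against the new transparent PNG fixture:

    var sharp = require('sharp');

    sharp('tests/fixtures/blackbug.png')
      .metadata(function(err, metadata) {
        if (err) throw err;
        console.log(metadata.channels);  // 4
        console.log(metadata.hasAlpha);  // true
      });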