Compare commits


35 Commits

Author SHA1 Message Date
Lovell Fuller
850c2ecdd6 Version bumps 2014-12-15 14:02:30 +00:00
Lovell Fuller
926c5603aa Improve documentation on concurrency/parallelism 2014-12-15 14:00:23 +00:00
Lovell Fuller
d3225fa193 Add 'size' attribute to callback's info Object #138 2014-12-15 13:54:19 +00:00
Lovell Fuller
f026a835fd Move unref of input Buffer to C++ #138 2014-12-14 10:31:25 +00:00
Lovell Fuller
47241db789 Let V8 garbage collect the Buffer earlier #138 2014-12-13 08:48:24 +00:00
Lovell Fuller
34a9970bd9 Remove useless re-definition of image #139 2014-12-12 22:04:55 +00:00
Lovell Fuller
57203f841a Copy input Buffer to avoid V8 heap compaction #138 2014-12-12 22:02:42 +00:00
Lovell Fuller
bd20bd1881 Version bumps 2014-12-11 13:32:52 +00:00
Lovell Fuller
60f1fda7ee Change interpretation to sRGB before transformation #133 2014-12-11 13:32:36 +00:00
Lovell Fuller
ea1013f6ec Add support for latest Amazon Linux 2014-12-08 10:52:59 +00:00
Lovell Fuller
247b607afd Add SVG and PSD fixtures and tests 2014-12-05 21:35:18 +00:00
Lovell Fuller
a56102a209 Ensure ICC transform of withMetadata output #133 2014-12-04 11:28:09 +00:00
Lovell Fuller
940b6f505f Add test for Promise rejection path 2014-12-04 10:48:45 +00:00
Lovell Fuller
e1b5574c4a Handle broken, embedded ICC profile #131 2014-12-03 10:23:35 +00:00
Lovell Fuller
f4cc6a2db4 Correct location of Dockerfile 2014-11-26 10:50:47 +00:00
Lovell Fuller
0acf865654 Faster ICC profile transform via lcms #125 2014-11-25 22:52:24 +00:00
Lovell Fuller
8460e50ee0 Remove spurious keywords 2014-11-25 19:16:01 +00:00
Lovell Fuller
f57a0e3b00 Ensure embedded profile, if any, is always used
Perform sRGB conversion at end of pipe only

withMetadata exports profile, should not convert

Convert one fixture to sRGB to help test

Discovered while investigating #125
2014-11-25 18:54:49 +00:00
Lovell Fuller
02b6016390 Add link to Dockerfile for libvips
Thanks @marcbachmann
2014-11-25 10:33:43 +00:00
Lovell Fuller
4e01d63195 Add hasProfile attribute to metadata response
At the very least will be useful investigating #125
2014-11-24 17:24:29 +00:00
Lovell Fuller
94b47508c0 imagemagick-native now supports async and filter 2014-11-24 15:13:47 +00:00
Lovell Fuller
328cda82c5 Updates for 7.42 stable release of libvips 2014-11-24 12:19:44 +00:00
Lovell Fuller
118b17aa2f Apply less blur before affine reduction #121 2014-11-24 11:52:48 +00:00
Lovell Fuller
b7c7fc22f3 Ensure correct Gaussian blur before affine #121
Use double sigma instead of int radius for blur
2014-11-20 13:59:39 +00:00
Lovell Fuller
177a4f574c Minimum version of libvips now 7.40.0 #74 2014-11-17 12:08:05 +00:00
Lovell Fuller
e22d093002 Ubuntu 14 now compiles 7.40.x from source
as packaged version is the outdated 7.38.x

Added support for Ubuntu 15 and Mint 17.1
2014-11-12 20:30:13 +00:00
Lovell Fuller
e7f6d49bc1 Additional blur radii tests #108 2014-11-12 20:11:28 +00:00
Lovell Fuller
b886db4b0d Add bounds checks on blur/sharpen parameters #108 2014-11-12 20:06:28 +00:00
Lovell Fuller
ee513ac7a7 Less C, more C++ e.g. namespace, enum class
Improve image reference handling
2014-11-11 18:28:23 +00:00
Lovell Fuller
e465306d97 Disable libvips cache for unit tests
Aids memory leak detection
2014-11-11 18:09:48 +00:00
Lovell Fuller
32d9bc204a Add 'fast' blur and Gaussian blur feature #108 2014-11-10 22:38:13 +00:00
Lovell Fuller
df5cf402e3 Ensure leak check tests child processes 2014-11-10 22:35:22 +00:00
Lovell Fuller
86681100b7 Control level of sharpening via radius/flat/jagged #108 2014-11-10 16:20:04 +00:00
Lovell Fuller
47927ef47d Shrink less, affine more, maintain performance #75
Affects interpolators with 4x4+ window size

e.g. Bicubic, LBB, Nohalo

Introduces blur before large affine

to improve large PNG reductions
2014-11-08 12:08:27 +00:00
Lovell Fuller
7537adf399 Add features from libvips 7.40+
Load TIFF from Buffer/Stream

Interlaced PNG output no longer needs tilecache

Option to disable PNG adaptive row filtering
2014-11-08 12:08:27 +00:00
32 changed files with 1279 additions and 500 deletions


@@ -10,13 +10,15 @@
The typical use case for this high speed Node.js module is to convert large images of many formats to smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
The performance of JPEG resizing is typically 8x faster than ImageMagick and GraphicsMagick, based mainly on the number of CPU cores available. This module supports reading and writing JPEG, PNG and WebP images to and from Streams, Buffer objects and the filesystem.
It also supports reading images of many other types from the filesystem via libmagick++ or libgraphicsmagick++ if present.
Colour spaces, embedded ICC profiles and alpha transparency channels are all handled correctly.
Memory usage is kept to a minimum, no child processes are spawned, everything remains non-blocking thanks to _libuv_ and Promises/A+ are supported. Only small regions of uncompressed image data are held in memory and processed at a time, taking full advantage of multiple CPU cores and L1/L2/L3 cache. Resizing an image is typically 4x faster than using the quickest ImageMagick and GraphicsMagick settings.
This module supports reading and writing JPEG, PNG and WebP images to and from Streams, Buffer objects and the filesystem. It also supports reading images of many other types from the filesystem via libmagick++ or libgraphicsmagick++ if present. Huffman tables are optimised when generating JPEG output images without having to use separate command line tools like [jpegoptim](https://github.com/tjko/jpegoptim) and [jpegtran](http://jpegclub.org/jpegtran/). PNG filtering can be disabled, which for diagrams and line art often produces the same result as [pngcrush](http://pmt.sourceforge.net/pngcrush/).
When generating JPEG output all metadata is removed and Huffman tables optimised without having to use separate command line tools like [jpegoptim](https://github.com/tjko/jpegoptim) and [jpegtran](http://jpegclub.org/jpegtran/). Everything remains non-blocking thanks to _libuv_, no child processes are spawned and Promises/A+ are supported.
Anyone who has used the Node.js bindings for [GraphicsMagick](https://github.com/aheckmann/gm) will find the API similarly fluent.
@@ -29,20 +31,21 @@ This module is powered by the blazingly fast [libvips](https://github.com/jcupit
### Prerequisites
* Node.js v0.10+
* [libvips](https://github.com/jcupitt/libvips) v7.38.5+ * [libvips](https://github.com/jcupitt/libvips) v7.40.0+ (7.42.0+ recommended)
To install the latest version of libvips on the following Operating Systems: To install the most suitable version of libvips on the following Operating Systems:
* Mac OS
* Homebrew
* MacPorts
* Debian Linux
* Debian 7, 8
* Ubuntu 12.04, 14.04, 14.10 * Ubuntu 12.04, 14.04, 14.10, 15.04
* Mint 13, 17
* Red Hat Linux
* RHEL/Centos/Scientific 6, 7
* Fedora 21, 22
* Amazon Linux 2014.09
run the following as a user with `sudo` access:
@@ -68,11 +71,17 @@ The _gettext_ dependency of _libvips_ [can lead](https://github.com/lovell/sharp
brew link gettext --force
### Install libvips on Heroku ### Heroku
[Alessandro Tagliapietra](https://github.com/alex88) maintains an [Heroku buildpack for libvips](https://github.com/alex88/heroku-buildpack-vips) and its dependencies.
### Using with gulp.js ### Docker
[Marc Bachmann](https://github.com/marcbachmann) maintains a [Dockerfile for libvips](https://github.com/marcbachmann/dockerfile-libvips).
docker pull marcbachmann/libvips
### gulp.js
[Eugeny Vlasenko](https://github.com/mahnunchik) maintains [gulp-responsive](https://www.npmjs.org/package/gulp-responsive) and [Mohammad Prabowo](https://github.com/rizalp) maintains [gulp-sharp](https://www.npmjs.org/package/gulp-sharp).
@@ -214,12 +223,12 @@ sharp(inputBuffer)
Constructor to which further methods are chained. `input`, if present, can be one of:
* Buffer containing JPEG, PNG or WebP image data, or * Buffer containing JPEG, PNG, WebP or TIFF image data, or
* String containing the filename of an image, with most major formats supported.
The object returned implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class.
JPEG, PNG or WebP format image data can be streamed into the object when `input` is not provided. JPEG, PNG, WebP or TIFF format image data can be streamed into the object when `input` is not provided.
JPEG, PNG or WebP format image data can be streamed out from this object.
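As a rough sketch of the Stream-based usage described above (the `input.jpg`/`output.jpg` filenames and 640×480 dimensions are illustrative placeholders):

```javascript
var fs = require('fs');
var sharp = require('sharp');

// Duplex stream: image data in, resized image data out (output format matches the input)
var resizer = sharp().resize(640, 480);
fs.createReadStream('input.jpg').pipe(resizer).pipe(fs.createWriteStream('output.jpg'));
```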
@@ -232,8 +241,9 @@ Fast access to image metadata without decoding any compressed image data.
* `format`: Name of decoder to be used to decompress image data e.g. `jpeg`, `png`, `webp` (for file-based input additionally `tiff` and `magick`)
* `width`: Number of pixels wide
* `height`: Number of pixels high
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L502) * `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L522)
* `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
* `hasProfile`: Boolean indicating the presence of an embedded ICC profile
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* `orientation`: Number value of the EXIF Orientation header, if present
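For example, a minimal sketch of reading these attributes, including the new `hasProfile` flag (the `photo.jpg` filename is illustrative only):

```javascript
var sharp = require('sharp');

sharp('photo.jpg').metadata(function(err, metadata) {
  if (err) throw err;
  // e.g. 'srgb' true false
  console.log(metadata.space, metadata.hasProfile, metadata.hasAlpha);
});
```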
@@ -319,9 +329,23 @@ Do not enlarge the output image if the input image width *or* height are already
This is equivalent to GraphicsMagick's `>` geometry option: "change the dimensions of the image only if its width or height exceeds the geometry specification".
#### sharpen() #### blur([sigma])
Perform a mild sharpen of the output image. This typically reduces performance by 10%. When used without parameters, performs a fast, mild blur of the output image. This typically reduces performance by 10%.
When a `sigma` is provided, performs a slower, more accurate Gaussian blur. This typically reduces performance by 25%.
* `sigma`, if present, is a Number between 0.3 and 1000 representing the approximate blur radius in pixels.
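A brief usage sketch (the filenames and the 0.8 sigma are arbitrary examples):

```javascript
var sharp = require('sharp');

// Fast, mild blur
sharp('input.jpg').resize(300, 200).blur().toFile('mild-blur.jpg');

// Slower, more accurate Gaussian blur with an explicit sigma
sharp('input.jpg').resize(300, 200).blur(0.8).toFile('gaussian-blur.jpg');
```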
#### sharpen([radius], [flat], [jagged])
When used without parameters, performs a fast, mild sharpen of the output image. This typically reduces performance by 10%.
When a `radius` is provided, performs a slower, more accurate sharpen of the L channel in the LAB colour space. Separate control over the level of sharpening in "flat" and "jagged" areas is available. This typically reduces performance by 50%.
* `radius`, if present, is an integral Number representing the sharpen mask radius in pixels.
* `flat`, if present, is a Number representing the level of sharpening to apply to "flat" areas, defaulting to a value of 1.0.
* `jagged`, if present, is a Number representing the level of sharpening to apply to "jagged" areas, defaulting to a value of 2.0.
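A brief usage sketch (the filenames and parameter values are arbitrary examples):

```javascript
var sharp = require('sharp');

// Fast, mild sharpen
sharp('input.jpg').resize(300, 200).sharpen().toFile('mild-sharpen.jpg');

// Slower LAB-based sharpen: 3px mask radius, gentler in "flat" areas, stronger on "jagged" edges
sharp('input.jpg').resize(300, 200).sharpen(3, 0.5, 3).toFile('lab-sharpen.jpg');
```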
#### interpolateWith(interpolator)
@@ -380,7 +404,9 @@ Use progressive (interlace) scan for JPEG and PNG output. This typically reduces
#### withMetadata()
Include all metadata (ICC, EXIF, XMP) from the input image in the output image. The default behaviour is to strip all metadata. Include all metadata (EXIF, XMP, IPTC) from the input image in the output image. This will also convert to and add the latest web-friendly v2 sRGB ICC profile.
The default behaviour is to strip all metadata and convert to the device-independent sRGB colour space.
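For example (sketch only; the filenames are placeholders):

```javascript
var sharp = require('sharp');

// Retain EXIF/XMP/IPTC and embed the web-friendly v2 sRGB ICC profile in the output
sharp('input.jpg').resize(800, 600).withMetadata().toFile('with-metadata.jpg');
```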
#### compressionLevel(compressionLevel)
@@ -388,6 +414,12 @@ An advanced setting for the _zlib_ compression level of the lossless PNG output
`compressionLevel` is a Number between 0 and 9.
#### withoutAdaptiveFiltering()
_Requires libvips 7.42.0+_
An advanced setting to disable adaptive row filtering for the lossless PNG output format.
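For example (sketch only; `diagram.png` is a placeholder):

```javascript
var sharp = require('sharp');

// Diagrams and line art often compress just as well without adaptive row filtering
sharp('diagram.png').resize(1024, 768).withoutAdaptiveFiltering().toFile('diagram-small.png');
```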
### Output methods
#### toFile(filename, [callback])
@@ -397,7 +429,7 @@ An advanced setting for the _zlib_ compression level of the lossless PNG output
`callback`, if present, is called with two arguments `(err, info)` where:
* `err` contains an error message, if any.
* `info` contains the output image `format`, `width` and `height`. * `info` contains the output image `format`, `size` (bytes), `width` and `height`.
A Promises/A+ promise is returned when `callback` is not provided.
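A minimal sketch of the callback form, showing the new `size` attribute (the filenames are placeholders):

```javascript
var sharp = require('sharp');

sharp('input.jpg').resize(300, 200).toFile('thumbnail.jpg', function(err, info) {
  if (err) throw err;
  // e.g. 'jpeg' 12345 300 200 (size is the output length in bytes)
  console.log(info.format, info.size, info.width, info.height);
});
```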
@@ -409,7 +441,7 @@ Write image data to a Buffer, the format of which will match the input image by
* `err` is an error message, if any.
* `buffer` is the output image data.
* `info` contains the output image `format`, `width` and `height`. * `info` contains the output image `format`, `size` (bytes), `width` and `height`.
A Promises/A+ promise is returned when `callback` is not provided.
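A minimal sketch of the callback form (the `input.png` filename is a placeholder):

```javascript
var sharp = require('sharp');

sharp('input.png').resize(300, 200).toBuffer(function(err, buffer, info) {
  if (err) throw err;
  // Output format matches the input by default, PNG here; info.size should equal buffer.length
  console.log(info.format, buffer.length, info.size);
});
```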
@@ -432,7 +464,7 @@ sharp.cache(50, 200); // { current: 49, high: 99, memory: 50, items: 200}
#### sharp.concurrency([threads])
`threads`, if provided, is the Number of threads _libvips'_ should create for image processing. The default value is the number of CPU cores. A value of `0` will reset to this default. `threads`, if provided, is the Number of threads _libvips'_ should create for processing each image. The default value is the number of CPU cores. A value of `0` will reset to this default.
This method always returns the current concurrency.
@@ -442,6 +474,8 @@ sharp.concurrency(2); // 2
sharp.concurrency(0); // 4
```
The maximum number of images that can be processed in parallel is limited by libuv's `UV_THREADPOOL_SIZE` environment variable.
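A sketch of how these two levels of parallelism might be tuned (the values are illustrative):

```javascript
var sharp = require('sharp');

// libvips threads used to process each individual image
sharp.concurrency(2); // returns 2

// libuv threads cap how many images are processed at the same time;
// set before starting Node, e.g. UV_THREADPOOL_SIZE=8 node app.js
```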
#### sharp.counters()
Provides access to internal task counters.
@@ -528,7 +562,7 @@ sudo yum install -y --enablerepo=epel GraphicsMagick
### The contenders
* [imagemagick-native](https://github.com/mash/node-imagemagick-native) v1.2.2 - Supports Buffers only and blocks main V8 thread whilst processing. * [imagemagick-native](https://github.com/mash/node-imagemagick-native) v1.2.2 - Supports Buffers only
* [imagemagick](https://github.com/yourdeveloper/node-imagemagick) v0.1.3 - Supports filesystem only and "has been unmaintained for a long time".
* [gm](https://github.com/aheckmann/gm) v1.16.0 - Fully featured wrapper around GraphicsMagick.
* sharp v0.6.2 - Caching within libvips disabled to ensure a fair comparison.

Binary file not shown.

index.js

@@ -4,10 +4,12 @@ var path = require('path');
var util = require('util'); var util = require('util');
var stream = require('stream'); var stream = require('stream');
var semver = require('semver');
var color = require('color'); var color = require('color');
var BluebirdPromise = require('bluebird'); var BluebirdPromise = require('bluebird');
var sharp = require('./build/Release/sharp'); var sharp = require('./build/Release/sharp');
var libvipsVersion = sharp.libvipsVersion();
var Sharp = function(input) { var Sharp = function(input) {
if (!(this instanceof Sharp)) { if (!(this instanceof Sharp)) {
@@ -16,10 +18,11 @@ var Sharp = function(input) {
stream.Duplex.call(this); stream.Duplex.call(this);
this.options = { this.options = {
// input options // input options
bufferIn: null,
streamIn: false, streamIn: false,
sequentialRead: false, sequentialRead: false,
// ICC profile to use when input CMYK image has no embedded profile // ICC profiles
iccProfileCmyk: path.join(__dirname, 'icc', 'USWebCoatedSWOP.icc'), iccProfilePath: path.join(__dirname, 'icc') + path.sep,
// resize options // resize options
topOffsetPre: -1, topOffsetPre: -1,
leftOffsetPre: -1, leftOffsetPre: -1,
@@ -41,7 +44,10 @@ var Sharp = function(input) {
// operations // operations
background: [0, 0, 0, 255], background: [0, 0, 0, 255],
flatten: false, flatten: false,
sharpen: false, blurSigma: 0,
sharpenRadius: 0,
sharpenFlat: 1,
sharpenJagged: 2,
gamma: 0, gamma: 0,
greyscale: false, greyscale: false,
// output options // output options
@@ -49,6 +55,7 @@ var Sharp = function(input) {
progressive: false, progressive: false,
quality: 80, quality: 80,
compressionLevel: 6, compressionLevel: 6,
withoutAdaptiveFiltering: false,
streamOut: false, streamOut: false,
withMetadata: false withMetadata: false
}; };
@@ -58,14 +65,20 @@ var Sharp = function(input) {
} else if (typeof input === 'object' && input instanceof Buffer) { } else if (typeof input === 'object' && input instanceof Buffer) {
// input=buffer // input=buffer
if ( if (
(input.length > 1) && (input.length > 3) &&
(input[0] === 0xff && input[1] === 0xd8) || // JPEG // JPEG
(input[0] === 0x89 && input[1] === 0x50) || // PNG (input[0] === 0xFF && input[1] === 0xD8) ||
(input[0] === 0x52 && input[1] === 0x49) // WebP // PNG
(input[0] === 0x89 && input[1] === 0x50) ||
// WebP
(input[0] === 0x52 && input[1] === 0x49) ||
// TIFF
(input[0] === 0x4D && input[1] === 0x4D && input[2] === 0x00 && (input[3] === 0x2A || input[3] === 0x2B)) ||
(input[0] === 0x49 && input[1] === 0x49 && (input[2] === 0x2A || input[2] === 0x2B) && input[3] === 0x00)
) { ) {
this.options.bufferIn = input; this.options.bufferIn = input;
} else { } else {
throw new Error('Buffer contains an unsupported image format. JPEG, PNG and WebP are currently supported.'); throw new Error('Buffer contains an unsupported image format. JPEG, PNG, WebP and TIFF are currently supported.');
} }
} else { } else {
// input=stream // input=stream
@@ -83,16 +96,16 @@ Sharp.prototype._write = function(chunk, encoding, callback) {
/*jslint unused: false */ /*jslint unused: false */
if (this.options.streamIn) { if (this.options.streamIn) {
if (typeof chunk === 'object' && chunk instanceof Buffer) { if (typeof chunk === 'object' && chunk instanceof Buffer) {
if (typeof this.options.bufferIn === 'undefined') { if (this.options.bufferIn instanceof Buffer) {
// Create new Buffer
this.options.bufferIn = new Buffer(chunk.length);
chunk.copy(this.options.bufferIn);
} else {
// Append to existing Buffer // Append to existing Buffer
this.options.bufferIn = Buffer.concat( this.options.bufferIn = Buffer.concat(
[this.options.bufferIn, chunk], [this.options.bufferIn, chunk],
this.options.bufferIn.length + chunk.length this.options.bufferIn.length + chunk.length
); );
} else {
// Create new Buffer
this.options.bufferIn = new Buffer(chunk.length);
chunk.copy(this.options.bufferIn);
} }
callback(); callback();
} else { } else {
@@ -126,16 +139,6 @@ Sharp.prototype.extract = function(topOffset, leftOffset, width, height) {
return this; return this;
}; };
/*
Deprecated embed* methods, to be removed in v0.8.0
*/
Sharp.prototype.embedWhite = util.deprecate(function() {
return this.background('white').embed();
}, "embedWhite() is deprecated, use background('white').embed() instead");
Sharp.prototype.embedBlack = util.deprecate(function() {
return this.background('black').embed();
}, "embedBlack() is deprecated, use background('black').embed() instead");
/* /*
Set the background colour for embed and flatten operations. Set the background colour for embed and flatten operations.
Delegates to the 'Color' module, which can throw an Error Delegates to the 'Color' module, which can throw an Error
@@ -204,8 +207,64 @@ Sharp.prototype.withoutEnlargement = function(withoutEnlargement) {
return this; return this;
}; };
Sharp.prototype.sharpen = function(sharpen) { /*
this.options.sharpen = (typeof sharpen === 'boolean') ? sharpen : true; Blur the output image.
Call without a sigma to use a fast, mild blur.
Call with a sigma to use a slower, more accurate Gaussian blur.
*/
Sharp.prototype.blur = function(sigma) {
if (typeof sigma === 'undefined') {
// No arguments: default to mild blur
this.options.blurSigma = -1;
} else if (typeof sigma === 'boolean') {
// Boolean argument: apply mild blur?
this.options.blurSigma = sigma ? -1 : 0;
} else if (typeof sigma === 'number' && !Number.isNaN(sigma) && sigma >= 0.3 && sigma <= 1000) {
// Numeric argument: specific sigma
this.options.blurSigma = sigma;
} else {
throw new Error('Invalid blur sigma (0.3 to 1000.0) ' + sigma);
}
return this;
};
/*
Sharpen the output image.
Call without a radius to use a fast, mild sharpen.
Call with a radius to use a slow, accurate sharpen using the L of LAB colour space.
radius - size of mask in pixels, must be integer
flat - level of "flat" area sharpen, default 1
jagged - level of "jagged" area sharpen, default 2
*/
Sharp.prototype.sharpen = function(radius, flat, jagged) {
if (typeof radius === 'undefined') {
// No arguments: default to mild sharpen
this.options.sharpenRadius = -1;
} else if (typeof radius === 'boolean') {
// Boolean argument: apply mild sharpen?
this.options.sharpenRadius = radius ? -1 : 0;
} else if (typeof radius === 'number' && !Number.isNaN(radius) && (radius % 1 === 0) && radius >= 1) {
// Numeric argument: specific radius
this.options.sharpenRadius = radius;
// Control over flat areas
if (typeof flat !== 'undefined' && flat !== null) {
if (typeof flat === 'number' && !Number.isNaN(flat) && flat >= 0) {
this.options.sharpenFlat = flat;
} else {
throw new Error('Invalid sharpen level for flat areas ' + flat + ' (expected >= 0)');
}
}
// Control over jagged areas
if (typeof jagged !== 'undefined' && jagged !== null) {
if (typeof jagged === 'number' && !Number.isNaN(jagged) && jagged >= 0) {
this.options.sharpenJagged = jagged;
} else {
throw new Error('Invalid sharpen level for jagged areas ' + jagged + ' (expected >= 0)');
}
}
} else {
throw new Error('Invalid sharpen radius ' + radius + ' (expected integer >= 1)');
}
return this; return this;
}; };
@@ -269,6 +328,9 @@ Sharp.prototype.quality = function(quality) {
return this; return this;
}; };
/*
zlib compression level for PNG output
*/
Sharp.prototype.compressionLevel = function(compressionLevel) { Sharp.prototype.compressionLevel = function(compressionLevel) {
if (!Number.isNaN(compressionLevel) && compressionLevel >= 0 && compressionLevel <= 9) { if (!Number.isNaN(compressionLevel) && compressionLevel >= 0 && compressionLevel <= 9) {
this.options.compressionLevel = compressionLevel; this.options.compressionLevel = compressionLevel;
@@ -278,6 +340,18 @@ Sharp.prototype.compressionLevel = function(compressionLevel) {
return this; return this;
}; };
/*
Disable the use of adaptive row filtering for PNG output - requires libvips 7.41.0+
*/
Sharp.prototype.withoutAdaptiveFiltering = function(withoutAdaptiveFiltering) {
if (semver.gte(libvipsVersion, '7.41.0')) {
this.options.withoutAdaptiveFiltering = (typeof withoutAdaptiveFiltering === 'boolean') ? withoutAdaptiveFiltering : true;
} else {
console.error('withoutAdaptiveFiltering requires libvips 7.41.0+');
}
return this;
};
Sharp.prototype.withMetadata = function(withMetadata) { Sharp.prototype.withMetadata = function(withMetadata) {
this.options.withMetadata = (typeof withMetadata === 'boolean') ? withMetadata : true; this.options.withMetadata = (typeof withMetadata === 'boolean') ? withMetadata : true;
return this; return this;
@@ -506,3 +580,10 @@ module.exports.concurrency = function(concurrency) {
module.exports.counters = function() { module.exports.counters = function() {
return sharp.counters(); return sharp.counters();
}; };
/*
Get the version of the libvips library
*/
module.exports.libvipsVersion = function() {
return libvipsVersion;
};


@@ -1,6 +1,6 @@
{ {
"name": "sharp", "name": "sharp",
"version": "0.7.2", "version": "0.8.3",
"author": "Lovell Fuller <npm@lovell.info>", "author": "Lovell Fuller <npm@lovell.info>",
"contributors": [ "contributors": [
"Pierre Inglebert <pierre.inglebert@gmail.com>", "Pierre Inglebert <pierre.inglebert@gmail.com>",
@@ -13,7 +13,7 @@
"Brandon Aaron <hello.brandon@aaron.sh>", "Brandon Aaron <hello.brandon@aaron.sh>",
"Andreas Lind <andreas@one.com>" "Andreas Lind <andreas@one.com>"
], ],
"description": "High performance Node.js module to resize JPEG, PNG and WebP images using the libvips library", "description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library",
"scripts": { "scripts": {
"test": "node ./node_modules/istanbul/lib/cli.js cover ./node_modules/mocha/bin/_mocha -- --slow=5000 --timeout=10000 ./test/unit/*.js" "test": "node ./node_modules/istanbul/lib/cli.js cover ./node_modules/mocha/bin/_mocha -- --slow=5000 --timeout=10000 ./test/unit/*.js"
}, },
@@ -27,28 +27,22 @@
"png", "png",
"webp", "webp",
"tiff", "tiff",
"gif",
"resize", "resize",
"thumbnail", "thumbnail",
"sharpen",
"crop", "crop",
"extract",
"embed",
"libvips", "libvips",
"vips", "vips"
"fast",
"buffer",
"stream"
], ],
"dependencies": { "dependencies": {
"bluebird": "^2.3.10", "bluebird": "^2.3.11",
"color": "^0.7.1", "color": "^0.7.3",
"nan": "^1.4.0" "nan": "^1.4.1",
"semver": "^4.1.0"
}, },
"devDependencies": { "devDependencies": {
"mocha": "^2.0.1", "mocha": "^2.0.1",
"mocha-jshint": "^0.0.9", "mocha-jshint": "^0.0.9",
"istanbul": "^0.3.2", "istanbul": "^0.3.5",
"coveralls": "^2.11.2" "coveralls": "^2.11.2"
}, },
"license": "Apache 2.0", "license": "Apache 2.0",


@@ -5,22 +5,23 @@
# * Mac OS # * Mac OS
# * Debian Linux # * Debian Linux
# * Debian 7, 8 # * Debian 7, 8
# * Ubuntu 12.04, 14.04, 14.10 # * Ubuntu 12.04, 14.04, 14.10, 15.04
# * Mint 13, 17 # * Mint 13, 17
# * Red Hat Linux # * Red Hat Linux
# * RHEL/Centos/Scientific 6, 7 # * RHEL/Centos/Scientific 6, 7
# * Fedora 21, 22 # * Fedora 21, 22
# * Amazon Linux 2014.09
vips_version_minimum=7.38.5 vips_version_minimum=7.40.0
vips_version_latest_major=7.40 vips_version_latest_major=7.42
vips_version_latest_minor=11 vips_version_latest_minor=0
install_libvips_from_source() { install_libvips_from_source() {
echo "Compiling libvips $vips_version_latest_major.$vips_version_latest_minor from source" echo "Compiling libvips $vips_version_latest_major.$vips_version_latest_minor from source"
curl -O http://www.vips.ecs.soton.ac.uk/supported/$vips_version_latest_major/vips-$vips_version_latest_major.$vips_version_latest_minor.tar.gz curl -O http://www.vips.ecs.soton.ac.uk/supported/$vips_version_latest_major/vips-$vips_version_latest_major.$vips_version_latest_minor.tar.gz
tar zvxf vips-$vips_version_latest_major.$vips_version_latest_minor.tar.gz tar zvxf vips-$vips_version_latest_major.$vips_version_latest_minor.tar.gz
cd vips-$vips_version_latest_major.$vips_version_latest_minor cd vips-$vips_version_latest_major.$vips_version_latest_minor
./configure --enable-debug=no --enable-docs=no --enable-cxx=yes --without-python --without-orc --without-fftw $1 ./configure --enable-debug=no --enable-docs=no --enable-cxx=yes --without-python --without-orc --without-fftw --without-gsf $1
make make
make install make install
cd .. cd ..
@@ -86,17 +87,23 @@ case $(uname -s) in
DISTRO=$(lsb_release -c -s) DISTRO=$(lsb_release -c -s)
echo "Detected Debian Linux '$DISTRO'" echo "Detected Debian Linux '$DISTRO'"
case "$DISTRO" in case "$DISTRO" in
jessie|trusty|utopic|qiana) jessie|vivid)
# Debian 8, Ubuntu 14, Mint 17 # Debian 8, Ubuntu 15
echo "Installing libvips via apt-get" echo "Installing libvips via apt-get"
apt-get install -y libvips-dev apt-get install -y libvips-dev
;; ;;
trusty|utopic|qiana|rebecca)
# Ubuntu 14, Mint 17
echo "Installing libvips dependencies via apt-get"
apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff5-dev libexif-dev liblcms2-dev libxml2-dev swig libmagickwand-dev curl
install_libvips_from_source
;;
precise|wheezy|maya) precise|wheezy|maya)
# Debian 7, Ubuntu 12.04, Mint 13 # Debian 7, Ubuntu 12.04, Mint 13
echo "Installing libvips dependencies via apt-get" echo "Installing libvips dependencies via apt-get"
add-apt-repository -y ppa:lyrasis/precise-backports add-apt-repository -y ppa:lyrasis/precise-backports
apt-get update apt-get update
apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff4-dev libexif-dev libxml2-dev swig libmagickwand-dev curl apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff4-dev libexif-dev liblcms2-dev libxml2-dev swig libmagickwand-dev curl
install_libvips_from_source install_libvips_from_source
;; ;;
*) *)
@@ -113,14 +120,14 @@ case $(uname -s) in
# RHEL/CentOS 7 # RHEL/CentOS 7
echo "Installing libvips dependencies via yum" echo "Installing libvips dependencies via yum"
yum groupinstall -y "Development Tools" yum groupinstall -y "Development Tools"
yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel ImageMagick-devel gobject-introspection-devel libwebp-devel curl yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel lcms-devel ImageMagick-devel gobject-introspection-devel libwebp-devel curl
install_libvips_from_source "--prefix=/usr" install_libvips_from_source "--prefix=/usr"
;; ;;
"Red Hat Enterprise Linux release 6."*|"CentOS release 6."*|"Scientific Linux release 6."*) "Red Hat Enterprise Linux release 6."*|"CentOS release 6."*|"Scientific Linux release 6."*)
# RHEL/CentOS 6 # RHEL/CentOS 6
echo "Installing libvips dependencies via yum" echo "Installing libvips dependencies via yum"
yum groupinstall -y "Development Tools" yum groupinstall -y "Development Tools"
yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel ImageMagick-devel curl yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel lcms-devel ImageMagick-devel curl
yum install -y http://li.nux.ro/download/nux/dextop/el6/x86_64/nux-dextop-release-0-2.el6.nux.noarch.rpm yum install -y http://li.nux.ro/download/nux/dextop/el6/x86_64/nux-dextop-release-0-2.el6.nux.noarch.rpm
yum install -y --enablerepo=nux-dextop gobject-introspection-devel yum install -y --enablerepo=nux-dextop gobject-introspection-devel
yum install -y http://rpms.famillecollet.com/enterprise/remi-release-6.rpm yum install -y http://rpms.famillecollet.com/enterprise/remi-release-6.rpm
@@ -137,6 +144,19 @@ case $(uname -s) in
sorry "$RELEASE" sorry "$RELEASE"
;; ;;
esac esac
elif [ -f /etc/system-release ]; then
# Probably Amazon Linux
RELEASE=$(cat /etc/system-release)
case $RELEASE in
"Amazon Linux AMI release 2014.09")
# Amazon Linux
echo "Detected '$RELEASE'"
echo "Installing libvips dependencies via yum"
yum groupinstall -y "Development Tools"
yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel lcms-devel ImageMagick-devel gobject-introspection-devel libwebp-devel curl
install_libvips_from_source "--prefix=/usr"
;;
esac
else else
# Unsupported OS # Unsupported OS
sorry "$(uname -a)" sorry "$(uname -a)"


@@ -4,96 +4,162 @@
#include "common.h" #include "common.h"
// How many tasks are in the queue? namespace sharp {
volatile int counter_queue = 0;
// How many tasks are being processed? // How many tasks are in the queue?
volatile int counter_process = 0; volatile int counterQueue = 0;
// Filename extension checkers // How many tasks are being processed?
static bool ends_with(std::string const &str, std::string const &end) { volatile int counterProcess = 0;
return str.length() >= end.length() && 0 == str.compare(str.length() - end.length(), end.length(), end);
}
bool is_jpeg(std::string const &str) {
return ends_with(str, ".jpg") || ends_with(str, ".jpeg") || ends_with(str, ".JPG") || ends_with(str, ".JPEG");
}
bool is_png(std::string const &str) {
return ends_with(str, ".png") || ends_with(str, ".PNG");
}
bool is_webp(std::string const &str) {
return ends_with(str, ".webp") || ends_with(str, ".WEBP");
}
bool is_tiff(std::string const &str) {
return ends_with(str, ".tif") || ends_with(str, ".tiff") || ends_with(str, ".TIF") || ends_with(str, ".TIFF");
}
unsigned char const MARKER_JPEG[] = {0xff, 0xd8}; // Filename extension checkers
unsigned char const MARKER_PNG[] = {0x89, 0x50}; static bool EndsWith(std::string const &str, std::string const &end) {
unsigned char const MARKER_WEBP[] = {0x52, 0x49}; return str.length() >= end.length() && 0 == str.compare(str.length() - end.length(), end.length(), end);
/*
Initialise a VipsImage from a buffer. Supports JPEG, PNG and WebP.
Returns the ImageType detected, if any.
*/
ImageType
sharp_init_image_from_buffer(VipsImage **image, void *buffer, size_t const length, VipsAccess const access) {
ImageType imageType = UNKNOWN;
if (memcmp(MARKER_JPEG, buffer, 2) == 0) {
if (!vips_jpegload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = JPEG;
}
} else if(memcmp(MARKER_PNG, buffer, 2) == 0) {
if (!vips_pngload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = PNG;
}
} else if(memcmp(MARKER_WEBP, buffer, 2) == 0) {
if (!vips_webpload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = WEBP;
}
} }
return imageType; bool IsJpeg(std::string const &str) {
} return EndsWith(str, ".jpg") || EndsWith(str, ".jpeg") || EndsWith(str, ".JPG") || EndsWith(str, ".JPEG");
}
/* bool IsPng(std::string const &str) {
Initialise a VipsImage from a file. return EndsWith(str, ".png") || EndsWith(str, ".PNG");
Returns the ImageType detected, if any. }
*/ bool IsWebp(std::string const &str) {
ImageType return EndsWith(str, ".webp") || EndsWith(str, ".WEBP");
sharp_init_image_from_file(VipsImage **image, char const *file, VipsAccess const access) { }
ImageType imageType = UNKNOWN; bool IsTiff(std::string const &str) {
if (vips_foreign_is_a("jpegload", file)) { return EndsWith(str, ".tif") || EndsWith(str, ".tiff") || EndsWith(str, ".TIF") || EndsWith(str, ".TIFF");
if (!vips_jpegload(file, image, "access", access, NULL)) {
imageType = JPEG;
}
} else if (vips_foreign_is_a("pngload", file)) {
if (!vips_pngload(file, image, "access", access, NULL)) {
imageType = PNG;
}
} else if (vips_foreign_is_a("webpload", file)) {
if (!vips_webpload(file, image, "access", access, NULL)) {
imageType = WEBP;
}
} else if (vips_foreign_is_a("tiffload", file)) {
if (!vips_tiffload(file, image, "access", access, NULL)) {
imageType = TIFF;
}
} else if(vips_foreign_is_a("magickload", file)) {
if (!vips_magickload(file, image, "access", access, NULL)) {
imageType = MAGICK;
}
} }
return imageType;
}
/* // Buffer content checkers
Does this image have an alpha channel? unsigned char const MARKER_JPEG[] = {0xff, 0xd8};
Uses colour space interpretation with number of channels to guess this. unsigned char const MARKER_PNG[] = {0x89, 0x50};
*/ unsigned char const MARKER_WEBP[] = {0x52, 0x49};
bool
sharp_image_has_alpha(VipsImage *image) { static bool buffer_is_tiff(char *buffer, size_t len) {
return ( return (
(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) || len >= 4 && (
(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) || (buffer[0] == 'M' && buffer[1] == 'M' && buffer[2] == '\0' && (buffer[3] == '*' || buffer[3] == '+')) ||
(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK) (buffer[0] == 'I' && buffer[1] == 'I' && (buffer[2] == '*' || buffer[2] == '+') && buffer[3] == '\0')
); )
} );
}
/*
Determine image format of a buffer.
*/
ImageType DetermineImageType(void *buffer, size_t const length) {
ImageType imageType = ImageType::UNKNOWN;
if (length >= 4) {
if (memcmp(MARKER_JPEG, buffer, 2) == 0) {
imageType = ImageType::JPEG;
} else if (memcmp(MARKER_PNG, buffer, 2) == 0) {
imageType = ImageType::PNG;
} else if (memcmp(MARKER_WEBP, buffer, 2) == 0) {
imageType = ImageType::WEBP;
} else if (buffer_is_tiff(static_cast<char*>(buffer), length)) {
imageType = ImageType::TIFF;
}
}
return imageType;
}
/*
Initialise and return a VipsImage from a buffer. Supports JPEG, PNG, WebP and TIFF.
*/
VipsImage* InitImage(ImageType imageType, void *buffer, size_t const length, VipsAccess const access) {
VipsImage *image = NULL;
if (imageType == ImageType::JPEG) {
vips_jpegload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::PNG) {
vips_pngload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::WEBP) {
vips_webpload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::TIFF) {
vips_tiffload_buffer(buffer, length, &image, "access", access, NULL);
}
return image;
}
/*
Inpect the first 2-4 bytes of a file to determine image format
*/
ImageType DetermineImageType(char const *file) {
ImageType imageType = ImageType::UNKNOWN;
if (vips_foreign_is_a("jpegload", file)) {
imageType = ImageType::JPEG;
} else if (vips_foreign_is_a("pngload", file)) {
imageType = ImageType::PNG;
} else if (vips_foreign_is_a("webpload", file)) {
imageType = ImageType::WEBP;
} else if (vips_foreign_is_a("tiffload", file)) {
imageType = ImageType::TIFF;
} else if(vips_foreign_is_a("magickload", file)) {
imageType = ImageType::MAGICK;
}
return imageType;
}
/*
Initialise and return a VipsImage from a file.
*/
VipsImage* InitImage(ImageType imageType, char const *file, VipsAccess const access) {
VipsImage *image = NULL;
if (imageType == ImageType::JPEG) {
vips_jpegload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::PNG) {
vips_pngload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::WEBP) {
vips_webpload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::TIFF) {
vips_tiffload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::MAGICK) {
vips_magickload(file, &image, "access", access, NULL);
}
return image;
}
/*
Does this image have an embedded profile?
*/
bool HasProfile(VipsImage *image) {
return (vips_image_get_typeof(image, VIPS_META_ICC_NAME) > 0) ? TRUE : FALSE;
}
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool HasAlpha(VipsImage *image) {
return (
(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) ||
(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) ||
(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK)
);
}
/*
Get EXIF Orientation of image, if any.
*/
int ExifOrientation(VipsImage const *image) {
int orientation = 0;
const char *exif;
if (
vips_image_get_typeof(image, "exif-ifd0-Orientation") != 0 &&
!vips_image_get_string(image, "exif-ifd0-Orientation", &exif)
) {
orientation = atoi(&exif[0]);
}
return orientation;
}
/*
Returns the window size for the named interpolator. For example,
a window size of 3 means a 3x3 pixel grid is used for the calculation.
*/
int InterpolatorWindowSize(char const *name) {
VipsInterpolate *interpolator = vips_interpolate_new(name);
int window_size = vips_interpolate_get_window_size(interpolator);
g_object_unref(interpolator);
return window_size;
}
} // namespace


@@ -1,46 +1,71 @@
#ifndef SHARP_COMMON_H #ifndef SHARP_COMMON_H
#define SHARP_COMMON_H #define SHARP_COMMON_H
typedef enum { namespace sharp {
UNKNOWN,
JPEG,
PNG,
WEBP,
TIFF,
MAGICK
} ImageType;
// Filename extension checkers enum class ImageType {
bool is_jpeg(std::string const &str); UNKNOWN,
bool is_png(std::string const &str); JPEG,
bool is_webp(std::string const &str); PNG,
bool is_tiff(std::string const &str); WEBP,
TIFF,
MAGICK
};
// How many tasks are in the queue? // How many tasks are in the queue?
extern volatile int counter_queue; extern volatile int counterQueue;
// How many tasks are being processed? // How many tasks are being processed?
extern volatile int counter_process; extern volatile int counterProcess;
/* // Filename extension checkers
Initialise a VipsImage from a buffer. Supports JPEG, PNG and WebP. bool IsJpeg(std::string const &str);
Returns the ImageType detected, if any. bool IsPng(std::string const &str);
*/ bool IsWebp(std::string const &str);
ImageType bool IsTiff(std::string const &str);
sharp_init_image_from_buffer(VipsImage **image, void *buffer, size_t const length, VipsAccess const access);
/* /*
Initialise a VipsImage from a file. Determine image format of a buffer.
Returns the ImageType detected, if any. */
*/ ImageType DetermineImageType(void *buffer, size_t const length);
ImageType
sharp_init_image_from_file(VipsImage **image, char const *file, VipsAccess const access);
/* /*
Does this image have an alpha channel? Determine image format of a file.
Uses colour space interpretation with number of channels to guess this. */
*/ ImageType DetermineImageType(char const *file);
bool
sharp_image_has_alpha(VipsImage *image); /*
Initialise and return a VipsImage from a buffer. Supports JPEG, PNG, WebP and TIFF.
*/
VipsImage* InitImage(ImageType imageType, void *buffer, size_t const length, VipsAccess const access);
/*
Initialise and return a VipsImage from a file.
*/
VipsImage* InitImage(ImageType imageType, char const *file, VipsAccess const access);
/*
Does this image have an embedded profile?
*/
bool HasProfile(VipsImage *image);
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool HasAlpha(VipsImage *image);
/*
Get EXIF Orientation of image, if any.
*/
int ExifOrientation(VipsImage const *image);
/*
Returns the window size for the named interpolator. For example,
a window size of 3 means a 3x3 pixel grid is used for the calculation.
*/
int InterpolatorWindowSize(char const *name);
} // namespace
#endif #endif


@@ -7,6 +7,7 @@
#include "metadata.h" #include "metadata.h"
using namespace v8; using namespace v8;
using namespace sharp;
struct MetadataBaton { struct MetadataBaton {
// Input // Input
@@ -19,6 +20,7 @@ struct MetadataBaton {
int height; int height;
std::string space; std::string space;
int channels; int channels;
bool hasProfile;
bool hasAlpha; bool hasAlpha;
int orientation; int orientation;
std::string err; std::string err;
@@ -36,49 +38,50 @@ class MetadataWorker : public NanAsyncWorker {
void Execute() { void Execute() {
// Decrement queued task counter // Decrement queued task counter
g_atomic_int_dec_and_test(&counter_queue); g_atomic_int_dec_and_test(&counterQueue);
ImageType imageType = UNKNOWN; ImageType imageType = ImageType::UNKNOWN;
VipsImage *image; VipsImage *image = NULL;
if (baton->bufferInLength > 1) { if (baton->bufferInLength > 1) {
// From buffer // From buffer
imageType = sharp_init_image_from_buffer(&image, baton->bufferIn, baton->bufferInLength, VIPS_ACCESS_RANDOM); imageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
if (imageType == UNKNOWN) { if (imageType != ImageType::UNKNOWN) {
image = InitImage(imageType, baton->bufferIn, baton->bufferInLength, VIPS_ACCESS_RANDOM);
} else {
(baton->err).append("Input buffer contains unsupported image format"); (baton->err).append("Input buffer contains unsupported image format");
} }
} else { } else {
// From file // From file
imageType = sharp_init_image_from_file(&image, baton->fileIn.c_str(), VIPS_ACCESS_RANDOM); imageType = DetermineImageType(baton->fileIn.c_str());
if (imageType == UNKNOWN) { if (imageType != ImageType::UNKNOWN) {
image = InitImage(imageType, baton->fileIn.c_str(), VIPS_ACCESS_RANDOM);
} else {
(baton->err).append("File is of an unsupported image format"); (baton->err).append("File is of an unsupported image format");
} }
} }
if (imageType != UNKNOWN) { if (image != NULL && imageType != ImageType::UNKNOWN) {
// Image type // Image type
switch (imageType) { switch (imageType) {
case JPEG: baton->format = "jpeg"; break; case ImageType::JPEG: baton->format = "jpeg"; break;
case PNG: baton->format = "png"; break; case ImageType::PNG: baton->format = "png"; break;
case WEBP: baton->format = "webp"; break; case ImageType::WEBP: baton->format = "webp"; break;
case TIFF: baton->format = "tiff"; break; case ImageType::TIFF: baton->format = "tiff"; break;
case MAGICK: baton->format = "magick"; break; case ImageType::MAGICK: baton->format = "magick"; break;
case UNKNOWN: default: baton->format = ""; case ImageType::UNKNOWN: break;
} }
// VipsImage attributes // VipsImage attributes
baton->width = image->Xsize; baton->width = image->Xsize;
baton->height = image->Ysize; baton->height = image->Ysize;
baton->space = vips_enum_nick(VIPS_TYPE_INTERPRETATION, image->Type); baton->space = vips_enum_nick(VIPS_TYPE_INTERPRETATION, image->Type);
baton->channels = image->Bands; baton->channels = image->Bands;
baton->hasAlpha = sharp_image_has_alpha(image); baton->hasProfile = HasProfile(image);
// EXIF Orientation // Derived attributes
const char *exif; baton->hasAlpha = HasAlpha(image);
if (!vips_image_get_string(image, "exif-ifd0-Orientation", &exif)) { baton->orientation = ExifOrientation(image);
baton->orientation = atoi(&exif[0]); // Drop image reference
}
}
// Clean up
if (imageType != UNKNOWN) {
g_object_unref(image); g_object_unref(image);
} }
// Clean up
vips_error_clear(); vips_error_clear();
vips_thread_shutdown(); vips_thread_shutdown();
} }
@@ -98,6 +101,7 @@ class MetadataWorker : public NanAsyncWorker {
info->Set(NanNew<String>("height"), NanNew<Number>(baton->height)); info->Set(NanNew<String>("height"), NanNew<Number>(baton->height));
info->Set(NanNew<String>("space"), NanNew<String>(baton->space)); info->Set(NanNew<String>("space"), NanNew<String>(baton->space));
info->Set(NanNew<String>("channels"), NanNew<Number>(baton->channels)); info->Set(NanNew<String>("channels"), NanNew<Number>(baton->channels));
info->Set(NanNew<String>("hasProfile"), NanNew<Boolean>(baton->hasProfile));
info->Set(NanNew<String>("hasAlpha"), NanNew<Boolean>(baton->hasAlpha)); info->Set(NanNew<String>("hasAlpha"), NanNew<Boolean>(baton->hasAlpha));
if (baton->orientation > 0) { if (baton->orientation > 0) {
info->Set(NanNew<String>("orientation"), NanNew<Number>(baton->orientation)); info->Set(NanNew<String>("orientation"), NanNew<Number>(baton->orientation));
@@ -138,7 +142,7 @@ NAN_METHOD(metadata) {
NanAsyncQueueWorker(new MetadataWorker(callback, baton)); NanAsyncQueueWorker(new MetadataWorker(callback, baton));
// Increment queued task counter // Increment queued task counter
g_atomic_int_inc(&counter_queue); g_atomic_int_inc(&counterQueue);
NanReturnUndefined(); NanReturnUndefined();
} }


@@ -11,26 +11,27 @@
#include "resize.h" #include "resize.h"
using namespace v8; using namespace v8;
using namespace sharp;
typedef enum { enum class Canvas {
CROP, CROP,
MAX, MAX,
EMBED EMBED
} Canvas; };
typedef enum { enum class Angle {
ANGLE_0, D0,
ANGLE_90, D90,
ANGLE_180, D180,
ANGLE_270, D270,
ANGLE_LAST DLAST
} Angle; };
struct ResizeBaton { struct ResizeBaton {
std::string fileIn; std::string fileIn;
void* bufferIn; void* bufferIn;
size_t bufferInLength; size_t bufferInLength;
std::string iccProfileCmyk; std::string iccProfilePath;
std::string output; std::string output;
std::string outputFormat; std::string outputFormat;
void* bufferOut; void* bufferOut;
@@ -50,7 +51,10 @@ struct ResizeBaton {
std::string interpolator; std::string interpolator;
double background[4]; double background[4];
bool flatten; bool flatten;
bool sharpen; double blurSigma;
int sharpenRadius;
double sharpenFlat;
double sharpenJagged;
double gamma; double gamma;
bool greyscale; bool greyscale;
int angle; int angle;
@@ -61,6 +65,7 @@ struct ResizeBaton {
VipsAccess accessMethod; VipsAccess accessMethod;
int quality; int quality;
int compressionLevel; int compressionLevel;
bool withoutAdaptiveFiltering;
std::string err; std::string err;
bool withMetadata; bool withMetadata;
@@ -70,16 +75,23 @@ struct ResizeBaton {
bufferOutLength(0), bufferOutLength(0),
topOffsetPre(-1), topOffsetPre(-1),
topOffsetPost(-1), topOffsetPost(-1),
canvas(CROP), canvas(Canvas::CROP),
gravity(0), gravity(0),
flatten(false), flatten(false),
sharpen(false), blurSigma(0.0),
sharpenRadius(0),
sharpenFlat(1.0),
sharpenJagged(2.0),
gamma(0.0), gamma(0.0),
greyscale(false), greyscale(false),
angle(0),
flip(false), flip(false),
flop(false), flop(false),
progressive(false), progressive(false),
withoutEnlargement(false), withoutEnlargement(false),
quality(80),
compressionLevel(6),
withoutAdaptiveFiltering(false),
withMetadata(false) { withMetadata(false) {
background[0] = 0.0; background[0] = 0.0;
background[1] = 0.0; background[1] = 0.0;
@@ -99,43 +111,48 @@ class ResizeWorker : public NanAsyncWorker {
*/ */
void Execute() { void Execute() {
// Decrement queued task counter // Decrement queued task counter
g_atomic_int_dec_and_test(&counter_queue); g_atomic_int_dec_and_test(&counterQueue);
// Increment processing task counter // Increment processing task counter
g_atomic_int_inc(&counter_process); g_atomic_int_inc(&counterProcess);
// Latest v2 sRGB ICC profile
std::string srgbProfile = baton->iccProfilePath + "sRGB_IEC61966-2-1_black_scaled.icc";
// Hang image references from this hook object // Hang image references from this hook object
VipsObject *hook = reinterpret_cast<VipsObject*>(vips_image_new()); VipsObject *hook = reinterpret_cast<VipsObject*>(vips_image_new());
// Input // Input
ImageType inputImageType = UNKNOWN; ImageType inputImageType = ImageType::UNKNOWN;
VipsImage *image = vips_image_new(); VipsImage *image;
vips_object_local(hook, image);
if (baton->bufferInLength > 1) { if (baton->bufferInLength > 1) {
// From buffer // From buffer
inputImageType = sharp_init_image_from_buffer(&image, baton->bufferIn, baton->bufferInLength, baton->accessMethod); inputImageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
if (inputImageType == UNKNOWN) { if (inputImageType != ImageType::UNKNOWN) {
image = InitImage(inputImageType, baton->bufferIn, baton->bufferInLength, baton->accessMethod);
} else {
(baton->err).append("Input buffer contains unsupported image format"); (baton->err).append("Input buffer contains unsupported image format");
} }
} else { } else {
// From file // From file
inputImageType = sharp_init_image_from_file(&image, baton->fileIn.c_str(), baton->accessMethod); inputImageType = DetermineImageType(baton->fileIn.c_str());
if (inputImageType == UNKNOWN) { if (inputImageType != ImageType::UNKNOWN) {
image = InitImage(inputImageType, baton->fileIn.c_str(), baton->accessMethod);
} else {
(baton->err).append("File is of an unsupported image format"); (baton->err).append("File is of an unsupported image format");
} }
} }
if (inputImageType == UNKNOWN) { if (inputImageType == ImageType::UNKNOWN) {
return Error(baton, hook); return Error(baton, hook);
} }
vips_object_local(hook, image);
// Pre extraction // Pre extraction
if (baton->topOffsetPre != -1) { if (baton->topOffsetPre != -1) {
VipsImage *extractedPre = vips_image_new(); VipsImage *extractedPre;
vips_object_local(hook, extractedPre);
if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, NULL)) { if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, extractedPre);
image = extractedPre; image = extractedPre;
} }
@@ -147,7 +164,7 @@ class ResizeWorker : public NanAsyncWorker {
Angle rotation; Angle rotation;
bool flip; bool flip;
std::tie(rotation, flip) = CalculateRotationAndFlip(baton->angle, image); std::tie(rotation, flip) = CalculateRotationAndFlip(baton->angle, image);
if (rotation == ANGLE_90 || rotation == ANGLE_270) { if (rotation == Angle::D90 || rotation == Angle::D270) {
// Swap input output width and height when rotating by 90 or 270 degrees // Swap input output width and height when rotating by 90 or 270 degrees
int swap = inputWidth; int swap = inputWidth;
inputWidth = inputHeight; inputWidth = inputHeight;
@@ -158,15 +175,18 @@ class ResizeWorker : public NanAsyncWorker {
baton->flip = TRUE; baton->flip = TRUE;
} }
// Get window size of interpolator, used for determining shrink vs affine
int interpolatorWindowSize = InterpolatorWindowSize(baton->interpolator.c_str());
// Scaling calculations // Scaling calculations
double factor; double factor;
if (baton->width > 0 && baton->height > 0) { if (baton->width > 0 && baton->height > 0) {
// Fixed width and height // Fixed width and height
double xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width); double xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
double yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height); double yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
factor = (baton->canvas == CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor); factor = (baton->canvas == Canvas::CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
// if max is set, we need to compute the real size of the thumb image // if max is set, we need to compute the real size of the thumb image
if (baton->canvas == MAX) { if (baton->canvas == Canvas::MAX) {
if (xfactor > yfactor) { if (xfactor > yfactor) {
baton->height = round(static_cast<double>(inputHeight) / xfactor); baton->height = round(static_cast<double>(inputHeight) / xfactor);
} else { } else {
@@ -187,10 +207,20 @@ class ResizeWorker : public NanAsyncWorker {
baton->width = inputWidth; baton->width = inputWidth;
baton->height = inputHeight; baton->height = inputHeight;
} }
int shrink = floor(factor);
// Calculate integral box shrink
int shrink = 1;
if (factor >= 2 && interpolatorWindowSize > 3) {
// Shrink less, affine more with interpolators that use at least 4x4 pixel window, e.g. bicubic
shrink = floor(factor * 3.0 / interpolatorWindowSize);
} else {
shrink = floor(factor);
}
if (shrink < 1) { if (shrink < 1) {
shrink = 1; shrink = 1;
} }
// Calculate residual float affine transformation
double residual = static_cast<double>(shrink) / factor; double residual = static_cast<double>(shrink) / factor;
// Do not enlarge the output if the input width *or* height are already less than the required dimensions // Do not enlarge the output if the input width *or* height are already less than the required dimensions
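As a rough worked example of the calculation above (values illustrative only): reducing by a factor of 10 with a bicubic interpolator, whose window size is 4, now gives an integral shrink of floor(10 × 3 / 4) = 7 and a residual affine scale of 7 / 10 = 0.7, whereas the previous code would have used shrink = 10 and residual = 1.0.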
@@ -206,7 +236,7 @@ class ResizeWorker : public NanAsyncWorker {
// Try to use libjpeg shrink-on-load, but not when applying gamma correction or pre-resize extract // Try to use libjpeg shrink-on-load, but not when applying gamma correction or pre-resize extract
int shrink_on_load = 1; int shrink_on_load = 1;
if (inputImageType == JPEG && baton->gamma == 0 && baton->topOffsetPre == -1) { if (inputImageType == ImageType::JPEG && shrink >= 2 && baton->gamma == 0 && baton->topOffsetPre == -1) {
if (shrink >= 8) { if (shrink >= 8) {
factor = factor / 8; factor = factor / 8;
shrink_on_load = 8; shrink_on_load = 8;
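A worked pass through the shrink-on-load path with illustrative numbers (the elided branches presumably pick 4 and 2 the same way):

// 6000px-wide JPEG resized to 720px with a 2x2-window interpolator:
//   factor   = 6000 / 720  = 8.33
//   shrink   = floor(8.33) = 8      -> shrink_on_load = 8, factor becomes 8.33 / 8 = 1.04
//   libjpeg decodes the image at 6000 / 8 = 750px wide
//   shrink   = floor(1.04) = 1      -> no further vips_shrink pass
//   residual = 1 / 1.04    = 0.96   -> vips_affine scales 750px x 0.96 = 720px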
@@ -221,55 +251,49 @@ class ResizeWorker : public NanAsyncWorker {
if (shrink_on_load > 1) { if (shrink_on_load > 1) {
// Recalculate integral shrink and double residual // Recalculate integral shrink and double residual
factor = std::max(factor, 1.0); factor = std::max(factor, 1.0);
shrink = floor(factor); if (factor >= 2 && interpolatorWindowSize > 3) {
shrink = floor(factor * 3.0 / interpolatorWindowSize);
} else {
shrink = floor(factor);
}
residual = static_cast<double>(shrink) / factor; residual = static_cast<double>(shrink) / factor;
// Reload input using shrink-on-load // Reload input using shrink-on-load
g_object_unref(image); VipsImage *shrunkOnLoad;
if (baton->bufferInLength > 1) { if (baton->bufferInLength > 1) {
if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &image, "shrink", shrink_on_load, NULL)) { if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
} else { } else {
if (vips_jpegload((baton->fileIn).c_str(), &image, "shrink", shrink_on_load, NULL)) { if (vips_jpegload((baton->fileIn).c_str(), &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
} }
vips_object_local(hook, shrunkOnLoad);
image = shrunkOnLoad;
} }
// Handle colour profile, if any, for non sRGB images // Ensure we're using a device-independent colour space
if (image->Type != VIPS_INTERPRETATION_sRGB) { if (HasProfile(image)) {
// Get the input colour profile // Convert to sRGB using embedded profile
if (vips_image_get_typeof(image, VIPS_META_ICC_NAME)) { VipsImage *transformed;
// Use embedded profile if (!vips_icc_transform(image, &transformed, srgbProfile.c_str(), "embedded", TRUE, NULL)) {
VipsImage *profile = vips_image_new(); // Embedded profile can fail, so only update references on success
vips_object_local(hook, profile); vips_object_local(hook, transformed);
if (vips_icc_import(image, &profile, "pcs", VIPS_PCS_XYZ, "embedded", TRUE, NULL)) { image = transformed;
return Error(baton, hook);
}
g_object_unref(image);
image = profile;
} else if (image->Type == VIPS_INTERPRETATION_CMYK) {
// CMYK with no embedded profile
VipsImage *profile = vips_image_new();
vips_object_local(hook, profile);
if (vips_icc_import(image, &profile, "pcs", VIPS_PCS_XYZ, "input_profile", (baton->iccProfileCmyk).c_str(), NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
image = profile;
} }
// Attempt to convert to sRGB colour space } else if (image->Type == VIPS_INTERPRETATION_CMYK) {
VipsImage *colourspaced = vips_image_new(); // Convert to sRGB using default "USWebCoatedSWOP" CMYK profile
vips_object_local(hook, colourspaced); std::string cmykProfile = baton->iccProfilePath + "USWebCoatedSWOP.icc";
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) { VipsImage *transformed;
if (vips_icc_transform(image, &transformed, srgbProfile.c_str(), "input_profile", cmykProfile.c_str(), NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, transformed);
image = colourspaced; image = transformed;
} }
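HasProfile and HasAlpha are new helpers referenced here but defined outside this diff; a plausible sketch based on the checks the previous inline code made (the band-count heuristic for HasAlpha is an assumption):

// Sketch only - the real helpers are not part of this diff
static bool HasProfile(VipsImage *image) {
  // Same check the removed code performed inline
  return vips_image_get_typeof(image, VIPS_META_ICC_NAME) != 0;
}
static bool HasAlpha(VipsImage *image) {
  // Assumption: grey+alpha (2 bands) or RGB+alpha (4 bands) images carry an alpha channel
  return image->Bands == 2 || image->Bands == 4;
}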
// Flatten image to remove alpha channel // Flatten image to remove alpha channel
if (baton->flatten && sharp_image_has_alpha(image)) { if (baton->flatten && HasAlpha(image)) {
// Background colour // Background colour
VipsArrayDouble *background = vips_array_double_newv( VipsArrayDouble *background = vips_array_double_newv(
3, // Ignore alpha channel as we're about to remove it 3, // Ignore alpha channel as we're about to remove it
@@ -277,52 +301,48 @@ class ResizeWorker : public NanAsyncWorker {
baton->background[1], baton->background[1],
baton->background[2] baton->background[2]
); );
VipsImage *flattened = vips_image_new(); VipsImage *flattened;
vips_object_local(hook, flattened);
if (vips_flatten(image, &flattened, "background", background, NULL)) { if (vips_flatten(image, &flattened, "background", background, NULL)) {
vips_area_unref(reinterpret_cast<VipsArea*>(background)); vips_area_unref(reinterpret_cast<VipsArea*>(background));
return Error(baton, hook); return Error(baton, hook);
}; };
vips_area_unref(reinterpret_cast<VipsArea*>(background)); vips_area_unref(reinterpret_cast<VipsArea*>(background));
g_object_unref(image); vips_object_local(hook, flattened);
image = flattened; image = flattened;
} }
// Gamma encoding (darken) // Gamma encoding (darken)
if (baton->gamma >= 1 && baton->gamma <= 3) { if (baton->gamma >= 1 && baton->gamma <= 3) {
VipsImage *gammaEncoded = vips_image_new(); VipsImage *gammaEncoded;
vips_object_local(hook, gammaEncoded);
if (vips_gamma(image, &gammaEncoded, "exponent", 1.0 / baton->gamma, NULL)) { if (vips_gamma(image, &gammaEncoded, "exponent", 1.0 / baton->gamma, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, gammaEncoded);
image = gammaEncoded; image = gammaEncoded;
} }
// Convert to greyscale (linear, therefore after gamma encoding, if any) // Convert to greyscale (linear, therefore after gamma encoding, if any)
if (baton->greyscale) { if (baton->greyscale) {
VipsImage *greyscale = vips_image_new(); VipsImage *greyscale;
vips_object_local(hook, greyscale);
if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) { if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, greyscale);
image = greyscale; image = greyscale;
} }
if (shrink > 1) { if (shrink > 1) {
VipsImage *shrunk = vips_image_new(); VipsImage *shrunk;
vips_object_local(hook, shrunk);
// Use vips_shrink with the integral reduction // Use vips_shrink with the integral reduction
if (vips_shrink(image, &shrunk, shrink, shrink, NULL)) { if (vips_shrink(image, &shrunk, shrink, shrink, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, shrunk);
image = shrunk; image = shrunk;
// Recalculate residual float based on dimensions of required vs shrunk images // Recalculate residual float based on dimensions of required vs shrunk images
double shrunkWidth = shrunk->Xsize; double shrunkWidth = shrunk->Xsize;
double shrunkHeight = shrunk->Ysize; double shrunkHeight = shrunk->Ysize;
if (rotation == ANGLE_90 || rotation == ANGLE_270) { if (rotation == Angle::D90 || rotation == Angle::D270) {
// Swap input/output width and height when rotating by 90 or 270 degrees // Swap input/output width and height when rotating by 90 or 270 degrees
int swap = shrunkWidth; int swap = shrunkWidth;
shrunkWidth = shrunkHeight; shrunkWidth = shrunkHeight;
@@ -330,7 +350,7 @@ class ResizeWorker : public NanAsyncWorker {
} }
double residualx = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth); double residualx = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth);
double residualy = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight); double residualy = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight);
if (baton->canvas == EMBED) { if (baton->canvas == Canvas::EMBED) {
residual = std::min(residualx, residualy); residual = std::min(residualx, residualy);
} else { } else {
residual = std::max(residualx, residualy); residual = std::max(residualx, residualy);
@@ -338,91 +358,102 @@ class ResizeWorker : public NanAsyncWorker {
} }
// Use vips_affine with the remaining float part // Use vips_affine with the remaining float part
if (residual != 0) { if (residual != 0.0) {
VipsImage *affined = vips_image_new(); // Apply Gaussian blur before large affine reductions
vips_object_local(hook, affined); if (residual < 1.0) {
// Calculate standard deviation
double sigma = ((1.0 / residual) - 0.4) / 3.0;
if (sigma >= 0.3) {
// Create Gaussian function for standard deviation
VipsImage *gaussian;
if (vips_gaussmat(&gaussian, sigma, 0.2, "separable", TRUE, "integer", TRUE, NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, gaussian);
// Apply Gaussian function
VipsImage *blurred;
if (vips_convsep(image, &blurred, gaussian, "precision", VIPS_PRECISION_INTEGER, NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, blurred);
image = blurred;
}
}
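The sigma formula only reaches the 0.3 threshold once the residual drops below roughly 0.77, so mild affine reductions skip the pre-blur entirely; two illustrative residuals:

// residual = 0.8  (mild reduction): sigma = ((1 / 0.8)  - 0.4) / 3 = 0.28 -> below 0.3, no blur
// residual = 0.25 (4x reduction):   sigma = ((1 / 0.25) - 0.4) / 3 = 1.2  -> separable Gaussian applied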
// Create interpolator - "bilinear" (default), "bicubic" or "nohalo" // Create interpolator - "bilinear" (default), "bicubic" or "nohalo"
VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.c_str()); VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.c_str());
vips_object_local(hook, interpolator);
// Perform affine transformation // Perform affine transformation
if (vips_affine(image, &affined, residual, 0, 0, residual, "interpolate", interpolator, NULL)) { VipsImage *affined;
g_object_unref(interpolator); if (vips_affine(image, &affined, residual, 0.0, 0.0, residual, "interpolate", interpolator, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(interpolator); vips_object_local(hook, affined);
g_object_unref(image);
image = affined; image = affined;
} }
// Rotate // Rotate
if (rotation != ANGLE_0) { if (rotation != Angle::D0) {
VipsImage *rotated = vips_image_new(); VipsImage *rotated;
vips_object_local(hook, rotated);
if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) { if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, rotated);
image = rotated; image = rotated;
} }
// Flip (mirror about Y axis) // Flip (mirror about Y axis)
if (baton->flip) { if (baton->flip) {
VipsImage *flipped = vips_image_new(); VipsImage *flipped;
vips_object_local(hook, flipped);
if (vips_flip(image, &flipped, VIPS_DIRECTION_VERTICAL, NULL)) { if (vips_flip(image, &flipped, VIPS_DIRECTION_VERTICAL, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, flipped);
image = flipped; image = flipped;
} }
// Flop (mirror about X axis) // Flop (mirror about X axis)
if (baton->flop) { if (baton->flop) {
VipsImage *flopped = vips_image_new(); VipsImage *flopped;
vips_object_local(hook, flopped);
if (vips_flip(image, &flopped, VIPS_DIRECTION_HORIZONTAL, NULL)) { if (vips_flip(image, &flopped, VIPS_DIRECTION_HORIZONTAL, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, flopped);
image = flopped; image = flopped;
} }
// Crop/embed // Crop/embed
if (image->Xsize != baton->width || image->Ysize != baton->height) { if (image->Xsize != baton->width || image->Ysize != baton->height) {
if (baton->canvas == EMBED) { if (baton->canvas == Canvas::EMBED) {
// Match background colour space, namely sRGB // Match background colour space, namely sRGB
if (image->Type != VIPS_INTERPRETATION_sRGB) { if (image->Type != VIPS_INTERPRETATION_sRGB) {
// Convert to sRGB colour space // Convert to sRGB colour space
VipsImage *colourspaced = vips_image_new(); VipsImage *colourspaced;
vips_object_local(hook, colourspaced);
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) { if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, colourspaced);
image = colourspaced; image = colourspaced;
} }
// Add non-transparent alpha channel, if required // Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !sharp_image_has_alpha(image)) { if (baton->background[3] < 255.0 && !HasAlpha(image)) {
// Create single-channel transparency // Create single-channel transparency
VipsImage *black = vips_image_new(); VipsImage *black;
vips_object_local(hook, black);
if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, NULL)) { if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
vips_object_local(hook, black);
// Invert to become non-transparent // Invert to become non-transparent
VipsImage *alpha = vips_image_new(); VipsImage *alpha;
vips_object_local(hook, alpha);
if (vips_invert(black, &alpha, NULL)) { if (vips_invert(black, &alpha, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(black); vips_object_local(hook, alpha);
// Append alpha channel to existing image // Append alpha channel to existing image
VipsImage *joined = vips_image_new(); VipsImage *joined;
vips_object_local(hook, joined);
if (vips_bandjoin2(image, alpha, &joined, NULL)) { if (vips_bandjoin2(image, alpha, &joined, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(alpha); vips_object_local(hook, joined);
g_object_unref(image);
image = joined; image = joined;
} }
// Create background // Create background
@@ -439,8 +470,7 @@ class ResizeWorker : public NanAsyncWorker {
// Embed // Embed
int left = (baton->width - image->Xsize) / 2; int left = (baton->width - image->Xsize) / 2;
int top = (baton->height - image->Ysize) / 2; int top = (baton->height - image->Ysize) / 2;
VipsImage *embedded = vips_image_new(); VipsImage *embedded;
vips_object_local(hook, embedded);
if (vips_embed(image, &embedded, left, top, baton->width, baton->height, if (vips_embed(image, &embedded, left, top, baton->width, baton->height,
"extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL "extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL
)) { )) {
@@ -448,7 +478,7 @@ class ResizeWorker : public NanAsyncWorker {
return Error(baton, hook); return Error(baton, hook);
} }
vips_area_unref(reinterpret_cast<VipsArea*>(background)); vips_area_unref(reinterpret_cast<VipsArea*>(background));
g_object_unref(image); vips_object_local(hook, embedded);
image = embedded; image = embedded;
} else { } else {
// Crop/max // Crop/max
@@ -457,93 +487,148 @@ class ResizeWorker : public NanAsyncWorker {
std::tie(left, top) = CalculateCrop(image->Xsize, image->Ysize, baton->width, baton->height, baton->gravity); std::tie(left, top) = CalculateCrop(image->Xsize, image->Ysize, baton->width, baton->height, baton->gravity);
int width = std::min(image->Xsize, baton->width); int width = std::min(image->Xsize, baton->width);
int height = std::min(image->Ysize, baton->height); int height = std::min(image->Ysize, baton->height);
VipsImage *extracted = vips_image_new(); VipsImage *extracted;
vips_object_local(hook, extracted);
if (vips_extract_area(image, &extracted, left, top, width, height, NULL)) { if (vips_extract_area(image, &extracted, left, top, width, height, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, extracted);
image = extracted; image = extracted;
} }
} }
// Post extraction // Post extraction
if (baton->topOffsetPost != -1) { if (baton->topOffsetPost != -1) {
VipsImage *extractedPost = vips_image_new(); VipsImage *extractedPost;
vips_object_local(hook, extractedPost);
if (vips_extract_area(image, &extractedPost, baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, NULL)) { if (vips_extract_area(image, &extractedPost, baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, extractedPost);
image = extractedPost; image = extractedPost;
} }
// Mild sharpen // Blur
if (baton->sharpen) { if (baton->blurSigma != 0.0) {
VipsImage *sharpened = vips_image_new(); VipsImage *blurred;
vips_object_local(hook, sharpened); if (baton->blurSigma < 0.0) {
VipsImage *sharpen = vips_image_new_matrixv(3, 3, // Fast, mild blur - averages neighbouring pixels
-1.0, -1.0, -1.0, VipsImage *blur = vips_image_new_matrixv(3, 3,
-1.0, 32.0, -1.0, 1.0, 1.0, 1.0,
-1.0, -1.0, -1.0); 1.0, 1.0, 1.0,
vips_image_set_double(sharpen, "scale", 24); 1.0, 1.0, 1.0);
vips_object_local(hook, sharpen); vips_image_set_double(blur, "scale", 9);
if (vips_conv(image, &sharpened, sharpen, NULL)) { vips_object_local(hook, blur);
return Error(baton, hook); if (vips_conv(image, &blurred, blur, NULL)) {
return Error(baton, hook);
}
} else {
// Slower, accurate Gaussian blur
// Create Gaussian function for standard deviation
VipsImage *gaussian;
if (vips_gaussmat(&gaussian, baton->blurSigma, 0.2, "separable", TRUE, "integer", TRUE, NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, gaussian);
// Apply Gaussian function
if (vips_convsep(image, &blurred, gaussian, "precision", VIPS_PRECISION_INTEGER, NULL)) {
return Error(baton, hook);
}
} }
g_object_unref(image); vips_object_local(hook, blurred);
image = blurred;
}
// Sharpen
if (baton->sharpenRadius != 0) {
VipsImage *sharpened;
if (baton->sharpenRadius == -1) {
// Fast, mild sharpen
VipsImage *sharpen = vips_image_new_matrixv(3, 3,
-1.0, -1.0, -1.0,
-1.0, 32.0, -1.0,
-1.0, -1.0, -1.0);
vips_image_set_double(sharpen, "scale", 24);
vips_object_local(hook, sharpen);
if (vips_conv(image, &sharpened, sharpen, NULL)) {
return Error(baton, hook);
}
} else {
// Slow, accurate sharpen in LAB colour space, with control over flat vs jagged areas
if (vips_sharpen(image, &sharpened, "radius", baton->sharpenRadius, "m1", baton->sharpenFlat, "m2", baton->sharpenJagged, NULL)) {
return Error(baton, hook);
}
}
vips_object_local(hook, sharpened);
image = sharpened; image = sharpened;
} }
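For the fast path the kernel's weights sum to 32 - 8 = 24, which is exactly the scale set on the matrix, so overall brightness is preserved; a quick check:

// Flat region, every pixel = v:       out = (32v - 8v) / 24       = v        (unchanged)
// Centre v, eight neighbours v - d:   out = (32v - 8(v - d)) / 24 = v + d/3  (local contrast boosted)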
// Gamma decoding (brighten) // Gamma decoding (brighten)
if (baton->gamma >= 1 && baton->gamma <= 3) { if (baton->gamma >= 1 && baton->gamma <= 3) {
VipsImage *gammaDecoded = vips_image_new(); VipsImage *gammaDecoded;
vips_object_local(hook, gammaDecoded);
if (vips_gamma(image, &gammaDecoded, "exponent", baton->gamma, NULL)) { if (vips_gamma(image, &gammaDecoded, "exponent", baton->gamma, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, gammaDecoded);
image = gammaDecoded; image = gammaDecoded;
} }
// Convert to sRGB colour space, if not already // Convert image to sRGB, if not already
if (image->Type != VIPS_INTERPRETATION_sRGB) { if (image->Type != VIPS_INTERPRETATION_sRGB) {
VipsImage *colourspaced = vips_image_new(); // Switch interpretation to sRGB
vips_object_local(hook, colourspaced); VipsImage *rgb;
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) { if (vips_colourspace(image, &rgb, VIPS_INTERPRETATION_sRGB, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, rgb);
image = colourspaced; image = rgb;
// Transform colours from embedded profile to sRGB profile
if (baton->withMetadata && HasProfile(image)) {
VipsImage *profiled;
if (vips_icc_transform(image, &profiled, srgbProfile.c_str(), "embedded", TRUE, NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, profiled);
image = profiled;
}
} }
// Generate image tile cache when interlace output is required #if !(VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 40 && VIPS_MICRO_VERSION >= 5)
// Generate image tile cache when interlace output is required - no longer required as of libvips 7.40.5+
if (baton->progressive) { if (baton->progressive) {
VipsImage *cached = vips_image_new(); VipsImage *cached;
vips_object_local(hook, cached);
if (vips_tilecache(image, &cached, "threaded", TRUE, "persistent", TRUE, "max_tiles", -1, NULL)) { if (vips_tilecache(image, &cached, "threaded", TRUE, "persistent", TRUE, "max_tiles", -1, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
g_object_unref(image); vips_object_local(hook, cached);
image = cached; image = cached;
} }
#endif
// Output // Output
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == JPEG)) { if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == ImageType::JPEG)) {
// Write JPEG to buffer // Write JPEG to buffer
if (vips_jpegsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata, if (vips_jpegsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) { "Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
baton->outputFormat = "jpeg"; baton->outputFormat = "jpeg";
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == PNG)) { } else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == ImageType::PNG)) {
#if (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 42)
// Select PNG row filter
int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
// Write PNG to buffer
if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
return Error(baton, hook);
}
#else
// Write PNG to buffer // Write PNG to buffer
if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata, if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) { "compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
#endif
baton->outputFormat = "png"; baton->outputFormat = "png";
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == WEBP)) { } else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == ImageType::WEBP)) {
// Write WEBP to buffer // Write WEBP to buffer
if (vips_webpsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata, if (vips_webpsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"Q", baton->quality, NULL)) { "Q", baton->quality, NULL)) {
@@ -551,33 +636,43 @@ class ResizeWorker : public NanAsyncWorker {
} }
baton->outputFormat = "webp"; baton->outputFormat = "webp";
} else { } else {
bool output_jpeg = is_jpeg(baton->output); bool outputJpeg = IsJpeg(baton->output);
bool output_png = is_png(baton->output); bool outputPng = IsPng(baton->output);
bool output_webp = is_webp(baton->output); bool outputWebp = IsWebp(baton->output);
bool output_tiff = is_tiff(baton->output); bool outputTiff = IsTiff(baton->output);
bool match_input = !(output_jpeg || output_png || output_webp || output_tiff); bool matchInput = !(outputJpeg || outputPng || outputWebp || outputTiff);
if (output_jpeg || (match_input && inputImageType == JPEG)) { if (outputJpeg || (matchInput && inputImageType == ImageType::JPEG)) {
// Write JPEG to file // Write JPEG to file
if (vips_jpegsave(image, baton->output.c_str(), "strip", !baton->withMetadata, if (vips_jpegsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) { "Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
baton->outputFormat = "jpeg"; baton->outputFormat = "jpeg";
} else if (output_png || (match_input && inputImageType == PNG)) { } else if (outputPng || (matchInput && inputImageType == ImageType::PNG)) {
#if (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 41)
// Select PNG row filter
int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
// Write PNG to file
if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
return Error(baton, hook);
}
#else
// Write PNG to file // Write PNG to file
if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata, if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) { "compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
#endif
baton->outputFormat = "png"; baton->outputFormat = "png";
} else if (output_webp || (match_input && inputImageType == WEBP)) { } else if (outputWebp || (matchInput && inputImageType == ImageType::WEBP)) {
// Write WEBP to file // Write WEBP to file
if (vips_webpsave(image, baton->output.c_str(), "strip", !baton->withMetadata, if (vips_webpsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"Q", baton->quality, NULL)) { "Q", baton->quality, NULL)) {
return Error(baton, hook); return Error(baton, hook);
} }
baton->outputFormat = "webp"; baton->outputFormat = "webp";
} else if (output_tiff || (match_input && inputImageType == TIFF)) { } else if (outputTiff || (matchInput && inputImageType == ImageType::TIFF)) {
// Write TIFF to file // Write TIFF to file
if (vips_tiffsave(image, baton->output.c_str(), "strip", !baton->withMetadata, if (vips_tiffsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) { "compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) {
@@ -586,12 +681,10 @@ class ResizeWorker : public NanAsyncWorker {
baton->outputFormat = "tiff"; baton->outputFormat = "tiff";
} else { } else {
(baton->err).append("Unsupported output " + baton->output); (baton->err).append("Unsupported output " + baton->output);
g_object_unref(image);
return Error(baton, hook); return Error(baton, hook);
} }
} }
// Clean up any dangling image references // Clean up any dangling image references
g_object_unref(image);
g_object_unref(hook); g_object_unref(hook);
// Clean up libvips' per-request data and threads // Clean up libvips' per-request data and threads
vips_error_clear(); vips_error_clear();
@@ -601,6 +694,11 @@ class ResizeWorker : public NanAsyncWorker {
void HandleOKCallback () { void HandleOKCallback () {
NanScope(); NanScope();
// Free input Buffer
if (baton->bufferInLength > 0) {
g_free(baton->bufferIn);
}
Handle<Value> argv[3] = { NanNull(), NanNull(), NanNull() }; Handle<Value> argv[3] = { NanNull(), NanNull(), NanNull() };
if (!baton->err.empty()) { if (!baton->err.empty()) {
// Error // Error
@@ -619,23 +717,28 @@ class ResizeWorker : public NanAsyncWorker {
// Info Object // Info Object
Local<Object> info = NanNew<Object>(); Local<Object> info = NanNew<Object>();
info->Set(NanNew<String>("format"), NanNew<String>(baton->outputFormat)); info->Set(NanNew<String>("format"), NanNew<String>(baton->outputFormat));
info->Set(NanNew<String>("width"), NanNew<Number>(width)); info->Set(NanNew<String>("width"), NanNew<Integer>(width));
info->Set(NanNew<String>("height"), NanNew<Number>(height)); info->Set(NanNew<String>("height"), NanNew<Integer>(height));
if (baton->bufferOutLength > 0) { if (baton->bufferOutLength > 0) {
// Buffer // Copy data to new Buffer
argv[1] = NanNewBufferHandle(static_cast<char*>(baton->bufferOut), baton->bufferOutLength); argv[1] = NanNewBufferHandle(static_cast<char*>(baton->bufferOut), baton->bufferOutLength);
g_free(baton->bufferOut); g_free(baton->bufferOut);
// Add buffer size to info
info->Set(NanNew<String>("size"), NanNew<Integer>(baton->bufferOutLength));
argv[2] = info; argv[2] = info;
} else { } else {
// File // Add file size to info
struct stat st;
g_stat(baton->output.c_str(), &st);
info->Set(NanNew<String>("size"), NanNew<Integer>(st.st_size));
argv[1] = info; argv[1] = info;
} }
} }
delete baton; delete baton;
// Decrement processing task counter // Decrement processing task counter
g_atomic_int_dec_and_test(&counter_process); g_atomic_int_dec_and_test(&counterProcess);
// Return to JavaScript // Return to JavaScript
callback->Call(3, argv); callback->Call(3, argv);
@@ -653,40 +756,25 @@ class ResizeWorker : public NanAsyncWorker {
*/ */
std::tuple<Angle, bool> std::tuple<Angle, bool>
CalculateRotationAndFlip(int const angle, VipsImage const *input) { CalculateRotationAndFlip(int const angle, VipsImage const *input) {
Angle rotate = ANGLE_0; Angle rotate = Angle::D0;
bool flip = FALSE; bool flip = FALSE;
if (angle == -1) { if (angle == -1) {
const char *exif; switch(ExifOrientation(input)) {
if ( case 6: rotate = Angle::D90; break;
vips_image_get_typeof(input, "exif-ifd0-Orientation") != 0 && case 3: rotate = Angle::D180; break;
!vips_image_get_string(input, "exif-ifd0-Orientation", &exif) case 8: rotate = Angle::D270; break;
) { case 2: flip = TRUE; break; // flip 1
if (exif[0] == 0x36) { // "6" case 7: flip = TRUE; rotate = Angle::D90; break; // flip 6
rotate = ANGLE_90; case 4: flip = TRUE; rotate = Angle::D180; break; // flip 3
} else if (exif[0] == 0x33) { // "3" case 5: flip = TRUE; rotate = Angle::D270; break; // flip 8
rotate = ANGLE_180;
} else if (exif[0] == 0x38) { // "8"
rotate = ANGLE_270;
} else if (exif[0] == 0x32) { // "2" (flip 1)
flip = TRUE;
} else if (exif[0] == 0x37) { // "7" (flip 6)
rotate = ANGLE_90;
flip = TRUE;
} else if (exif[0] == 0x34) { // "4" (flip 3)
rotate = ANGLE_180;
flip = TRUE;
} else if (exif[0] == 0x35) { // "5" (flip 8)
rotate = ANGLE_270;
flip = TRUE;
}
} }
} else { } else {
if (angle == 90) { if (angle == 90) {
rotate = ANGLE_90; rotate = Angle::D90;
} else if (angle == 180) { } else if (angle == 180) {
rotate = ANGLE_180; rotate = Angle::D180;
} else if (angle == 270) { } else if (angle == 270) {
rotate = ANGLE_270; rotate = Angle::D270;
} }
} }
return std::make_tuple(rotate, flip); return std::make_tuple(rotate, flip);
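ExifOrientation replaces the inline string parsing removed on the left; a minimal sketch of what it presumably does, based on that removed code:

// Sketch only: read the EXIF orientation tag the same way the removed inline code did
static int ExifOrientation(VipsImage const *image) {
  int orientation = 0;
  const char *exif;
  if (
    vips_image_get_typeof(image, "exif-ifd0-Orientation") != 0 &&
    !vips_image_get_string(image, "exif-ifd0-Orientation", &exif)
  ) {
    orientation = exif[0] - '0';  // the tag arrives as a string such as "6"
  }
  return orientation;
}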
@@ -728,9 +816,12 @@ class ResizeWorker : public NanAsyncWorker {
Clear all thread-local data. Clear all thread-local data.
*/ */
void Error(ResizeBaton *baton, VipsObject *hook) { void Error(ResizeBaton *baton, VipsObject *hook) {
// Get libvips' error message
(baton->err).append(vips_error_buffer()); (baton->err).append(vips_error_buffer());
vips_error_clear(); // Clean up any dangling image references
g_object_unref(hook); g_object_unref(hook);
// Clean up libvips' per-request data and threads
vips_error_clear();
vips_thread_shutdown(); vips_thread_shutdown();
} }
}; };
@@ -751,11 +842,14 @@ NAN_METHOD(resize) {
// Input Buffer object // Input Buffer object
if (options->Get(NanNew<String>("bufferIn"))->IsObject()) { if (options->Get(NanNew<String>("bufferIn"))->IsObject()) {
Local<Object> buffer = options->Get(NanNew<String>("bufferIn"))->ToObject(); Local<Object> buffer = options->Get(NanNew<String>("bufferIn"))->ToObject();
// Take a copy of the input Buffer to avoid problems with V8 heap compaction
baton->bufferInLength = node::Buffer::Length(buffer); baton->bufferInLength = node::Buffer::Length(buffer);
baton->bufferIn = node::Buffer::Data(buffer); baton->bufferIn = g_malloc(baton->bufferInLength);
memcpy(baton->bufferIn, node::Buffer::Data(buffer), baton->bufferInLength);
options->Set(NanNew<String>("bufferIn"), NanNull());
} }
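Putting the buffer-handling hunks of this diff together: the private copy taken here with g_malloc lives for the duration of the worker and is released once the result has been handed back, while nulling options.bufferIn presumably lets V8 collect the original Buffer without waiting for the resize to finish.

// Lifecycle of the input data, as assembled from the hunks in this diff:
//   resize():           bufferIn = g_malloc(length); memcpy from the V8 Buffer; the options.bufferIn reference is nulled
//   Execute():          vips_jpegload_buffer(bufferIn, length, ...) reads the private copy, which lives outside the V8 heap
//   HandleOKCallback(): g_free(bufferIn) after the output has been returned to JavaScript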
// ICC profile to use when input CMYK image has no embedded profile // ICC profile to use when input CMYK image has no embedded profile
baton->iccProfileCmyk = *String::Utf8Value(options->Get(NanNew<String>("iccProfileCmyk"))->ToString()); baton->iccProfilePath = *String::Utf8Value(options->Get(NanNew<String>("iccProfilePath"))->ToString());
// Extract image options // Extract image options
baton->topOffsetPre = options->Get(NanNew<String>("topOffsetPre"))->Int32Value(); baton->topOffsetPre = options->Get(NanNew<String>("topOffsetPre"))->Int32Value();
baton->leftOffsetPre = options->Get(NanNew<String>("leftOffsetPre"))->Int32Value(); baton->leftOffsetPre = options->Get(NanNew<String>("leftOffsetPre"))->Int32Value();
@@ -771,11 +865,11 @@ NAN_METHOD(resize) {
// Canvas option // Canvas option
Local<String> canvas = options->Get(NanNew<String>("canvas"))->ToString(); Local<String> canvas = options->Get(NanNew<String>("canvas"))->ToString();
if (canvas->Equals(NanNew<String>("c"))) { if (canvas->Equals(NanNew<String>("c"))) {
baton->canvas = CROP; baton->canvas = Canvas::CROP;
} else if (canvas->Equals(NanNew<String>("m"))) { } else if (canvas->Equals(NanNew<String>("m"))) {
baton->canvas = MAX; baton->canvas = Canvas::MAX;
} else if (canvas->Equals(NanNew<String>("e"))) { } else if (canvas->Equals(NanNew<String>("e"))) {
baton->canvas = EMBED; baton->canvas = Canvas::EMBED;
} }
// Background colour // Background colour
Local<Array> background = Local<Array>::Cast(options->Get(NanNew<String>("background"))); Local<Array> background = Local<Array>::Cast(options->Get(NanNew<String>("background")));
@@ -788,7 +882,10 @@ NAN_METHOD(resize) {
baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString()); baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString());
// Operators // Operators
baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue(); baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue(); baton->blurSigma = options->Get(NanNew<String>("blurSigma"))->NumberValue();
baton->sharpenRadius = options->Get(NanNew<String>("sharpenRadius"))->Int32Value();
baton->sharpenFlat = options->Get(NanNew<String>("sharpenFlat"))->NumberValue();
baton->sharpenJagged = options->Get(NanNew<String>("sharpenJagged"))->NumberValue();
baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue(); baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue(); baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue();
baton->angle = options->Get(NanNew<String>("angle"))->Int32Value(); baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
@@ -798,6 +895,7 @@ NAN_METHOD(resize) {
baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue(); baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
baton->quality = options->Get(NanNew<String>("quality"))->Int32Value(); baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value(); baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
baton->withoutAdaptiveFiltering = options->Get(NanNew<String>("withoutAdaptiveFiltering"))->BooleanValue();
baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue(); baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
// Output filename or __format for Buffer // Output filename or __format for Buffer
baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString()); baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString());
@@ -807,7 +905,7 @@ NAN_METHOD(resize) {
NanAsyncQueueWorker(new ResizeWorker(callback, baton)); NanAsyncQueueWorker(new ResizeWorker(callback, baton));
// Increment queued task counter // Increment queued task counter
g_atomic_int_inc(&counter_queue); g_atomic_int_inc(&counterQueue);
NanReturnUndefined(); NanReturnUndefined();
} }


@@ -33,6 +33,7 @@ extern "C" void init(Handle<Object> target) {
NODE_SET_METHOD(target, "cache", cache); NODE_SET_METHOD(target, "cache", cache);
NODE_SET_METHOD(target, "concurrency", concurrency); NODE_SET_METHOD(target, "concurrency", concurrency);
NODE_SET_METHOD(target, "counters", counters); NODE_SET_METHOD(target, "counters", counters);
NODE_SET_METHOD(target, "libvipsVersion", libvipsVersion);
} }
NODE_MODULE(sharp, init) NODE_MODULE(sharp, init)


@@ -7,6 +7,7 @@
#include "utilities.h" #include "utilities.h"
using namespace v8; using namespace v8;
using namespace sharp;
/* /*
Get and set cache memory and item limits Get and set cache memory and item limits
@@ -58,7 +59,17 @@ NAN_METHOD(concurrency) {
NAN_METHOD(counters) { NAN_METHOD(counters) {
NanScope(); NanScope();
Local<Object> counters = NanNew<Object>(); Local<Object> counters = NanNew<Object>();
counters->Set(NanNew<String>("queue"), NanNew<Number>(counter_queue)); counters->Set(NanNew<String>("queue"), NanNew<Number>(counterQueue));
counters->Set(NanNew<String>("process"), NanNew<Number>(counter_process)); counters->Set(NanNew<String>("process"), NanNew<Number>(counterProcess));
NanReturnValue(counters); NanReturnValue(counters);
} }
/*
Get libvips version
*/
NAN_METHOD(libvipsVersion) {
NanScope();
char version[9];
snprintf(version, 9, "%d.%d.%d", vips_version(0), vips_version(1), vips_version(2));
NanReturnValue(NanNew<String>(version));
}
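vips_version(0), vips_version(1) and vips_version(2) return the major, minor and micro components of the linked libvips, so the string handed back to JavaScript looks like the example below; the benchmark changes further down use it with semver to gate the adaptive-filtering test.

// Illustrative: for libvips 7.42.0
// vips_version(0) == 7, vips_version(1) == 42, vips_version(2) == 0  ->  "7.42.0"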


@@ -6,5 +6,6 @@
NAN_METHOD(cache); NAN_METHOD(cache);
NAN_METHOD(concurrency); NAN_METHOD(concurrency);
NAN_METHOD(counters); NAN_METHOD(counters);
NAN_METHOD(libvipsVersion);
#endif #endif


@@ -9,9 +9,10 @@
}, },
"devDependencies": { "devDependencies": {
"imagemagick": "^0.1.3", "imagemagick": "^0.1.3",
"imagemagick-native": "^1.4.0", "imagemagick-native": "^1.6.0",
"gm": "^1.16.0", "gm": "^1.17.0",
"async": "^0.9.0", "async": "^0.9.0",
"semver": "^4.1.0",
"benchmark": "^1.0.0" "benchmark": "^1.0.0"
}, },
"license": "Apache 2.0", "license": "Apache 2.0",


@@ -5,6 +5,7 @@ var fs = require('fs');
var async = require('async'); var async = require('async');
var assert = require('assert'); var assert = require('assert');
var Benchmark = require('benchmark'); var Benchmark = require('benchmark');
var semver = require('semver');
var imagemagick = require('imagemagick'); var imagemagick = require('imagemagick');
var imagemagickNative = require('imagemagick-native'); var imagemagickNative = require('imagemagick-native');
@@ -16,6 +17,9 @@ var fixtures = require('../fixtures');
var width = 720; var width = 720;
var height = 480; var height = 480;
// Approximately equivalent to fast bilinear
var magickFilter = 'Triangle';
// Disable libvips cache to ensure tests are as fair as they can be // Disable libvips cache to ensure tests are as fair as they can be
sharp.cache(0); sharp.cache(0);
@@ -30,7 +34,9 @@ async.series({
dstPath: fixtures.outputJpg, dstPath: fixtures.outputJpg,
quality: 0.8, quality: 0.8,
width: width, width: width,
height: height height: height,
format: 'jpg',
filter: magickFilter
}, function(err) { }, function(err) {
if (err) { if (err) {
throw err; throw err;
@@ -47,55 +53,78 @@ async.series({
quality: 80, quality: 80,
width: width, width: width,
height: height, height: height,
format: 'JPEG' format: 'JPEG',
filter: magickFilter
}, function (err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
}); });
deferred.resolve();
} }
}).add('gm-buffer-file', { }).add('gm-buffer-file', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(inputJpgBuffer).resize(width, height).quality(80).write(fixtures.outputJpg, function (err) { gm(inputJpgBuffer)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .quality(80)
deferred.resolve(); .write(fixtures.outputJpg, function (err) {
} if (err) {
}); throw err;
} else {
deferred.resolve();
}
});
} }
}).add('gm-buffer-buffer', { }).add('gm-buffer-buffer', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(inputJpgBuffer).resize(width, height).quality(80).toBuffer(function (err, buffer) { gm(inputJpgBuffer)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .quality(80)
assert.notStrictEqual(null, buffer); .toBuffer(function (err, buffer) {
deferred.resolve(); if (err) {
} throw err;
}); } else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
} }
}).add('gm-file-file', { }).add('gm-file-file', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(fixtures.inputJpg).resize(width, height).quality(80).write(fixtures.outputJpg, function (err) { gm(fixtures.inputJpg)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .quality(80)
deferred.resolve(); .write(fixtures.outputJpg, function (err) {
} if (err) {
}); throw err;
} else {
deferred.resolve();
}
});
} }
}).add('gm-file-buffer', { }).add('gm-file-buffer', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(fixtures.inputJpg).resize(width, height).quality(80).toBuffer(function (err, buffer) { gm(fixtures.inputJpg)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .quality(80)
assert.notStrictEqual(null, buffer); .toBuffer(function (err, buffer) {
deferred.resolve(); if (err) {
} throw err;
}); } else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
} }
}).add('sharp-buffer-file', { }).add('sharp-buffer-file', {
defer: true, defer: true,
@@ -162,7 +191,7 @@ async.series({
deferred.resolve(); deferred.resolve();
}); });
} }
}).add('sharp-sharpen', { }).add('sharp-sharpen-mild', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).sharpen().toBuffer(function(err, buffer) { sharp(inputJpgBuffer).resize(width, height).sharpen().toBuffer(function(err, buffer) {
@@ -174,6 +203,42 @@ async.series({
} }
}); });
} }
}).add('sharp-sharpen-radius', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).sharpen(3, 1, 3).toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-blur-mild', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).blur().toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-blur-radius', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).blur(3).toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-nearest-neighbour', { }).add('sharp-nearest-neighbour', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
@@ -314,14 +379,17 @@ async.series({
}, },
png: function(callback) { png: function(callback) {
var inputPngBuffer = fs.readFileSync(fixtures.inputPng); var inputPngBuffer = fs.readFileSync(fixtures.inputPng);
(new Benchmark.Suite('png')).add('imagemagick-file-file', { var pngSuite = new Benchmark.Suite('png');
pngSuite.add('imagemagick-file-file', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
imagemagick.resize({ imagemagick.resize({
srcPath: fixtures.inputPng, srcPath: fixtures.inputPng,
dstPath: fixtures.outputPng, dstPath: fixtures.outputPng,
width: width, width: width,
height: height height: height,
format: 'jpg',
filter: magickFilter
}, function(err) { }, function(err) {
if (err) { if (err) {
throw err; throw err;
@@ -337,32 +405,39 @@ async.series({
srcData: inputPngBuffer, srcData: inputPngBuffer,
width: width, width: width,
height: height, height: height,
format: 'PNG' format: 'PNG',
filter: magickFilter
}); });
deferred.resolve(); deferred.resolve();
} }
}).add('gm-file-file', { }).add('gm-file-file', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(fixtures.inputPng).resize(width, height).write(fixtures.outputPng, function (err) { gm(fixtures.inputPng)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .write(fixtures.outputPng, function (err) {
deferred.resolve(); if (err) {
} throw err;
}); } else {
deferred.resolve();
}
});
} }
}).add('gm-file-buffer', { }).add('gm-file-buffer', {
defer: true, defer: true,
fn: function(deferred) { fn: function(deferred) {
gm(fixtures.inputPng).resize(width, height).quality(80).toBuffer(function (err, buffer) { gm(fixtures.inputPng)
if (err) { .resize(width, height)
throw err; .filter(magickFilter)
} else { .toBuffer(function (err, buffer) {
assert.notStrictEqual(null, buffer); if (err) {
deferred.resolve(); throw err;
} } else {
}); assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
} }
}).add('sharp-buffer-file', { }).add('sharp-buffer-file', {
defer: true, defer: true,
@@ -422,7 +497,23 @@ async.series({
} }
}); });
} }
}).on('cycle', function(event) { });
if (semver.gte(sharp.libvipsVersion(), '7.41.0')) {
pngSuite.add('sharp-withoutAdaptiveFiltering', {
defer: true,
fn: function(deferred) {
sharp(inputPngBuffer).resize(width, height).withoutAdaptiveFiltering().toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
});
}
pngSuite.on('cycle', function(event) {
console.log(' png ' + String(event.target)); console.log(' png ' + String(event.target));
}).on('complete', function() { }).on('complete', function() {
callback(null, this.filter('fastest').pluck('name')); callback(null, this.filter('fastest').pluck('name'));


test/fixtures/Wikimedia-logo.svg (new file)

@@ -0,0 +1,17 @@
<?xml version="1.0" standalone="yes"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1"
id="Wikimedia logo"
viewBox="-599 -599 1198 1198" width="1024" height="1024">
<defs>
<clipPath id="mask">
<path d="M 47.5,-87.5 v 425 h -95 v -425 l -552,-552 v 1250 h 1199 v -1250 z" />
</clipPath>
</defs>
<g clip-path="url(#mask)">
<circle id="green parts" fill="#396" r="336.5"/>
<circle id="blue arc" fill="none" stroke="#069" r="480.25" stroke-width="135.5" />
</g>
<circle fill="#900" cy="-379.5" r="184.5" id="red circle"/>
</svg>


test/fixtures/free-gearhead-pack.psd (new binary file)



@@ -21,6 +21,8 @@ module.exports = {
inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
inputTiff: getPath('G31D.TIF'), // http://www.fileformat.info/format/tiff/sample/e6c9a6e5253348f4aef6d17b534360ab/index.htm inputTiff: getPath('G31D.TIF'), // http://www.fileformat.info/format/tiff/sample/e6c9a6e5253348f4aef6d17b534360ab/index.htm
inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif inputGif: getPath('Crash_test.gif'), // http://upload.wikimedia.org/wikipedia/commons/e/e3/Crash_test.gif
inputSvg: getPath('Wikimedia-logo.svg'), // http://commons.wikimedia.org/wiki/File:Wikimedia-logo.svg
inputPsd: getPath('free-gearhead-pack.psd'), // https://dribbble.com/shots/1624241-Free-Gearhead-Vector-Pack
outputJpg: getPath('output.jpg'), outputJpg: getPath('output.jpg'),
outputPng: getPath('output.png'), outputPng: getPath('output.png'),


@@ -5,4 +5,4 @@ fi
curl -O https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp test/leak/libvips.supp curl -O https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp test/leak/libvips.supp
cd ../../ cd ../../
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=test/leak/libvips.supp --suppressions=test/leak/sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 npm test G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=test/leak/libvips.supp --suppressions=test/leak/sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 --trace-children=yes npm test


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Alpha transparency', function() { describe('Alpha transparency', function() {
it('Flatten to black', function(done) { it('Flatten to black', function(done) {

test/unit/blur.js (new file)

@@ -0,0 +1,100 @@
'use strict';
var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Blur', function() {
it('specific radius 1', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(1)
.toFile(fixtures.path('output.blur-1.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 10', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(10)
.toFile(fixtures.path('output.blur-10.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 0.3', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(0.3)
.toFile(fixtures.path('output.blur-0.3.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('mild blur', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur()
.toFile(fixtures.path('output.blur-mild.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('invalid radius', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).blur(0.1);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('blurred image is smaller than non-blurred', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(false)
.toBuffer(function(err, notBlurred, info) {
if (err) throw err;
assert.strictEqual(true, notBlurred.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(true)
.toBuffer(function(err, blurred, info) {
if (err) throw err;
assert.strictEqual(true, blurred.length > 0);
assert.strictEqual(true, blurred.length < notBlurred.length);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
});
});


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Colour space conversion', function() { describe('Colour space conversion', function() {
it('To greyscale', function(done) { it('To greyscale', function(done) {


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Crop gravities', function() { describe('Crop gravities', function() {
it('North', function(done) { it('North', function(done) {


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Embed', function() { describe('Embed', function() {
it('JPEG within PNG, no alpha channel', function(done) { it('JPEG within PNG, no alpha channel', function(done) {


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Partial image extraction', function() { describe('Partial image extraction', function() {
it('JPEG', function(done) { it('JPEG', function(done) {


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Gamma correction', function() { describe('Gamma correction', function() {
it('value of 0.0 (disabled)', function(done) { it('value of 0.0 (disabled)', function(done) {


@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Interpolation', function() { describe('Interpolation', function() {
it('nearest neighbour', function(done) { it('nearest neighbour', function(done) {


@@ -3,9 +3,13 @@
var fs = require('fs'); var fs = require('fs');
var assert = require('assert'); var assert = require('assert');
var semver = require('semver');
var sharp = require('../../index'); var sharp = require('../../index');
var fixtures = require('../fixtures'); var fixtures = require('../fixtures');
sharp.cache(0);
describe('Input/output', function() { describe('Input/output', function() {
it('Read from File and write to Stream', function(done) { it('Read from File and write to Stream', function(done) {
@@ -14,6 +18,7 @@ describe('Input/output', function() {
sharp(fixtures.outputJpg).toBuffer(function(err, data, info) { sharp(fixtures.outputJpg).toBuffer(function(err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
@@ -31,6 +36,7 @@ describe('Input/output', function() {
sharp(fixtures.outputJpg).toBuffer(function(err, data, info) { sharp(fixtures.outputJpg).toBuffer(function(err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
@@ -45,6 +51,7 @@ describe('Input/output', function() {
var readable = fs.createReadStream(fixtures.inputJpg); var readable = fs.createReadStream(fixtures.inputJpg);
var pipeline = sharp().resize(320, 240).toFile(fixtures.outputJpg, function(err, info) { var pipeline = sharp().resize(320, 240).toFile(fixtures.outputJpg, function(err, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
@@ -59,6 +66,7 @@ describe('Input/output', function() {
var pipeline = sharp().resize(320, 240).toBuffer(function(err, data, info) { var pipeline = sharp().resize(320, 240).toBuffer(function(err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(true, data.length > 0); assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width); assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height); assert.strictEqual(240, info.height);
@@ -86,6 +94,7 @@ describe('Input/output', function() {
sharp(fixtures.outputJpg).toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
+assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
@@ -133,6 +142,7 @@ describe('Input/output', function() {
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
+assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
@@ -147,6 +157,7 @@ describe('Input/output', function() {
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
+assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
@@ -217,6 +228,7 @@ describe('Input/output', function() {
sharp(data).toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
+assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
@@ -258,21 +270,23 @@ describe('Input/output', function() {
.resize(320, 240)
.png()
.progressive(false)
-.toBuffer(function(err, nonProgressive, info) {
+.toBuffer(function(err, nonProgressiveData, nonProgressiveInfo) {
if (err) throw err;
-assert.strictEqual(true, nonProgressive.length > 0);
-assert.strictEqual('png', info.format);
-assert.strictEqual(320, info.width);
-assert.strictEqual(240, info.height);
-sharp(nonProgressive)
+assert.strictEqual(true, nonProgressiveData.length > 0);
+assert.strictEqual(nonProgressiveData.length, nonProgressiveInfo.size);
+assert.strictEqual('png', nonProgressiveInfo.format);
+assert.strictEqual(320, nonProgressiveInfo.width);
+assert.strictEqual(240, nonProgressiveInfo.height);
+sharp(nonProgressiveData)
.progressive()
-.toBuffer(function(err, progressive, info) {
+.toBuffer(function(err, progressiveData, progressiveInfo) {
if (err) throw err;
-assert.strictEqual(true, progressive.length > 0);
-assert.strictEqual(true, progressive.length > nonProgressive.length);
-assert.strictEqual('png', info.format);
-assert.strictEqual(320, info.width);
-assert.strictEqual(240, info.height);
+assert.strictEqual(true, progressiveData.length > 0);
+assert.strictEqual(progressiveData.length, progressiveInfo.size);
+assert.strictEqual(true, progressiveData.length > nonProgressiveData.length);
+assert.strictEqual('png', progressiveInfo.format);
+assert.strictEqual(320, progressiveInfo.width);
+assert.strictEqual(240, progressiveInfo.height);
done();
});
});
@@ -283,6 +297,7 @@ describe('Input/output', function() {
it('JPEG', function(done) {
sharp(fixtures.inputJpg).resize(320, 80).toFile(fixtures.outputZoinks, function(err, info) {
if (err) throw err;
+assert.strictEqual(true, info.size > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
@@ -294,6 +309,7 @@ describe('Input/output', function() {
it('PNG', function(done) {
sharp(fixtures.inputPng).resize(320, 80).toFile(fixtures.outputZoinks, function(err, info) {
if (err) throw err;
+assert.strictEqual(true, info.size > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
@@ -305,6 +321,7 @@ describe('Input/output', function() {
it('Transparent PNG', function(done) {
sharp(fixtures.inputPngWithTransparency).resize(320, 80).toFile(fixtures.outputZoinks, function(err, info) {
if (err) throw err;
+assert.strictEqual(true, info.size > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
@@ -315,6 +332,7 @@ describe('Input/output', function() {
it('WebP', function(done) {
sharp(fixtures.inputWebP).resize(320, 80).toFile(fixtures.outputZoinks, function(err, info) {
if (err) throw err;
+assert.strictEqual(true, info.size > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
@@ -326,6 +344,7 @@ describe('Input/output', function() {
it('TIFF', function(done) {
sharp(fixtures.inputTiff).resize(320, 80).toFile(fixtures.outputZoinks, function(err, info) {
if (err) throw err;
+assert.strictEqual(true, info.size > 0);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
@@ -343,9 +362,9 @@ describe('Input/output', function() {
});
-describe('PNG compression level', function() {
-it('valid', function(done) {
+describe('PNG output', function() {
+it('compression level is valid', function(done) {
var isValid = false;
try {
sharp().compressionLevel(0);
@@ -355,7 +374,7 @@ describe('Input/output', function() {
done();
});
-it('invalid', function(done) {
+it('compression level is invalid', function(done) {
var isValid = false;
try {
sharp().compressionLevel(-1);
@@ -365,6 +384,83 @@ describe('Input/output', function() {
done();
});
if (semver.gte(sharp.libvipsVersion(), '7.41.0')) {
it('withoutAdaptiveFiltering generates smaller file [libvips ' + sharp.libvipsVersion() + '>=7.41.0]', function(done) {
// First generate with adaptive filtering
sharp(fixtures.inputPng)
.resize(320, 240)
.withoutAdaptiveFiltering(false)
.toBuffer(function(err, adaptiveData, adaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, adaptiveData.length > 0);
assert.strictEqual(adaptiveData.length, adaptiveInfo.size);
assert.strictEqual('png', adaptiveInfo.format);
assert.strictEqual(320, adaptiveInfo.width);
assert.strictEqual(240, adaptiveInfo.height);
// Then generate without
sharp(fixtures.inputPng)
.resize(320, 240)
.withoutAdaptiveFiltering()
.toBuffer(function(err, withoutAdaptiveData, withoutAdaptiveInfo) {
if (err) throw err;
assert.strictEqual(true, withoutAdaptiveData.length > 0);
assert.strictEqual(withoutAdaptiveData.length, withoutAdaptiveInfo.size);
assert.strictEqual('png', withoutAdaptiveInfo.format);
assert.strictEqual(320, withoutAdaptiveInfo.width);
assert.strictEqual(240, withoutAdaptiveInfo.height);
assert.strictEqual(true, withoutAdaptiveData.length < adaptiveData.length);
done();
});
});
});
}
});
it('Convert SVG to PNG', function(done) {
sharp(fixtures.inputSvg)
.resize(100, 100)
.png()
.toFile(fixtures.path('output.svg.png'), function(err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(100, info.width);
assert.strictEqual(100, info.height);
done();
});
});
it('Convert PSD to PNG', function(done) {
sharp(fixtures.inputPsd)
.resize(320, 240)
.png()
.toFile(fixtures.path('output.psd.png'), function(err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
if (semver.gte(sharp.libvipsVersion(), '7.40.0')) {
it('Load TIFF from Buffer [libvips ' + sharp.libvipsVersion() + '>=7.40.0]', function(done) {
var inputTiffBuffer = fs.readFileSync(fixtures.inputTiff);
sharp(inputTiffBuffer)
.resize(320, 240)
.jpeg()
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
}
});

View File

@@ -6,6 +6,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
+sharp.cache(0);
describe('Image metadata', function() {
it('JPEG', function(done) {
@@ -16,6 +18,8 @@ describe('Image metadata', function() {
assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
+assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual('undefined', typeof metadata.orientation);
done();
});
@@ -29,6 +33,7 @@ describe('Image metadata', function() {
assert.strictEqual(600, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(true, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
assert.strictEqual(8, metadata.orientation);
done();
@@ -43,6 +48,7 @@ describe('Image metadata', function() {
assert.strictEqual(3248, metadata.height);
assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
@@ -56,6 +62,7 @@ describe('Image metadata', function() {
assert.strictEqual(2074, metadata.height);
assert.strictEqual('b-w', metadata.space);
assert.strictEqual(1, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
@@ -69,6 +76,7 @@ describe('Image metadata', function() {
assert.strictEqual(1536, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(4, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(true, metadata.hasAlpha);
done();
});
@@ -82,6 +90,7 @@ describe('Image metadata', function() {
assert.strictEqual(772, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
@@ -94,6 +103,7 @@ describe('Image metadata', function() {
assert.strictEqual(800, metadata.width);
assert.strictEqual(533, metadata.height);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
@@ -106,11 +116,21 @@ describe('Image metadata', function() {
assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
});
it('Non-existent file in, Promise out', function(done) {
sharp('fail').metadata().then(function(metadata) {
throw new Error('Non-existent file');
}, function (err) {
assert.ok(!!err);
done();
});
});
it('Stream in, Promise out', function(done) {
var readable = fs.createReadStream(fixtures.inputJpg);
var pipeline = sharp();
@@ -120,6 +140,7 @@ describe('Image metadata', function() {
assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
}).catch(function(err) {
@@ -137,6 +158,7 @@ describe('Image metadata', function() {
assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
done();
});
@@ -152,6 +174,7 @@ describe('Image metadata', function() {
assert.strictEqual(2225, metadata.height);
assert.strictEqual('srgb', metadata.space);
assert.strictEqual(3, metadata.channels);
+assert.strictEqual(false, metadata.hasProfile);
assert.strictEqual(false, metadata.hasAlpha);
image.resize(metadata.width / 2).toBuffer(function(err, data, info) {
if (err) throw err;
@@ -164,25 +187,33 @@ describe('Image metadata', function() {
});
it('Keep EXIF metadata after a resize', function(done) {
-sharp(fixtures.inputJpgWithExif).resize(320, 240).withMetadata().toBuffer(function(err, buffer) {
-if (err) throw err;
-sharp(buffer).metadata(function(err, metadata) {
-if (err) throw err;
-assert.strictEqual(8, metadata.orientation);
-done();
-});
-});
+sharp(fixtures.inputJpgWithExif)
+.resize(320, 240)
+.withMetadata()
+.toBuffer(function(err, buffer) {
+if (err) throw err;
+sharp(buffer).metadata(function(err, metadata) {
+if (err) throw err;
+assert.strictEqual(true, metadata.hasProfile);
+assert.strictEqual(8, metadata.orientation);
+done();
+});
+});
});
it('Remove EXIF metadata after a resize', function(done) {
-sharp(fixtures.inputJpgWithExif).resize(320, 240).withMetadata(false).toBuffer(function(err, buffer) {
-if (err) throw err;
-sharp(buffer).metadata(function(err, metadata) {
-if (err) throw err;
-assert.strictEqual('undefined', typeof metadata.orientation);
-done();
-});
-});
+sharp(fixtures.inputJpgWithExif)
+.resize(320, 240)
+.withMetadata(false)
+.toBuffer(function(err, buffer) {
+if (err) throw err;
+sharp(buffer).metadata(function(err, metadata) {
+if (err) throw err;
+assert.strictEqual(false, metadata.hasProfile);
+assert.strictEqual('undefined', typeof metadata.orientation);
+done();
+});
+});
});
});

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
+sharp.cache(0);
describe('Resize dimensions', function() {
it('Exact crop', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
+sharp.cache(0);
describe('Rotation', function() {
it('Rotate by 90 degrees, respecting output input size', function(done) {

View File

@@ -5,9 +5,96 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
+sharp.cache(0);
describe('Sharpen', function() {
-it('sharpen image is larger than non-sharpen', function(done) {
+it('specific radius 10', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(10)
.toFile(fixtures.path('output.sharpen-10.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 3 and levels 0.5, 2.5', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(3, 0.5, 2.5)
.toFile(fixtures.path('output.sharpen-3-0.5-2.5.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 5 and levels 2, 4', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(5, 2, 4)
.toFile(fixtures.path('output.sharpen-5-2-4.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('mild sharpen', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen()
.toFile(fixtures.path('output.sharpen-mild.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('invalid radius', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1.5);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('invalid flat', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1, -1);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('invalid jagged', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1, 1, -1);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('sharpened image is larger than non-sharpened', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(false)
@@ -17,8 +104,9 @@ describe('Sharpen', function() {
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
-sharp(notSharpened)
-.sharpen()
+sharp(fixtures.inputJpg)
+.resize(320, 240)
+.sharpen(true)
.toBuffer(function(err, sharpened, info) {
if (err) throw err;
assert.strictEqual(true, sharpened.length > 0);