Compare commits

..

11 Commits

Author SHA1 Message Date
Lovell Fuller
177a4f574c Minimum version of libvips now 7.40.0 #74 2014-11-17 12:08:05 +00:00
Lovell Fuller
e22d093002 Ubuntu 14 now compiles 7.40.x from source as packaged version is the outdated 7.38.x

Added support for Ubuntu 15 and Mint 17.1
2014-11-12 20:30:13 +00:00
Lovell Fuller
e7f6d49bc1 Additional blur radii tests #108 2014-11-12 20:11:28 +00:00
Lovell Fuller
b886db4b0d Add bounds checks on blur/sharpen parameters #108 2014-11-12 20:06:28 +00:00
Lovell Fuller
ee513ac7a7 Less C, more C++ e.g. namespace, enum class
Improve image reference handling
2014-11-11 18:28:23 +00:00
Lovell Fuller
e465306d97 Disable libvips cache for unit tests
Aids memory leak detection
2014-11-11 18:09:48 +00:00
Lovell Fuller
32d9bc204a Add 'fast' blur and Gaussian blur feature #108 2014-11-10 22:38:13 +00:00
Lovell Fuller
df5cf402e3 Ensure leak check tests child processes 2014-11-10 22:35:22 +00:00
Lovell Fuller
86681100b7 Control level of sharpening via radius/flat/jagged #108 2014-11-10 16:20:04 +00:00
Lovell Fuller
47927ef47d Shrink less, affine more, maintain performance #75
Affects interpolators with 4x4+ window size

e.g. Bicubic, LBB, Nohalo

Introduces blur before large affine to improve large PNG reductions
2014-11-08 12:08:27 +00:00
Lovell Fuller
7537adf399 Add features from libvips 7.40+
Load TIFF from Buffer/Stream

Interlaced PNG output no longer needs tilecache

Option to disable PNG adaptive row filtering
2014-11-08 12:08:27 +00:00
27 changed files with 938 additions and 364 deletions


@@ -29,16 +29,16 @@ This module is powered by the blazingly fast [libvips](https://github.com/jcupit
### Prerequisites
* Node.js v0.10+
* [libvips](https://github.com/jcupitt/libvips) v7.38.5+
* [libvips](https://github.com/jcupitt/libvips) v7.40.0+ (7.42.0+ recommended)
To install the latest version of libvips on the following Operating Systems:
To install the most suitable version of libvips on the following Operating Systems:
* Mac OS
* Homebrew
* MacPorts
* Debian Linux
* Debian 7, 8
* Ubuntu 12.04, 14.04, 14.10
* Ubuntu 12.04, 14.04, 14.10, 15.04
* Mint 13, 17
* Red Hat Linux
* RHEL/Centos/Scientific 6, 7
@@ -214,12 +214,12 @@ sharp(inputBuffer)
Constructor to which further methods are chained. `input`, if present, can be one of:
* Buffer containing JPEG, PNG or WebP image data, or
* Buffer containing JPEG, PNG, WebP or TIFF image data, or
* String containing the filename of an image, with most major formats supported.
The object returned implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class.
JPEG, PNG or WebP format image data can be streamed into the object when `input` is not provided.
JPEG, PNG, WebP or TIFF format image data can be streamed into the object when `input` is not provided.
JPEG, PNG or WebP format image data can be streamed out from this object.
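As a brief illustration of the new TIFF Buffer input, here is a minimal usage sketch (file names are hypothetical and `sharp` is assumed to be installed as a dependency):

```javascript
var fs = require('fs');
var sharp = require('sharp');

// Read a TIFF into a Buffer and pass it to the constructor
var tiffBuffer = fs.readFileSync('input.tiff');

sharp(tiffBuffer)
  .resize(320, 240)
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    // info.format, info.width and info.height describe the output image
    console.log(info.format, info.width, info.height);
  });
```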
@@ -232,7 +232,7 @@ Fast access to image metadata without decoding any compressed image data.
* `format`: Name of decoder to be used to decompress image data e.g. `jpeg`, `png`, `webp` (for file-based input additionally `tiff` and `magick`)
* `width`: Number of pixels wide
* `height`: Number of pixels high
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L502)
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L522)
* `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* `orientation`: Number value of the EXIF Orientation header, if present
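A short sketch of reading these properties via the callback form of `metadata()` (the input file name is hypothetical):

```javascript
var sharp = require('sharp');

sharp('input.jpg').metadata(function(err, metadata) {
  if (err) throw err;
  console.log(metadata.format);                    // e.g. 'jpeg'
  console.log(metadata.width, metadata.height);    // pixel dimensions
  console.log(metadata.space, metadata.channels);  // e.g. 'srgb', 3
  console.log(metadata.hasAlpha, metadata.orientation);
});
```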
@@ -319,9 +319,23 @@ Do not enlarge the output image if the input image width *or* height are already
This is equivalent to GraphicsMagick's `>` geometry option: "change the dimensions of the image only if its width or height exceeds the geometry specification".
#### sharpen()
#### blur([radius])
Perform a mild sharpen of the output image. This typically reduces performance by 10%.
When used without parameters, performs a fast, mild blur of the output image. This typically reduces performance by 10%.
When a `radius` is provided, performs a slower, more accurate Gaussian blur. This typically reduces performance by 30%.
* `radius`, if present, is an integral Number representing the approximate blur mask radius in pixels.
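For example, a minimal sketch of both forms (input and output file names are hypothetical):

```javascript
var sharp = require('sharp');

// Fast, mild blur using the default 3x3 mask
sharp('input.jpg').resize(320, 240).blur()
  .toFile('output-mild-blur.jpg', function(err) {
    if (err) throw err;
  });

// Slower, more accurate Gaussian blur with a 5 pixel mask radius
sharp('input.jpg').resize(320, 240).blur(5)
  .toFile('output-gaussian-blur.jpg', function(err) {
    if (err) throw err;
  });
```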
#### sharpen([radius], [flat], [jagged])
When used without parameters, performs a fast, mild sharpen of the output image. This typically reduces performance by 10%.
When a `radius` is provided, performs a slower, more accurate sharpen of the L channel in the LAB colour space. Separate control over the level of sharpening in "flat" and "jagged" areas is available. This typically reduces performance by 50%.
* `radius`, if present, is an integral Number representing the sharpen mask radius in pixels.
* `flat`, if present, is a Number representing the level of sharpening to apply to "flat" areas, defaulting to a value of 1.0.
* `jagged`, if present, is a Number representing the level of sharpening to apply to "jagged" areas, defaulting to a value of 2.0.
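A corresponding sketch for sharpening, mirroring the `sharpen(3, 1, 3)` call used in the benchmark suite (the input file name is hypothetical):

```javascript
var fs = require('fs');
var sharp = require('sharp');

var inputJpgBuffer = fs.readFileSync('input.jpg');

// Fast, mild sharpen
sharp(inputJpgBuffer).resize(320, 240).sharpen()
  .toBuffer(function(err, buffer) {
    if (err) throw err;
  });

// Accurate LAB-space sharpen: radius 3, flat areas 1, jagged areas 3
sharp(inputJpgBuffer).resize(320, 240).sharpen(3, 1, 3)
  .toBuffer(function(err, buffer) {
    if (err) throw err;
  });
```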
#### interpolateWith(interpolator)
@@ -388,6 +402,12 @@ An advanced setting for the _zlib_ compression level of the lossless PNG output
`compressionLevel` is a Number between 0 and 9.
#### withoutAdaptiveFiltering()
_Requires libvips 7.41.0+_
An advanced and experimental PNG output setting to disable adaptive row filtering.
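The benchmark suite guards this call behind a libvips version check; a similar sketch for user code (file names are hypothetical, and the `semver` package is assumed to be available):

```javascript
var sharp = require('sharp');
var semver = require('semver');

// Only call withoutAdaptiveFiltering() when the underlying libvips supports it
if (semver.gte(sharp.libvipsVersion(), '7.41.0')) {
  sharp('input.png')
    .resize(320, 240)
    .withoutAdaptiveFiltering()
    .toFile('output.png', function(err) {
      if (err) throw err;
    });
}
```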
### Output methods
#### toFile(filename, [callback])

index.js

@@ -4,10 +4,12 @@ var path = require('path');
var util = require('util');
var stream = require('stream');
var semver = require('semver');
var color = require('color');
var BluebirdPromise = require('bluebird');
var sharp = require('./build/Release/sharp');
var libvipsVersion = sharp.libvipsVersion();
var Sharp = function(input) {
if (!(this instanceof Sharp)) {
@@ -41,7 +43,10 @@ var Sharp = function(input) {
// operations
background: [0, 0, 0, 255],
flatten: false,
sharpen: false,
blurRadius: 0,
sharpenRadius: 0,
sharpenFlat: 1,
sharpenJagged: 2,
gamma: 0,
greyscale: false,
// output options
@@ -49,6 +54,7 @@ var Sharp = function(input) {
progressive: false,
quality: 80,
compressionLevel: 6,
withoutAdaptiveFiltering: false,
streamOut: false,
withMetadata: false
};
@@ -58,14 +64,20 @@ var Sharp = function(input) {
} else if (typeof input === 'object' && input instanceof Buffer) {
// input=buffer
if (
(input.length > 1) &&
(input[0] === 0xff && input[1] === 0xd8) || // JPEG
(input[0] === 0x89 && input[1] === 0x50) || // PNG
(input[0] === 0x52 && input[1] === 0x49) // WebP
(input.length > 3) &&
// JPEG
(input[0] === 0xFF && input[1] === 0xD8) ||
// PNG
(input[0] === 0x89 && input[1] === 0x50) ||
// WebP
(input[0] === 0x52 && input[1] === 0x49) ||
// TIFF
(input[0] === 0x4D && input[1] === 0x4D && input[2] === 0x00 && (input[3] === 0x2A || input[3] === 0x2B)) ||
(input[0] === 0x49 && input[1] === 0x49 && (input[2] === 0x2A || input[2] === 0x2B) && input[3] === 0x00)
) {
this.options.bufferIn = input;
} else {
throw new Error('Buffer contains an unsupported image format. JPEG, PNG and WebP are currently supported.');
throw new Error('Buffer contains an unsupported image format. JPEG, PNG, WebP and TIFF are currently supported.');
}
} else {
// input=stream
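The added signature check accepts both TIFF byte orders. A small illustrative sketch of the magic bytes involved (these four-byte Buffers satisfy the format check but are of course not decodable images):

```javascript
// Little-endian ("II") TIFF header starts 0x49 0x49 0x2A 0x00
var littleEndianTiff = new Buffer([0x49, 0x49, 0x2A, 0x00]);
// Big-endian ("MM") TIFF header starts 0x4D 0x4D 0x00 0x2A
var bigEndianTiff = new Buffer([0x4D, 0x4D, 0x00, 0x2A]);
// BigTIFF variants replace 0x2A with 0x2B, which the check also allows
```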
@@ -126,16 +138,6 @@ Sharp.prototype.extract = function(topOffset, leftOffset, width, height) {
return this;
};
/*
Deprecated embed* methods, to be removed in v0.8.0
*/
Sharp.prototype.embedWhite = util.deprecate(function() {
return this.background('white').embed();
}, "embedWhite() is deprecated, use background('white').embed() instead");
Sharp.prototype.embedBlack = util.deprecate(function() {
return this.background('black').embed();
}, "embedBlack() is deprecated, use background('black').embed() instead");
/*
Set the background colour for embed and flatten operations.
Delegates to the 'Color' module, which can throw an Error
@@ -204,8 +206,64 @@ Sharp.prototype.withoutEnlargement = function(withoutEnlargement) {
return this;
};
Sharp.prototype.sharpen = function(sharpen) {
this.options.sharpen = (typeof sharpen === 'boolean') ? sharpen : true;
/*
Blur the output image.
Call without a radius to use a fast, mild blur.
Call with a radius to use a slower, more accurate Gaussian blur.
*/
Sharp.prototype.blur = function(radius) {
if (typeof radius === 'undefined') {
// No arguments: default to mild blur
this.options.blurRadius = -1;
} else if (typeof radius === 'boolean') {
// Boolean argument: apply mild blur?
this.options.blurRadius = radius ? -1 : 0;
} else if (typeof radius === 'number' && !Number.isNaN(radius) && (radius % 1 === 0) && radius >= 1) {
// Numeric argument: specific radius
this.options.blurRadius = radius;
} else {
throw new Error('Invalid blur radius ' + radius + ' (expected integer >= 1)');
}
return this;
};
/*
Sharpen the output image.
Call without a radius to use a fast, mild sharpen.
Call with a radius to use a slow, accurate sharpen using the L of LAB colour space.
radius - size of mask in pixels, must be integer
flat - level of "flat" area sharpen, default 1
jagged - level of "jagged" area sharpen, default 2
*/
Sharp.prototype.sharpen = function(radius, flat, jagged) {
if (typeof radius === 'undefined') {
// No arguments: default to mild sharpen
this.options.sharpenRadius = -1;
} else if (typeof radius === 'boolean') {
// Boolean argument: apply mild sharpen?
this.options.sharpenRadius = radius ? -1 : 0;
} else if (typeof radius === 'number' && !Number.isNaN(radius) && (radius % 1 === 0) && radius >= 1) {
// Numeric argument: specific radius
this.options.sharpenRadius = radius;
// Control over flat areas
if (typeof flat !== 'undefined' && flat !== null) {
if (typeof flat === 'number' && !Number.isNaN(flat) && flat >= 0) {
this.options.sharpenFlat = flat;
} else {
throw new Error('Invalid sharpen level for flat areas ' + flat + ' (expected >= 0)');
}
}
// Control over jagged areas
if (typeof jagged !== 'undefined' && jagged !== null) {
if (typeof jagged === 'number' && !Number.isNaN(jagged) && jagged >= 0) {
this.options.sharpenJagged = jagged;
} else {
throw new Error('Invalid sharpen level for jagged areas ' + jagged + ' (expected >= 0)');
}
}
} else {
throw new Error('Invalid sharpen radius ' + radius + ' (expected integer >= 1)');
}
return this;
};
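Given the validation above, a few illustrative calls on a pipeline (a sketch; the input file name is hypothetical):

```javascript
var sharp = require('sharp');

var pipeline = sharp('input.jpg').resize(320, 240);

pipeline.sharpen();        // mild sharpen (sharpenRadius becomes -1)
pipeline.sharpen(false);   // disable sharpening (sharpenRadius becomes 0)
pipeline.sharpen(3, 1, 3); // radius 3, flat level 1, jagged level 3

// Throws: 'Invalid sharpen radius 0.5 (expected integer >= 1)'
// pipeline.sharpen(0.5);
```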
@@ -269,6 +327,9 @@ Sharp.prototype.quality = function(quality) {
return this;
};
/*
zlib compression level for PNG output
*/
Sharp.prototype.compressionLevel = function(compressionLevel) {
if (!Number.isNaN(compressionLevel) && compressionLevel >= 0 && compressionLevel <= 9) {
this.options.compressionLevel = compressionLevel;
@@ -278,6 +339,18 @@ Sharp.prototype.compressionLevel = function(compressionLevel) {
return this;
};
/*
Disable the use of adaptive row filtering for PNG output - requires libvips 7.41.0+
*/
Sharp.prototype.withoutAdaptiveFiltering = function(withoutAdaptiveFiltering) {
if (semver.gte(libvipsVersion, '7.41.0')) {
this.options.withoutAdaptiveFiltering = (typeof withoutAdaptiveFiltering === 'boolean') ? withoutAdaptiveFiltering : true;
} else {
console.error('withoutAdaptiveFiltering requires libvips 7.41.0+');
}
return this;
};
Sharp.prototype.withMetadata = function(withMetadata) {
this.options.withMetadata = (typeof withMetadata === 'boolean') ? withMetadata : true;
return this;
@@ -506,3 +579,10 @@ module.exports.concurrency = function(concurrency) {
module.exports.counters = function() {
return sharp.counters();
};
/*
Get the version of the libvips library
*/
module.exports.libvipsVersion = function() {
return libvipsVersion;
};
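A trivial usage sketch of the new export:

```javascript
var sharp = require('sharp');

// Returns the linked libvips version as a 'major.minor.patch' string, e.g. '7.42.0'
console.log(sharp.libvipsVersion());
```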


@@ -1,6 +1,6 @@
{
"name": "sharp",
"version": "0.7.2",
"version": "0.8.0",
"author": "Lovell Fuller <npm@lovell.info>",
"contributors": [
"Pierre Inglebert <pierre.inglebert@gmail.com>",
@@ -13,7 +13,7 @@
"Brandon Aaron <hello.brandon@aaron.sh>",
"Andreas Lind <andreas@one.com>"
],
"description": "High performance Node.js module to resize JPEG, PNG and WebP images using the libvips library",
"description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library",
"scripts": {
"test": "node ./node_modules/istanbul/lib/cli.js cover ./node_modules/mocha/bin/_mocha -- --slow=5000 --timeout=10000 ./test/unit/*.js"
},
@@ -36,14 +36,13 @@
"embed",
"libvips",
"vips",
"fast",
"buffer",
"stream"
],
"dependencies": {
"bluebird": "^2.3.10",
"bluebird": "^2.3.11",
"color": "^0.7.1",
"nan": "^1.4.0"
"nan": "^1.4.1",
"semver": "^4.1.0"
},
"devDependencies": {
"mocha": "^2.0.1",


@@ -5,13 +5,13 @@
# * Mac OS
# * Debian Linux
# * Debian 7, 8
# * Ubuntu 12.04, 14.04, 14.10
# * Ubuntu 12.04, 14.04, 14.10, 15.04
# * Mint 13, 17
# * Red Hat Linux
# * RHEL/Centos/Scientific 6, 7
# * Fedora 21, 22
vips_version_minimum=7.38.5
vips_version_minimum=7.40.0
vips_version_latest_major=7.40
vips_version_latest_minor=11
@@ -86,11 +86,17 @@ case $(uname -s) in
DISTRO=$(lsb_release -c -s)
echo "Detected Debian Linux '$DISTRO'"
case "$DISTRO" in
jessie|trusty|utopic|qiana)
# Debian 8, Ubuntu 14, Mint 17
jessie|vivid)
# Debian 8, Ubuntu 15
echo "Installing libvips via apt-get"
apt-get install -y libvips-dev
;;
trusty|utopic|qiana|rebecca)
# Ubuntu 14, Mint 17
echo "Installing libvips dependencies via apt-get"
apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-turbo8-dev libpng12-dev libwebp-dev libtiff5-dev libexif-dev libxml2-dev swig libmagickwand-dev curl
install_libvips_from_source
;;
precise|wheezy|maya)
# Debian 7, Ubuntu 12.04, Mint 13
echo "Installing libvips dependencies via apt-get"


@@ -4,96 +4,155 @@
#include "common.h"
// How many tasks are in the queue?
volatile int counter_queue = 0;
namespace sharp {
// How many tasks are being processed?
volatile int counter_process = 0;
// How many tasks are in the queue?
volatile int counterQueue = 0;
// Filename extension checkers
static bool ends_with(std::string const &str, std::string const &end) {
return str.length() >= end.length() && 0 == str.compare(str.length() - end.length(), end.length(), end);
}
bool is_jpeg(std::string const &str) {
return ends_with(str, ".jpg") || ends_with(str, ".jpeg") || ends_with(str, ".JPG") || ends_with(str, ".JPEG");
}
bool is_png(std::string const &str) {
return ends_with(str, ".png") || ends_with(str, ".PNG");
}
bool is_webp(std::string const &str) {
return ends_with(str, ".webp") || ends_with(str, ".WEBP");
}
bool is_tiff(std::string const &str) {
return ends_with(str, ".tif") || ends_with(str, ".tiff") || ends_with(str, ".TIF") || ends_with(str, ".TIFF");
}
// How many tasks are being processed?
volatile int counterProcess = 0;
unsigned char const MARKER_JPEG[] = {0xff, 0xd8};
unsigned char const MARKER_PNG[] = {0x89, 0x50};
unsigned char const MARKER_WEBP[] = {0x52, 0x49};
/*
Initialise a VipsImage from a buffer. Supports JPEG, PNG and WebP.
Returns the ImageType detected, if any.
*/
ImageType
sharp_init_image_from_buffer(VipsImage **image, void *buffer, size_t const length, VipsAccess const access) {
ImageType imageType = UNKNOWN;
if (memcmp(MARKER_JPEG, buffer, 2) == 0) {
if (!vips_jpegload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = JPEG;
}
} else if(memcmp(MARKER_PNG, buffer, 2) == 0) {
if (!vips_pngload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = PNG;
}
} else if(memcmp(MARKER_WEBP, buffer, 2) == 0) {
if (!vips_webpload_buffer(buffer, length, image, "access", access, NULL)) {
imageType = WEBP;
}
// Filename extension checkers
static bool EndsWith(std::string const &str, std::string const &end) {
return str.length() >= end.length() && 0 == str.compare(str.length() - end.length(), end.length(), end);
}
return imageType;
}
/*
Initialise a VipsImage from a file.
Returns the ImageType detected, if any.
*/
ImageType
sharp_init_image_from_file(VipsImage **image, char const *file, VipsAccess const access) {
ImageType imageType = UNKNOWN;
if (vips_foreign_is_a("jpegload", file)) {
if (!vips_jpegload(file, image, "access", access, NULL)) {
imageType = JPEG;
}
} else if (vips_foreign_is_a("pngload", file)) {
if (!vips_pngload(file, image, "access", access, NULL)) {
imageType = PNG;
}
} else if (vips_foreign_is_a("webpload", file)) {
if (!vips_webpload(file, image, "access", access, NULL)) {
imageType = WEBP;
}
} else if (vips_foreign_is_a("tiffload", file)) {
if (!vips_tiffload(file, image, "access", access, NULL)) {
imageType = TIFF;
}
} else if(vips_foreign_is_a("magickload", file)) {
if (!vips_magickload(file, image, "access", access, NULL)) {
imageType = MAGICK;
}
bool IsJpeg(std::string const &str) {
return EndsWith(str, ".jpg") || EndsWith(str, ".jpeg") || EndsWith(str, ".JPG") || EndsWith(str, ".JPEG");
}
bool IsPng(std::string const &str) {
return EndsWith(str, ".png") || EndsWith(str, ".PNG");
}
bool IsWebp(std::string const &str) {
return EndsWith(str, ".webp") || EndsWith(str, ".WEBP");
}
bool IsTiff(std::string const &str) {
return EndsWith(str, ".tif") || EndsWith(str, ".tiff") || EndsWith(str, ".TIF") || EndsWith(str, ".TIFF");
}
return imageType;
}
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool
sharp_image_has_alpha(VipsImage *image) {
return (
(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) ||
(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) ||
(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK)
);
}
// Buffer content checkers
unsigned char const MARKER_JPEG[] = {0xff, 0xd8};
unsigned char const MARKER_PNG[] = {0x89, 0x50};
unsigned char const MARKER_WEBP[] = {0x52, 0x49};
static bool buffer_is_tiff(char *buffer, size_t len) {
return (
len >= 4 && (
(buffer[0] == 'M' && buffer[1] == 'M' && buffer[2] == '\0' && (buffer[3] == '*' || buffer[3] == '+')) ||
(buffer[0] == 'I' && buffer[1] == 'I' && (buffer[2] == '*' || buffer[2] == '+') && buffer[3] == '\0')
)
);
}
/*
Determine image format of a buffer.
*/
ImageType DetermineImageType(void *buffer, size_t const length) {
ImageType imageType = ImageType::UNKNOWN;
if (length >= 4) {
if (memcmp(MARKER_JPEG, buffer, 2) == 0) {
imageType = ImageType::JPEG;
} else if (memcmp(MARKER_PNG, buffer, 2) == 0) {
imageType = ImageType::PNG;
} else if (memcmp(MARKER_WEBP, buffer, 2) == 0) {
imageType = ImageType::WEBP;
} else if (buffer_is_tiff(static_cast<char*>(buffer), length)) {
imageType = ImageType::TIFF;
}
}
return imageType;
}
/*
Initialise and return a VipsImage from a buffer. Supports JPEG, PNG, WebP and TIFF.
*/
VipsImage* InitImage(ImageType imageType, void *buffer, size_t const length, VipsAccess const access) {
VipsImage *image = NULL;
if (imageType == ImageType::JPEG) {
vips_jpegload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::PNG) {
vips_pngload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::WEBP) {
vips_webpload_buffer(buffer, length, &image, "access", access, NULL);
} else if (imageType == ImageType::TIFF) {
vips_tiffload_buffer(buffer, length, &image, "access", access, NULL);
}
return image;
}
/*
Inspect the first 2-4 bytes of a file to determine image format
*/
ImageType DetermineImageType(char const *file) {
ImageType imageType = ImageType::UNKNOWN;
if (vips_foreign_is_a("jpegload", file)) {
imageType = ImageType::JPEG;
} else if (vips_foreign_is_a("pngload", file)) {
imageType = ImageType::PNG;
} else if (vips_foreign_is_a("webpload", file)) {
imageType = ImageType::WEBP;
} else if (vips_foreign_is_a("tiffload", file)) {
imageType = ImageType::TIFF;
} else if(vips_foreign_is_a("magickload", file)) {
imageType = ImageType::MAGICK;
}
return imageType;
}
/*
Initialise and return a VipsImage from a file.
*/
VipsImage* InitImage(ImageType imageType, char const *file, VipsAccess const access) {
VipsImage *image = NULL;
if (imageType == ImageType::JPEG) {
vips_jpegload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::PNG) {
vips_pngload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::WEBP) {
vips_webpload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::TIFF) {
vips_tiffload(file, &image, "access", access, NULL);
} else if (imageType == ImageType::MAGICK) {
vips_magickload(file, &image, "access", access, NULL);
}
return image;
}
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool HasAlpha(VipsImage *image) {
return (
(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) ||
(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) ||
(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK)
);
}
/*
Get EXIF Orientation of image, if any.
*/
int ExifOrientation(VipsImage const *image) {
int orientation = 0;
const char *exif;
if (
vips_image_get_typeof(image, "exif-ifd0-Orientation") != 0 &&
!vips_image_get_string(image, "exif-ifd0-Orientation", &exif)
) {
orientation = atoi(&exif[0]);
}
return orientation;
}
/*
Returns the window size for the named interpolator. For example,
a window size of 3 means a 3x3 pixel grid is used for the calculation.
*/
int InterpolatorWindowSize(char const *name) {
VipsInterpolate *interpolator = vips_interpolate_new(name);
int window_size = vips_interpolate_get_window_size(interpolator);
g_object_unref(interpolator);
return window_size;
}
} // namespace


@@ -1,46 +1,66 @@
#ifndef SHARP_COMMON_H
#define SHARP_COMMON_H
typedef enum {
UNKNOWN,
JPEG,
PNG,
WEBP,
TIFF,
MAGICK
} ImageType;
namespace sharp {
// Filename extension checkers
bool is_jpeg(std::string const &str);
bool is_png(std::string const &str);
bool is_webp(std::string const &str);
bool is_tiff(std::string const &str);
enum class ImageType {
UNKNOWN,
JPEG,
PNG,
WEBP,
TIFF,
MAGICK
};
// How many tasks are in the queue?
extern volatile int counter_queue;
// How many tasks are in the queue?
extern volatile int counterQueue;
// How many tasks are being processed?
extern volatile int counter_process;
// How many tasks are being processed?
extern volatile int counterProcess;
/*
Initialise a VipsImage from a buffer. Supports JPEG, PNG and WebP.
Returns the ImageType detected, if any.
*/
ImageType
sharp_init_image_from_buffer(VipsImage **image, void *buffer, size_t const length, VipsAccess const access);
// Filename extension checkers
bool IsJpeg(std::string const &str);
bool IsPng(std::string const &str);
bool IsWebp(std::string const &str);
bool IsTiff(std::string const &str);
/*
Initialise a VipsImage from a file.
Returns the ImageType detected, if any.
*/
ImageType
sharp_init_image_from_file(VipsImage **image, char const *file, VipsAccess const access);
/*
Determine image format of a buffer.
*/
ImageType DetermineImageType(void *buffer, size_t const length);
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool
sharp_image_has_alpha(VipsImage *image);
/*
Determine image format of a file.
*/
ImageType DetermineImageType(char const *file);
/*
Initialise and return a VipsImage from a buffer. Supports JPEG, PNG, WebP and TIFF.
*/
VipsImage* InitImage(ImageType imageType, void *buffer, size_t const length, VipsAccess const access);
/*
Initialise and return a VipsImage from a file.
*/
VipsImage* InitImage(ImageType imageType, char const *file, VipsAccess const access);
/*
Does this image have an alpha channel?
Uses colour space interpretation with number of channels to guess this.
*/
bool HasAlpha(VipsImage *image);
/*
Get EXIF Orientation of image, if any.
*/
int ExifOrientation(VipsImage const *image);
/*
Returns the window size for the named interpolator. For example,
a window size of 3 means a 3x3 pixel grid is used for the calculation.
*/
int InterpolatorWindowSize(char const *name);
} // namespace
#endif


@@ -7,6 +7,7 @@
#include "metadata.h"
using namespace v8;
using namespace sharp;
struct MetadataBaton {
// Input
@@ -36,49 +37,49 @@ class MetadataWorker : public NanAsyncWorker {
void Execute() {
// Decrement queued task counter
g_atomic_int_dec_and_test(&counter_queue);
g_atomic_int_dec_and_test(&counterQueue);
ImageType imageType = UNKNOWN;
VipsImage *image;
ImageType imageType = ImageType::UNKNOWN;
VipsImage *image = NULL;
if (baton->bufferInLength > 1) {
// From buffer
imageType = sharp_init_image_from_buffer(&image, baton->bufferIn, baton->bufferInLength, VIPS_ACCESS_RANDOM);
if (imageType == UNKNOWN) {
imageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
if (imageType != ImageType::UNKNOWN) {
image = InitImage(imageType, baton->bufferIn, baton->bufferInLength, VIPS_ACCESS_RANDOM);
} else {
(baton->err).append("Input buffer contains unsupported image format");
}
} else {
// From file
imageType = sharp_init_image_from_file(&image, baton->fileIn.c_str(), VIPS_ACCESS_RANDOM);
if (imageType == UNKNOWN) {
imageType = DetermineImageType(baton->fileIn.c_str());
if (imageType != ImageType::UNKNOWN) {
image = InitImage(imageType, baton->fileIn.c_str(), VIPS_ACCESS_RANDOM);
} else {
(baton->err).append("File is of an unsupported image format");
}
}
if (imageType != UNKNOWN) {
if (image != NULL && imageType != ImageType::UNKNOWN) {
// Image type
switch (imageType) {
case JPEG: baton->format = "jpeg"; break;
case PNG: baton->format = "png"; break;
case WEBP: baton->format = "webp"; break;
case TIFF: baton->format = "tiff"; break;
case MAGICK: baton->format = "magick"; break;
case UNKNOWN: default: baton->format = "";
case ImageType::JPEG: baton->format = "jpeg"; break;
case ImageType::PNG: baton->format = "png"; break;
case ImageType::WEBP: baton->format = "webp"; break;
case ImageType::TIFF: baton->format = "tiff"; break;
case ImageType::MAGICK: baton->format = "magick"; break;
case ImageType::UNKNOWN: break;
}
// VipsImage attributes
baton->width = image->Xsize;
baton->height = image->Ysize;
baton->space = vips_enum_nick(VIPS_TYPE_INTERPRETATION, image->Type);
baton->channels = image->Bands;
baton->hasAlpha = sharp_image_has_alpha(image);
// EXIF Orientation
const char *exif;
if (!vips_image_get_string(image, "exif-ifd0-Orientation", &exif)) {
baton->orientation = atoi(&exif[0]);
}
}
// Clean up
if (imageType != UNKNOWN) {
// Derived attributes
baton->hasAlpha = HasAlpha(image);
baton->orientation = ExifOrientation(image);
// Drop image reference
g_object_unref(image);
}
// Clean up
vips_error_clear();
vips_thread_shutdown();
}
@@ -138,7 +139,7 @@ NAN_METHOD(metadata) {
NanAsyncQueueWorker(new MetadataWorker(callback, baton));
// Increment queued task counter
g_atomic_int_inc(&counter_queue);
g_atomic_int_inc(&counterQueue);
NanReturnUndefined();
}


@@ -11,20 +11,21 @@
#include "resize.h"
using namespace v8;
using namespace sharp;
typedef enum {
enum class Canvas {
CROP,
MAX,
EMBED
} Canvas;
};
typedef enum {
ANGLE_0,
ANGLE_90,
ANGLE_180,
ANGLE_270,
ANGLE_LAST
} Angle;
enum class Angle {
D0,
D90,
D180,
D270,
DLAST
};
struct ResizeBaton {
std::string fileIn;
@@ -50,7 +51,10 @@ struct ResizeBaton {
std::string interpolator;
double background[4];
bool flatten;
bool sharpen;
int blurRadius;
int sharpenRadius;
double sharpenFlat;
double sharpenJagged;
double gamma;
bool greyscale;
int angle;
@@ -61,6 +65,7 @@ struct ResizeBaton {
VipsAccess accessMethod;
int quality;
int compressionLevel;
bool withoutAdaptiveFiltering;
std::string err;
bool withMetadata;
@@ -70,16 +75,23 @@ struct ResizeBaton {
bufferOutLength(0),
topOffsetPre(-1),
topOffsetPost(-1),
canvas(CROP),
canvas(Canvas::CROP),
gravity(0),
flatten(false),
sharpen(false),
blurRadius(0),
sharpenRadius(0),
sharpenFlat(1.0),
sharpenJagged(2.0),
gamma(0.0),
greyscale(false),
angle(0),
flip(false),
flop(false),
progressive(false),
withoutEnlargement(false),
quality(80),
compressionLevel(6),
withoutAdaptiveFiltering(false),
withMetadata(false) {
background[0] = 0.0;
background[1] = 0.0;
@@ -99,43 +111,45 @@ class ResizeWorker : public NanAsyncWorker {
*/
void Execute() {
// Decrement queued task counter
g_atomic_int_dec_and_test(&counter_queue);
g_atomic_int_dec_and_test(&counterQueue);
// Increment processing task counter
g_atomic_int_inc(&counter_process);
g_atomic_int_inc(&counterProcess);
// Hang image references from this hook object
VipsObject *hook = reinterpret_cast<VipsObject*>(vips_image_new());
// Input
ImageType inputImageType = UNKNOWN;
VipsImage *image = vips_image_new();
vips_object_local(hook, image);
ImageType inputImageType = ImageType::UNKNOWN;
VipsImage *image;
if (baton->bufferInLength > 1) {
// From buffer
inputImageType = sharp_init_image_from_buffer(&image, baton->bufferIn, baton->bufferInLength, baton->accessMethod);
if (inputImageType == UNKNOWN) {
inputImageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
if (inputImageType != ImageType::UNKNOWN) {
image = InitImage(inputImageType, baton->bufferIn, baton->bufferInLength, baton->accessMethod);
} else {
(baton->err).append("Input buffer contains unsupported image format");
}
} else {
// From file
inputImageType = sharp_init_image_from_file(&image, baton->fileIn.c_str(), baton->accessMethod);
if (inputImageType == UNKNOWN) {
inputImageType = DetermineImageType(baton->fileIn.c_str());
if (inputImageType != ImageType::UNKNOWN) {
image = InitImage(inputImageType, baton->fileIn.c_str(), baton->accessMethod);
} else {
(baton->err).append("File is of an unsupported image format");
}
}
if (inputImageType == UNKNOWN) {
if (inputImageType == ImageType::UNKNOWN) {
return Error(baton, hook);
}
vips_object_local(hook, image);
// Pre extraction
if (baton->topOffsetPre != -1) {
VipsImage *extractedPre = vips_image_new();
vips_object_local(hook, extractedPre);
VipsImage *extractedPre;
if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, extractedPre);
image = extractedPre;
}
@@ -147,7 +161,7 @@ class ResizeWorker : public NanAsyncWorker {
Angle rotation;
bool flip;
std::tie(rotation, flip) = CalculateRotationAndFlip(baton->angle, image);
if (rotation == ANGLE_90 || rotation == ANGLE_270) {
if (rotation == Angle::D90 || rotation == Angle::D270) {
// Swap input output width and height when rotating by 90 or 270 degrees
int swap = inputWidth;
inputWidth = inputHeight;
@@ -158,15 +172,18 @@ class ResizeWorker : public NanAsyncWorker {
baton->flip = TRUE;
}
// Get window size of interpolator, used for determining shrink vs affine
int interpolatorWindowSize = InterpolatorWindowSize(baton->interpolator.c_str());
// Scaling calculations
double factor;
if (baton->width > 0 && baton->height > 0) {
// Fixed width and height
double xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
double yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
factor = (baton->canvas == CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
factor = (baton->canvas == Canvas::CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
// if max is set, we need to compute the real size of the thumb image
if (baton->canvas == MAX) {
if (baton->canvas == Canvas::MAX) {
if (xfactor > yfactor) {
baton->height = round(static_cast<double>(inputHeight) / xfactor);
} else {
@@ -187,10 +204,20 @@ class ResizeWorker : public NanAsyncWorker {
baton->width = inputWidth;
baton->height = inputHeight;
}
int shrink = floor(factor);
// Calculate integral box shrink
int shrink = 1;
if (factor >= 2 && interpolatorWindowSize > 3) {
// Shrink less, affine more with interpolators that use at least 4x4 pixel window, e.g. bicubic
shrink = floor(factor * 3.0 / interpolatorWindowSize);
} else {
shrink = floor(factor);
}
if (shrink < 1) {
shrink = 1;
}
// Calculate residual float affine transformation
double residual = static_cast<double>(shrink) / factor;
// Do not enlarge the output if the input width *or* height are already less than the required dimensions
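To make the new shrink/affine split concrete with hypothetical numbers: reducing a 3200x2400 input to 320x240 with a bicubic interpolator (4x4 window) gives factor = 3200/320 = 10, so shrink = floor(10 * 3 / 4) = 7 and residual = 7/10 = 0.7; vips_shrink then performs the integral 7x box reduction and vips_affine handles the remaining 0.7x scale, rather than box-shrinking the full 10x as the previous floor(factor) calculation would have.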
@@ -206,7 +233,7 @@ class ResizeWorker : public NanAsyncWorker {
// Try to use libjpeg shrink-on-load, but not when applying gamma correction or pre-resize extract
int shrink_on_load = 1;
if (inputImageType == JPEG && baton->gamma == 0 && baton->topOffsetPre == -1) {
if (inputImageType == ImageType::JPEG && shrink >= 2 && baton->gamma == 0 && baton->topOffsetPre == -1) {
if (shrink >= 8) {
factor = factor / 8;
shrink_on_load = 8;
@@ -221,55 +248,58 @@ class ResizeWorker : public NanAsyncWorker {
if (shrink_on_load > 1) {
// Recalculate integral shrink and double residual
factor = std::max(factor, 1.0);
shrink = floor(factor);
if (factor >= 2 && interpolatorWindowSize > 3) {
shrink = floor(factor * 3.0 / interpolatorWindowSize);
} else {
shrink = floor(factor);
}
residual = static_cast<double>(shrink) / factor;
// Reload input using shrink-on-load
g_object_unref(image);
VipsImage *shrunkOnLoad;
if (baton->bufferInLength > 1) {
if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &image, "shrink", shrink_on_load, NULL)) {
if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
return Error(baton, hook);
}
} else {
if (vips_jpegload((baton->fileIn).c_str(), &image, "shrink", shrink_on_load, NULL)) {
if (vips_jpegload((baton->fileIn).c_str(), &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
return Error(baton, hook);
}
}
vips_object_local(hook, shrunkOnLoad);
image = shrunkOnLoad;
}
// Handle colour profile, if any, for non sRGB images
if (image->Type != VIPS_INTERPRETATION_sRGB) {
// Get the input colour profile
if (vips_image_get_typeof(image, VIPS_META_ICC_NAME)) {
VipsImage *profile;
// Use embedded profile
VipsImage *profile = vips_image_new();
vips_object_local(hook, profile);
if (vips_icc_import(image, &profile, "pcs", VIPS_PCS_XYZ, "embedded", TRUE, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, profile);
image = profile;
} else if (image->Type == VIPS_INTERPRETATION_CMYK) {
VipsImage *profile;
// CMYK with no embedded profile
VipsImage *profile = vips_image_new();
vips_object_local(hook, profile);
if (vips_icc_import(image, &profile, "pcs", VIPS_PCS_XYZ, "input_profile", (baton->iccProfileCmyk).c_str(), NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, profile);
image = profile;
}
// Attempt to convert to sRGB colour space
VipsImage *colourspaced = vips_image_new();
vips_object_local(hook, colourspaced);
VipsImage *colourspaced;
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, colourspaced);
image = colourspaced;
}
// Flatten image to remove alpha channel
if (baton->flatten && sharp_image_has_alpha(image)) {
if (baton->flatten && HasAlpha(image)) {
// Background colour
VipsArrayDouble *background = vips_array_double_newv(
3, // Ignore alpha channel as we're about to remove it
@@ -277,52 +307,48 @@ class ResizeWorker : public NanAsyncWorker {
baton->background[1],
baton->background[2]
);
VipsImage *flattened = vips_image_new();
vips_object_local(hook, flattened);
VipsImage *flattened;
if (vips_flatten(image, &flattened, "background", background, NULL)) {
vips_area_unref(reinterpret_cast<VipsArea*>(background));
return Error(baton, hook);
};
vips_area_unref(reinterpret_cast<VipsArea*>(background));
g_object_unref(image);
vips_object_local(hook, flattened);
image = flattened;
}
// Gamma encoding (darken)
if (baton->gamma >= 1 && baton->gamma <= 3) {
VipsImage *gammaEncoded = vips_image_new();
vips_object_local(hook, gammaEncoded);
VipsImage *gammaEncoded;
if (vips_gamma(image, &gammaEncoded, "exponent", 1.0 / baton->gamma, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, gammaEncoded);
image = gammaEncoded;
}
// Convert to greyscale (linear, therefore after gamma encoding, if any)
if (baton->greyscale) {
VipsImage *greyscale = vips_image_new();
vips_object_local(hook, greyscale);
VipsImage *greyscale;
if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, greyscale);
image = greyscale;
}
if (shrink > 1) {
VipsImage *shrunk = vips_image_new();
vips_object_local(hook, shrunk);
VipsImage *shrunk;
// Use vips_shrink with the integral reduction
if (vips_shrink(image, &shrunk, shrink, shrink, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, shrunk);
image = shrunk;
// Recalculate residual float based on dimensions of required vs shrunk images
double shrunkWidth = shrunk->Xsize;
double shrunkHeight = shrunk->Ysize;
if (rotation == ANGLE_90 || rotation == ANGLE_270) {
if (rotation == Angle::D90 || rotation == Angle::D270) {
// Swap input output width and height when rotating by 90 or 270 degrees
int swap = shrunkWidth;
shrunkWidth = shrunkHeight;
@@ -330,7 +356,7 @@ class ResizeWorker : public NanAsyncWorker {
}
double residualx = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth);
double residualy = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight);
if (baton->canvas == EMBED) {
if (baton->canvas == Canvas::EMBED) {
residual = std::min(residualx, residualy);
} else {
residual = std::max(residualx, residualy);
@@ -339,90 +365,90 @@ class ResizeWorker : public NanAsyncWorker {
// Use vips_affine with the remaining float part
if (residual != 0) {
VipsImage *affined = vips_image_new();
vips_object_local(hook, affined);
// Apply variable blur radius of floor(residual) before large affine reductions
if (residual >= 1) {
VipsImage *blurred;
if (vips_gaussblur(image, &blurred, floor(residual), NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, blurred);
image = blurred;
}
// Create interpolator - "bilinear" (default), "bicubic" or "nohalo"
VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.c_str());
vips_object_local(hook, interpolator);
// Perform affine transformation
VipsImage *affined;
if (vips_affine(image, &affined, residual, 0, 0, residual, "interpolate", interpolator, NULL)) {
g_object_unref(interpolator);
return Error(baton, hook);
}
g_object_unref(interpolator);
g_object_unref(image);
vips_object_local(hook, affined);
image = affined;
}
// Rotate
if (rotation != ANGLE_0) {
VipsImage *rotated = vips_image_new();
vips_object_local(hook, rotated);
if (rotation != Angle::D0) {
VipsImage *rotated;
if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, rotated);
image = rotated;
}
// Flip (mirror about Y axis)
if (baton->flip) {
VipsImage *flipped = vips_image_new();
vips_object_local(hook, flipped);
VipsImage *flipped;
if (vips_flip(image, &flipped, VIPS_DIRECTION_VERTICAL, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, flipped);
image = flipped;
}
// Flop (mirror about X axis)
if (baton->flop) {
VipsImage *flopped = vips_image_new();
vips_object_local(hook, flopped);
VipsImage *flopped;
if (vips_flip(image, &flopped, VIPS_DIRECTION_HORIZONTAL, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, flopped);
image = flopped;
}
// Crop/embed
if (image->Xsize != baton->width || image->Ysize != baton->height) {
if (baton->canvas == EMBED) {
if (baton->canvas == Canvas::EMBED) {
// Match background colour space, namely sRGB
if (image->Type != VIPS_INTERPRETATION_sRGB) {
// Convert to sRGB colour space
VipsImage *colourspaced = vips_image_new();
vips_object_local(hook, colourspaced);
VipsImage *colourspaced;
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, colourspaced);
image = colourspaced;
}
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !sharp_image_has_alpha(image)) {
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
// Create single-channel transparency
VipsImage *black = vips_image_new();
vips_object_local(hook, black);
VipsImage *black;
if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, NULL)) {
return Error(baton, hook);
}
vips_object_local(hook, black);
// Invert to become non-transparent
VipsImage *alpha = vips_image_new();
vips_object_local(hook, alpha);
VipsImage *alpha;
if (vips_invert(black, &alpha, NULL)) {
return Error(baton, hook);
}
g_object_unref(black);
vips_object_local(hook, alpha);
// Append alpha channel to existing image
VipsImage *joined = vips_image_new();
vips_object_local(hook, joined);
VipsImage *joined;
if (vips_bandjoin2(image, alpha, &joined, NULL)) {
return Error(baton, hook);
}
g_object_unref(alpha);
g_object_unref(image);
vips_object_local(hook, joined);
image = joined;
}
// Create background
@@ -439,8 +465,7 @@ class ResizeWorker : public NanAsyncWorker {
// Embed
int left = (baton->width - image->Xsize) / 2;
int top = (baton->height - image->Ysize) / 2;
VipsImage *embedded = vips_image_new();
vips_object_local(hook, embedded);
VipsImage *embedded;
if (vips_embed(image, &embedded, left, top, baton->width, baton->height,
"extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL
)) {
@@ -448,7 +473,7 @@ class ResizeWorker : public NanAsyncWorker {
return Error(baton, hook);
}
vips_area_unref(reinterpret_cast<VipsArea*>(background));
g_object_unref(image);
vips_object_local(hook, embedded);
image = embedded;
} else {
// Crop/max
@@ -457,93 +482,131 @@ class ResizeWorker : public NanAsyncWorker {
std::tie(left, top) = CalculateCrop(image->Xsize, image->Ysize, baton->width, baton->height, baton->gravity);
int width = std::min(image->Xsize, baton->width);
int height = std::min(image->Ysize, baton->height);
VipsImage *extracted = vips_image_new();
vips_object_local(hook, extracted);
VipsImage *extracted;
if (vips_extract_area(image, &extracted, left, top, width, height, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, extracted);
image = extracted;
}
}
// Post extraction
if (baton->topOffsetPost != -1) {
VipsImage *extractedPost = vips_image_new();
vips_object_local(hook, extractedPost);
VipsImage *extractedPost;
if (vips_extract_area(image, &extractedPost, baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, extractedPost);
image = extractedPost;
}
// Mild sharpen
if (baton->sharpen) {
VipsImage *sharpened = vips_image_new();
vips_object_local(hook, sharpened);
VipsImage *sharpen = vips_image_new_matrixv(3, 3,
-1.0, -1.0, -1.0,
-1.0, 32.0, -1.0,
-1.0, -1.0, -1.0);
vips_image_set_double(sharpen, "scale", 24);
vips_object_local(hook, sharpen);
if (vips_conv(image, &sharpened, sharpen, NULL)) {
return Error(baton, hook);
// Blur
if (baton->blurRadius != 0) {
VipsImage *blurred;
if (baton->blurRadius == -1) {
// Fast, mild blur
VipsImage *blur = vips_image_new_matrixv(3, 3,
1.0, 1.0, 1.0,
1.0, 1.0, 1.0,
1.0, 1.0, 1.0);
vips_image_set_double(blur, "scale", 9);
vips_object_local(hook, blur);
if (vips_conv(image, &blurred, blur, NULL)) {
return Error(baton, hook);
}
} else {
// Slower, accurate Gaussian blur
if (vips_gaussblur(image, &blurred, baton->blurRadius, NULL)) {
return Error(baton, hook);
}
}
g_object_unref(image);
vips_object_local(hook, blurred);
image = blurred;
}
// Sharpen
if (baton->sharpenRadius != 0) {
VipsImage *sharpened;
if (baton->sharpenRadius == -1) {
// Fast, mild sharpen
VipsImage *sharpen = vips_image_new_matrixv(3, 3,
-1.0, -1.0, -1.0,
-1.0, 32.0, -1.0,
-1.0, -1.0, -1.0);
vips_image_set_double(sharpen, "scale", 24);
vips_object_local(hook, sharpen);
if (vips_conv(image, &sharpened, sharpen, NULL)) {
return Error(baton, hook);
}
} else {
// Slow, accurate sharpen in LAB colour space, with control over flat vs jagged areas
if (vips_sharpen(image, &sharpened, "radius", baton->sharpenRadius, "m1", baton->sharpenFlat, "m2", baton->sharpenJagged, NULL)) {
return Error(baton, hook);
}
}
vips_object_local(hook, sharpened);
image = sharpened;
}
// Gamma decoding (brighten)
if (baton->gamma >= 1 && baton->gamma <= 3) {
VipsImage *gammaDecoded = vips_image_new();
vips_object_local(hook, gammaDecoded);
VipsImage *gammaDecoded;
if (vips_gamma(image, &gammaDecoded, "exponent", baton->gamma, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, gammaDecoded);
image = gammaDecoded;
}
// Convert to sRGB colour space, if not already
if (image->Type != VIPS_INTERPRETATION_sRGB) {
VipsImage *colourspaced = vips_image_new();
vips_object_local(hook, colourspaced);
VipsImage *colourspaced;
if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, colourspaced);
image = colourspaced;
}
// Generate image tile cache when interlace output is required
#if !(VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 40 && VIPS_MINOR_VERSION >= 5)
// Generate image tile cache when interlace output is required - no longer required as of libvips 7.40.5+
if (baton->progressive) {
VipsImage *cached = vips_image_new();
vips_object_local(hook, cached);
VipsImage *cached;
if (vips_tilecache(image, &cached, "threaded", TRUE, "persistent", TRUE, "max_tiles", -1, NULL)) {
return Error(baton, hook);
}
g_object_unref(image);
vips_object_local(hook, cached);
image = cached;
}
#endif
// Output
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == JPEG)) {
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == ImageType::JPEG)) {
// Write JPEG to buffer
if (vips_jpegsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return Error(baton, hook);
}
baton->outputFormat = "jpeg";
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == PNG)) {
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == ImageType::PNG)) {
#if (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 41)
// Select PNG row filter
int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
// Write PNG to buffer
if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
return Error(baton, hook);
}
#else
// Write PNG to buffer
if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
return Error(baton, hook);
}
#endif
baton->outputFormat = "png";
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == WEBP)) {
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == ImageType::WEBP)) {
// Write WEBP to buffer
if (vips_webpsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
"Q", baton->quality, NULL)) {
@@ -551,33 +614,43 @@ class ResizeWorker : public NanAsyncWorker {
}
baton->outputFormat = "webp";
} else {
bool output_jpeg = is_jpeg(baton->output);
bool output_png = is_png(baton->output);
bool output_webp = is_webp(baton->output);
bool output_tiff = is_tiff(baton->output);
bool match_input = !(output_jpeg || output_png || output_webp || output_tiff);
if (output_jpeg || (match_input && inputImageType == JPEG)) {
bool outputJpeg = IsJpeg(baton->output);
bool outputPng = IsPng(baton->output);
bool outputWebp = IsWebp(baton->output);
bool outputTiff = IsTiff(baton->output);
bool matchInput = !(outputJpeg || outputPng || outputWebp || outputTiff);
if (outputJpeg || (matchInput && inputImageType == ImageType::JPEG)) {
// Write JPEG to file
if (vips_jpegsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return Error(baton, hook);
}
baton->outputFormat = "jpeg";
} else if (output_png || (match_input && inputImageType == PNG)) {
} else if (outputPng || (matchInput && inputImageType == ImageType::PNG)) {
#if (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 41)
// Select PNG row filter
int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
// Write PNG to file
if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
return Error(baton, hook);
}
#else
// Write PNG to file
if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
return Error(baton, hook);
}
#endif
baton->outputFormat = "png";
} else if (output_webp || (match_input && inputImageType == WEBP)) {
} else if (outputWebp || (matchInput && inputImageType == ImageType::WEBP)) {
// Write WEBP to file
if (vips_webpsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"Q", baton->quality, NULL)) {
return Error(baton, hook);
}
baton->outputFormat = "webp";
} else if (output_tiff || (match_input && inputImageType == TIFF)) {
} else if (outputTiff || (matchInput && inputImageType == ImageType::TIFF)) {
// Write TIFF to file
if (vips_tiffsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) {
@@ -586,12 +659,10 @@ class ResizeWorker : public NanAsyncWorker {
baton->outputFormat = "tiff";
} else {
(baton->err).append("Unsupported output " + baton->output);
g_object_unref(image);
return Error(baton, hook);
}
}
// Clean up any dangling image references
g_object_unref(image);
g_object_unref(hook);
// Clean up libvips' per-request data and threads
vips_error_clear();
@@ -635,7 +706,7 @@ class ResizeWorker : public NanAsyncWorker {
delete baton;
// Decrement processing task counter
g_atomic_int_dec_and_test(&counter_process);
g_atomic_int_dec_and_test(&counterProcess);
// Return to JavaScript
callback->Call(3, argv);
@@ -653,40 +724,25 @@ class ResizeWorker : public NanAsyncWorker {
*/
std::tuple<Angle, bool>
CalculateRotationAndFlip(int const angle, VipsImage const *input) {
Angle rotate = ANGLE_0;
Angle rotate = Angle::D0;
bool flip = FALSE;
if (angle == -1) {
const char *exif;
if (
vips_image_get_typeof(input, "exif-ifd0-Orientation") != 0 &&
!vips_image_get_string(input, "exif-ifd0-Orientation", &exif)
) {
if (exif[0] == 0x36) { // "6"
rotate = ANGLE_90;
} else if (exif[0] == 0x33) { // "3"
rotate = ANGLE_180;
} else if (exif[0] == 0x38) { // "8"
rotate = ANGLE_270;
} else if (exif[0] == 0x32) { // "2" (flip 1)
flip = TRUE;
} else if (exif[0] == 0x37) { // "7" (flip 6)
rotate = ANGLE_90;
flip = TRUE;
} else if (exif[0] == 0x34) { // "4" (flip 3)
rotate = ANGLE_180;
flip = TRUE;
} else if (exif[0] == 0x35) { // "5" (flip 8)
rotate = ANGLE_270;
flip = TRUE;
}
switch(ExifOrientation(input)) {
case 6: rotate = Angle::D90; break;
case 3: rotate = Angle::D180; break;
case 8: rotate = Angle::D270; break;
case 2: flip = TRUE; break; // flip 1
case 7: flip = TRUE; rotate = Angle::D90; break; // flip 6
case 4: flip = TRUE; rotate = Angle::D180; break; // flip 3
case 5: flip = TRUE; rotate = Angle::D270; break; // flip 8
}
} else {
if (angle == 90) {
rotate = ANGLE_90;
rotate = Angle::D90;
} else if (angle == 180) {
rotate = ANGLE_180;
rotate = Angle::D180;
} else if (angle == 270) {
rotate = ANGLE_270;
rotate = Angle::D270;
}
}
return std::make_tuple(rotate, flip);
@@ -728,9 +784,12 @@ class ResizeWorker : public NanAsyncWorker {
Clear all thread-local data.
*/
void Error(ResizeBaton *baton, VipsObject *hook) {
// Get libvips' error message
(baton->err).append(vips_error_buffer());
vips_error_clear();
// Clean up any dangling image references
g_object_unref(hook);
// Clean up libvips' per-request data and threads
vips_error_clear();
vips_thread_shutdown();
}
};
@@ -771,11 +830,11 @@ NAN_METHOD(resize) {
// Canvas option
Local<String> canvas = options->Get(NanNew<String>("canvas"))->ToString();
if (canvas->Equals(NanNew<String>("c"))) {
baton->canvas = CROP;
baton->canvas = Canvas::CROP;
} else if (canvas->Equals(NanNew<String>("m"))) {
baton->canvas = MAX;
baton->canvas = Canvas::MAX;
} else if (canvas->Equals(NanNew<String>("e"))) {
baton->canvas = EMBED;
baton->canvas = Canvas::EMBED;
}
// Background colour
Local<Array> background = Local<Array>::Cast(options->Get(NanNew<String>("background")));
@@ -788,7 +847,10 @@ NAN_METHOD(resize) {
baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString());
// Operators
baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
baton->blurRadius = options->Get(NanNew<String>("blurRadius"))->Int32Value();
baton->sharpenRadius = options->Get(NanNew<String>("sharpenRadius"))->Int32Value();
baton->sharpenFlat = options->Get(NanNew<String>("sharpenFlat"))->NumberValue();
baton->sharpenJagged = options->Get(NanNew<String>("sharpenJagged"))->NumberValue();
baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue();
baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
@@ -798,6 +860,7 @@ NAN_METHOD(resize) {
baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
baton->withoutAdaptiveFiltering = options->Get(NanNew<String>("withoutAdaptiveFiltering"))->BooleanValue();
baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
// Output filename or __format for Buffer
baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString());
@@ -807,7 +870,7 @@ NAN_METHOD(resize) {
NanAsyncQueueWorker(new ResizeWorker(callback, baton));
// Increment queued task counter
g_atomic_int_inc(&counter_queue);
g_atomic_int_inc(&counterQueue);
NanReturnUndefined();
}


@@ -33,6 +33,7 @@ extern "C" void init(Handle<Object> target) {
NODE_SET_METHOD(target, "cache", cache);
NODE_SET_METHOD(target, "concurrency", concurrency);
NODE_SET_METHOD(target, "counters", counters);
NODE_SET_METHOD(target, "libvipsVersion", libvipsVersion);
}
NODE_MODULE(sharp, init)


@@ -7,6 +7,7 @@
#include "utilities.h"
using namespace v8;
using namespace sharp;
/*
Get and set cache memory and item limits
@@ -58,7 +59,17 @@ NAN_METHOD(concurrency) {
NAN_METHOD(counters) {
NanScope();
Local<Object> counters = NanNew<Object>();
counters->Set(NanNew<String>("queue"), NanNew<Number>(counter_queue));
counters->Set(NanNew<String>("process"), NanNew<Number>(counter_process));
counters->Set(NanNew<String>("queue"), NanNew<Number>(counterQueue));
counters->Set(NanNew<String>("process"), NanNew<Number>(counterProcess));
NanReturnValue(counters);
}
/*
Get libvips version
*/
NAN_METHOD(libvipsVersion) {
NanScope();
char version[9];
snprintf(version, 9, "%d.%d.%d", vips_version(0), vips_version(1), vips_version(2));
NanReturnValue(NanNew<String>(version));
}


@@ -6,5 +6,6 @@
NAN_METHOD(cache);
NAN_METHOD(concurrency);
NAN_METHOD(counters);
NAN_METHOD(libvipsVersion);
#endif


@@ -9,9 +9,10 @@
},
"devDependencies": {
"imagemagick": "^0.1.3",
"imagemagick-native": "^1.4.0",
"gm": "^1.16.0",
"imagemagick-native": "^1.5.0",
"gm": "^1.17.0",
"async": "^0.9.0",
"semver": "^4.1.0",
"benchmark": "^1.0.0"
},
"license": "Apache 2.0",


@@ -5,6 +5,7 @@ var fs = require('fs');
var async = require('async');
var assert = require('assert');
var Benchmark = require('benchmark');
var semver = require('semver');
var imagemagick = require('imagemagick');
var imagemagickNative = require('imagemagick-native');
@@ -162,7 +163,7 @@ async.series({
deferred.resolve();
});
}
}).add('sharp-sharpen', {
}).add('sharp-sharpen-mild', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).sharpen().toBuffer(function(err, buffer) {
@@ -174,6 +175,42 @@ async.series({
}
});
}
}).add('sharp-sharpen-radius', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).sharpen(3, 1, 3).toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-blur-mild', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).blur().toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-blur-radius', {
defer: true,
fn: function(deferred) {
sharp(inputJpgBuffer).resize(width, height).blur(3).toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
}).add('sharp-nearest-neighbour', {
defer: true,
fn: function(deferred) {
@@ -314,7 +351,8 @@ async.series({
},
png: function(callback) {
var inputPngBuffer = fs.readFileSync(fixtures.inputPng);
(new Benchmark.Suite('png')).add('imagemagick-file-file', {
var pngSuite = new Benchmark.Suite('png');
pngSuite.add('imagemagick-file-file', {
defer: true,
fn: function(deferred) {
imagemagick.resize({
@@ -422,7 +460,23 @@ async.series({
}
});
}
}).on('cycle', function(event) {
});
if (semver.gte(sharp.libvipsVersion(), '7.41.0')) {
pngSuite.add('sharp-withoutAdaptiveFiltering', {
defer: true,
fn: function(deferred) {
sharp(inputPngBuffer).resize(width, height).withoutAdaptiveFiltering().toBuffer(function(err, buffer) {
if (err) {
throw err;
} else {
assert.notStrictEqual(null, buffer);
deferred.resolve();
}
});
}
});
}
pngSuite.on('cycle', function(event) {
console.log(' png ' + String(event.target));
}).on('complete', function() {
callback(null, this.filter('fastest').pluck('name'));

View File

@@ -5,4 +5,4 @@ fi
curl -O https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp test/leak/libvips.supp
cd ../../
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=test/leak/libvips.supp --suppressions=test/leak/sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 npm test
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=test/leak/libvips.supp --suppressions=test/leak/sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 --trace-children=yes npm test

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Alpha transparency', function() {
it('Flatten to black', function(done) {

test/unit/blur.js (new executable file, 100 lines)
View File

@@ -0,0 +1,100 @@
'use strict';
var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Blur', function() {
it('specific radius 1', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(1)
.toFile(fixtures.path('output.blur-1.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 10', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(10)
.toFile(fixtures.path('output.blur-10.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 100', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(100)
.toFile(fixtures.path('output.blur-100.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('mild blur', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur()
.toFile(fixtures.path('output.blur-mild.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('invalid radius', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).blur(1.5);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('blurred image is smaller than non-blurred', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(false)
.toBuffer(function(err, notBlurred, info) {
if (err) throw err;
assert.strictEqual(true, notBlurred.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
sharp(fixtures.inputJpg)
.resize(320, 240)
.blur(true)
.toBuffer(function(err, blurred, info) {
if (err) throw err;
assert.strictEqual(true, blurred.length > 0);
assert.strictEqual(true, blurred.length < notBlurred.length);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
});
});
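These tests exercise the accepted blur argument forms: no argument applies the mild, fast blur; an integer radius applies a Gaussian blur of that radius; a boolean toggles blurring on or off; and a non-integer radius throws. A condensed sketch (file names are placeholders):

var sharp = require('sharp');

sharp('input.jpg').resize(320, 240).blur()       // mild, fast blur
  .toFile('blur-mild.jpg', function(err) { if (err) throw err; });

sharp('input.jpg').resize(320, 240).blur(10)     // Gaussian blur, radius 10
  .toFile('blur-10.jpg', function(err) { if (err) throw err; });

sharp('input.jpg').resize(320, 240).blur(false)  // blur explicitly disabled
  .toFile('no-blur.jpg', function(err) { if (err) throw err; });

// Throws: the radius must be an integer
// sharp('input.jpg').blur(1.5);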

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Colour space conversion', function() {
it('To greyscale', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Crop gravities', function() {
it('North', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Embed', function() {
it('JPEG within PNG, no alpha channel', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Partial image extraction', function() {
it('JPEG', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Gamma correction', function() {
it('value of 0.0 (disabled)', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Interpolation', function() {
it('nearest neighbour', function(done) {

View File

@@ -3,9 +3,13 @@
var fs = require('fs');
var assert = require('assert');
var semver = require('semver');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Input/output', function() {
it('Read from File and write to Stream', function(done) {
@@ -343,9 +347,9 @@ describe('Input/output', function() {
});
describe('PNG compression level', function() {
describe('PNG output', function() {
it('valid', function(done) {
it('compression level is valid', function(done) {
var isValid = false;
try {
sharp().compressionLevel(0);
@@ -355,7 +359,7 @@ describe('Input/output', function() {
done();
});
it('invalid', function(done) {
it('compression level is invalid', function(done) {
var isValid = false;
try {
sharp().compressionLevel(-1);
@@ -365,6 +369,52 @@ describe('Input/output', function() {
done();
});
if (semver.gte(sharp.libvipsVersion(), '7.41.0')) {
it('withoutAdaptiveFiltering generates smaller file [libvips 7.41.0+]', function(done) {
// First generate with adaptive filtering
sharp(fixtures.inputPng)
.resize(320, 240)
.withoutAdaptiveFiltering(false)
.toBuffer(function(err, dataAdaptive, info) {
if (err) throw err;
assert.strictEqual(true, dataAdaptive.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
// Then generate without
sharp(fixtures.inputPng)
.resize(320, 240)
.withoutAdaptiveFiltering()
.toBuffer(function(err, dataWithoutAdaptive, info) {
if (err) throw err;
assert.strictEqual(true, dataWithoutAdaptive.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(true, dataWithoutAdaptive.length < dataAdaptive.length);
done();
});
});
});
}
});
if (semver.gte(sharp.libvipsVersion(), '7.40.0')) {
it('Load TIFF from Buffer [libvips 7.40.0+]', function(done) {
var inputTiffBuffer = fs.readFileSync(fixtures.inputTiff);
sharp(inputTiffBuffer)
.resize(320, 240)
.jpeg()
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
}
});
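The final test reads a whole TIFF file into a Buffer before passing it to sharp, a capability added with libvips 7.40.0+. Because the returned object implements stream.Duplex, the same input should also be streamable; a hedged sketch under that assumption (file names are placeholders):

var fs = require('fs');
var semver = require('semver');
var sharp = require('sharp');

if (semver.gte(sharp.libvipsVersion(), '7.40.0')) {
  // Pipe TIFF data in, resize, and write the JPEG result out
  var transformer = sharp().resize(320, 240).jpeg();
  fs.createReadStream('input.tiff')
    .pipe(transformer)
    .pipe(fs.createWriteStream('output.jpg'));
}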

View File

@@ -6,6 +6,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Image metadata', function() {
it('JPEG', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Resize dimensions', function() {
it('Exact crop', function(done) {

View File

@@ -5,6 +5,8 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Rotation', function() {
it('Rotate by 90 degrees, respecting output input size', function(done) {

View File

@@ -5,9 +5,96 @@ var assert = require('assert');
var sharp = require('../../index');
var fixtures = require('../fixtures');
sharp.cache(0);
describe('Sharpen', function() {
it('sharpen image is larger than non-sharpen', function(done) {
it('specific radius 10', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(10)
.toFile(fixtures.path('output.sharpen-10.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 3 and levels 0.5, 2.5', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(3, 0.5, 2.5)
.toFile(fixtures.path('output.sharpen-3-0.5-2.5.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('specific radius 5 and levels 2, 4', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(5, 2, 4)
.toFile(fixtures.path('output.sharpen-5-2-4.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('mild sharpen', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen()
.toFile(fixtures.path('output.sharpen-mild.jpg'), function(err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
done();
});
});
it('invalid radius', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1.5);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('invalid flat', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1, -1);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('invalid jagged', function(done) {
var isValid = true;
try {
sharp(fixtures.inputJpg).sharpen(1, 1, -1);
} catch (err) {
isValid = false;
}
assert.strictEqual(false, isValid);
done();
});
it('sharpened image is larger than non-sharpened', function(done) {
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(false)
@@ -17,8 +104,9 @@ describe('Sharpen', function() {
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
sharp(notSharpened)
.sharpen()
sharp(fixtures.inputJpg)
.resize(320, 240)
.sharpen(true)
.toBuffer(function(err, sharpened, info) {
if (err) throw err;
assert.strictEqual(true, sharpened.length > 0);