Allow embed with rgba background #86 #89

Small memory leak mop-up related to #94
This commit is contained in:
Lovell Fuller 2014-10-02 20:02:14 +01:00
parent 5cdfbba55c
commit 0cba506bc4
6 changed files with 242 additions and 199 deletions

View File

@ -180,7 +180,8 @@ http.createServer(function(request, response) {
sharp(inputBuffer)
.resize(200, 300)
.interpolateWith(sharp.interpolator.nohalo)
.embedWhite()
.background('white')
.embed()
.toFile('output.tiff')
.then(function() {
// output.tiff is a 200 pixels wide and 300 pixels high image
@ -190,20 +191,29 @@ sharp(inputBuffer)
```
```javascript
sharp('input.gif').resize(200, 300).embedBlack().webp().toBuffer(function(err, outputBuffer) {
if (err) {
throw err;
}
// outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
// containing a scaled version, embedded on a black canvas, of input.gif
});
sharp('input.gif')
.resize(200, 300)
.background({r: 0, g: 0, b: 0, a: 0})
.embed()
.webp()
.toBuffer(function(err, outputBuffer) {
if (err) {
throw err;
}
// outputBuffer contains WebP image data of a 200 pixels wide and 300 pixels high
// containing a scaled version, embedded on a transparent canvas, of input.gif
});
```
```javascript
sharp(inputBuffer).resize(200, 200).max().jpeg().toBuffer().then(function(outputBuffer) {
// outputBuffer contains JPEG image data no wider than 200 pixels and no higher
// than 200 pixels regardless of the inputBuffer image dimensions
});
sharp(inputBuffer)
.resize(200, 200)
.max()
.jpeg()
.toBuffer().then(function(outputBuffer) {
// outputBuffer contains JPEG image data no wider than 200 pixels and no higher
// than 200 pixels regardless of the inputBuffer image dimensions
});
```
## API
@ -234,6 +244,7 @@ Fast access to image metadata without decoding any compressed image data.
* `height`: Number of pixels high
* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L502)
* `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
* `hasAlpha`: Boolean indicating the presence of an alpha transparency channel
* `orientation`: Number value of the EXIF Orientation header, if present
A Promises/A+ promise is returned when `callback` is not provided.
@ -244,10 +255,6 @@ An advanced setting that switches the libvips access method to `VIPS_ACCESS_SEQU
### Image transformation options
#### background(color) or background(r, g, b)
Set background color for operations such as `flatten`. Pass any valid CSS color as `color` string, e.g. `'#00ff00'`, `'hsl(120,100%,50%)'`, etc., or as numbers in the range of `[0, 255]` for `r`, `g`, and `b`. The color defaults to black.
#### resize(width, [height])
Scale output to `width` x `height`. By default, the resized image is cropped to the exact size specified.
@ -266,21 +273,29 @@ Possible values are `north`, `east`, `south`, `west`, `center` and `centre`. The
#### max()
Preserving aspect ratio, resize the image to the maximum width or height specified.
Preserving aspect ratio, resize the image to the maximum `width` or `height` specified.
Both `width` and `height` must be provided via `resize` otherwise the behaviour will default to `crop`.
#### embedWhite()
#### background(rgba)
Embed the resized image on a white background of the exact size specified.
Set the background for the `embed` and `flatten` operations.
#### embedBlack()
`rgba` is parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
Embed the resized image on a black background of the exact size specified.
The alpha value is a float between `0` (transparent) and `1` (opaque).
The default background is `{r: 0, g: 0, b: 0, a: 1}`, black without transparency.
#### embed()
Preserving aspect ratio, resize the image to the maximum `width` or `height` specified then embed on a background of the exact `width` and `height` specified.
If the background contains an alpha value then WebP and PNG format output images will contain an alpha channel, even when the input image does not.
#### flatten()
Flatten transparent images onto background with a color set using `background`.
Merge alpha transparency channel, if any, with `background`.
#### rotate([angle])
@ -327,7 +342,7 @@ JPEG input images will not take advantage of the shrink-on-load performance opti
Convert to 8-bit greyscale; 256 shades of grey.
This is a linear operation. If the input image is in a non-linear colourspace such as sRGB, use `gamma()` with `greyscale()` for the best results.
This is a linear operation. If the input image is in a non-linear colour space such as sRGB, use `gamma()` with `greyscale()` for the best results.
The output image will still be web-friendly sRGB and contain three (identical) channels.

117
index.js
View File

@ -1,10 +1,12 @@
/*jslint node: true */
'use strict';
var Color = require('color');
var util = require('util');
var stream = require('stream');
var color = require('color');
var Promise = require('bluebird');
var sharp = require('./build/Release/sharp');
var Sharp = function(input) {
@ -13,30 +15,30 @@ var Sharp = function(input) {
}
stream.Duplex.call(this);
this.options = {
// input options
streamIn: false,
sequentialRead: false,
// resize options
width: -1,
height: -1,
canvas: 'c',
gravity: 0,
angle: 0,
withoutEnlargement: false,
sharpen: false,
interpolator: 'bilinear',
// operations
background: [0, 0, 0, 255],
flatten: false,
sharpen: false,
gamma: 0,
greyscale: false,
// output options
output: '__input',
progressive: false,
sequentialRead: false,
quality: 80,
compressionLevel: 6,
streamIn: false,
streamOut: false,
withMetadata: false,
output: '__input',
// background
backgroundRed: 0,
backgroundGreen: 0,
backgroundBlue: 0,
// flatten
flatten: false
withMetadata: false
};
if (typeof input === 'string') {
// input=file
@ -86,34 +88,6 @@ Sharp.prototype._write = function(chunk, encoding, callback) {
// Crop this part of the resized image (Center/Centre, North, East, South, West)
module.exports.gravity = {'center': 0, 'centre': 0, 'north': 1, 'east': 2, 'south': 3, 'west': 4};
/*
  Set the background colour used by operations such as flatten.
  Accepts either a single CSS colour (string or object, parsed by the
  'color' module) or three numeric r, g, b values in the range 0-255.
  Throws an Error for any other arity or out-of-range channel value.
*/
Sharp.prototype.background = function(color) {
  if (arguments.length !== 1 && arguments.length !== 3) {
    throw new Error('Invalid color. Expected `color` or `r, g, b`');
  }
  // Validate a single channel value, rejecting NaN and out-of-range input
  var normalize = function(name, value) {
    if (isNaN(value) || value < 0 || 255 < value) {
      throw new Error('Invalid ' + name + ' value (0.0 to 255.0) ' + value);
    }
    return value;
  };
  if (arguments.length === 1) {
    // Delegate parsing of the CSS colour to the 'color' module
    var channels = Color(color);
    this.options.backgroundRed = normalize('red', channels.red());
    this.options.backgroundGreen = normalize('green', channels.green());
    this.options.backgroundBlue = normalize('blue', channels.blue());
  } else {
    // The guard above guarantees exactly three arguments here,
    // so the previous unreachable 'Unreachable state' branch is gone.
    this.options.backgroundRed = normalize('red', arguments[0]);
    this.options.backgroundGreen = normalize('green', arguments[1]);
    this.options.backgroundBlue = normalize('blue', arguments[2]);
  }
  return this;
};
Sharp.prototype.crop = function(gravity) {
this.options.canvas = 'c';
if (typeof gravity !== 'undefined') {
@ -127,18 +101,30 @@ Sharp.prototype.crop = function(gravity) {
return this;
};
Sharp.prototype.embedWhite = function() {
this.options.canvas = 'w';
/*
  Deprecated embed* methods, to be removed in v0.8.0
  Each emits a deprecation warning via util.deprecate, then delegates
  to the equivalent background().embed() method chain.
*/
// Embed on a white canvas (deprecated).
Sharp.prototype.embedWhite = util.deprecate(function() {
  return this.background('white').embed();
}, "embedWhite() is deprecated, use background('white').embed() instead");
// Embed on a black canvas (deprecated).
Sharp.prototype.embedBlack = util.deprecate(function() {
  return this.background('black').embed();
}, "embedBlack() is deprecated, use background('black').embed() instead");
/*
  Set the background colour for the embed and flatten operations.
  Parsing is delegated to the 'color' module, which can throw an Error
  but is liberal in what it accepts, clamping values to sensible min/max.
*/
Sharp.prototype.background = function(rgba) {
  var parsed = color(rgba);
  var channels = parsed.rgbArray();
  // Alpha is reported as a float in [0, 1]; scale it to the 0-255 range
  channels.push(parsed.alpha() * 255);
  this.options.background = channels;
  return this;
};
Sharp.prototype.embedBlack = function() {
this.options.canvas = 'b';
return this;
};
Sharp.prototype.flatten = function(background) {
this.options.flatten = true;
// Embed: preserve aspect ratio when resizing, then place the result on a
// canvas of the exact width and height given to resize(), filled with the
// colour set via background().
Sharp.prototype.embed = function() {
  this.options.canvas = 'e';
  return this;
};
@ -147,6 +133,11 @@ Sharp.prototype.max = function() {
return this;
};
/*
  Control whether any alpha transparency channel is merged with the
  background colour. Calling with no argument (or any non-boolean)
  enables flattening.
*/
Sharp.prototype.flatten = function(flatten) {
  if (typeof flatten === 'boolean') {
    this.options.flatten = flatten;
  } else {
    this.options.flatten = true;
  }
  return this;
};
/*
Rotate output image by 0, 90, 180 or 270 degrees
Auto-rotation based on the EXIF Orientation tag is represented by an angle of -1
@ -193,19 +184,6 @@ Sharp.prototype.interpolateWith = function(interpolator) {
return this;
};
/*
  Deprecated interpolation methods, to be removed in v0.7.0
  Each one emits a deprecation warning and delegates to interpolateWith()
  with the matching interpolator constant.
*/
['bilinear', 'bicubic', 'nohalo'].forEach(function(name) {
  Sharp.prototype[name + 'Interpolation'] = util.deprecate(function() {
    return this.interpolateWith(module.exports.interpolator[name]);
  }, name + 'Interpolation() is deprecated, use interpolateWith(sharp.interpolator.' + name + ') instead');
});
/*
Darken image pre-resize (1/gamma) and brighten post-resize (gamma).
Improves brightness of resized image in non-linear colour spaces.
@ -319,31 +297,16 @@ Sharp.prototype.toBuffer = function(callback) {
/*
  Select JPEG as the output format.
  Passing any arguments (deprecated) starts processing immediately.
*/
Sharp.prototype.jpeg = function() {
  this.options.output = '__jpeg';
  if (arguments.length === 0) {
    return this;
  }
  // Deprecated callback-style invocation
  console.error('Use of the jpeg() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
  console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for JPEG output');
  this._sharp(arguments);
  return this;
};
/*
  Select PNG as the output format.
  Passing any arguments (deprecated) starts processing immediately.
*/
Sharp.prototype.png = function() {
  this.options.output = '__png';
  if (arguments.length === 0) {
    return this;
  }
  // Deprecated callback-style invocation
  console.error('Use of the png() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
  console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for PNG output');
  this._sharp(arguments);
  return this;
};
/*
  Select WebP as the output format.
  Passing any arguments (deprecated) starts processing immediately.
*/
Sharp.prototype.webp = function() {
  this.options.output = '__webp';
  if (arguments.length === 0) {
    return this;
  }
  // Deprecated callback-style invocation
  console.error('Use of the webp() method with a callback is deprecated in 0.6.x and will be removed in 0.7.x');
  console.error('Please add toFile(), toBuffer() or Stream methods e.g. pipe() for WebP output');
  this._sharp(arguments);
  return this;
};

View File

@ -1,6 +1,6 @@
{
"name": "sharp",
"version": "0.6.2",
"version": "0.7.0",
"author": "Lovell Fuller <npm@lovell.info>",
"contributors": [
"Pierre Inglebert <pierre.inglebert@gmail.com>",
@ -38,13 +38,13 @@
"stream"
],
"dependencies": {
"bluebird": "^2.3.2",
"bluebird": "^2.3.4",
"color": "^0.7.1",
"nan": "^1.3.0"
},
"devDependencies": {
"imagemagick": "^0.1.3",
"imagemagick-native": "^1.2.2",
"imagemagick-native": "^1.3.0",
"gm": "^1.16.0",
"async": "^0.9.0",
"benchmark": "^1.0.0"

View File

@ -11,6 +11,12 @@
using namespace v8;
using namespace node;
typedef enum {
CROP,
MAX,
EMBED
} Canvas;
struct resize_baton {
std::string file_in;
void* buffer_in;
@ -21,47 +27,37 @@ struct resize_baton {
size_t buffer_out_len;
int width;
int height;
bool crop;
Canvas canvas;
int gravity;
bool max;
VipsExtend extend;
bool sharpen;
std::string interpolator;
double background[4];
bool flatten;
bool sharpen;
double gamma;
bool greyscale;
bool progressive;
bool without_enlargement;
VipsAccess access_method;
int quality;
int compressionLevel;
int compression_level;
int angle;
std::string err;
bool withMetadata;
// background
double background_red;
double background_green;
double background_blue;
// flatten
bool flatten;
bool with_metadata;
resize_baton():
buffer_in_len(0),
output_format(""),
buffer_out_len(0),
crop(false),
canvas(CROP),
gravity(0),
max(false),
background{0.0, 0.0, 0.0, 255.0},
flatten(false),
sharpen(false),
gamma(0.0),
greyscale(false),
progressive(false),
without_enlargement(false),
withMetadata(false),
// background
background_red(0.0),
background_green(0.0),
background_blue(0.0),
// flatten
flatten(false) {}
with_metadata(false) {}
};
typedef enum {
@ -175,7 +171,7 @@ sharp_calc_crop(int const inWidth, int const inHeight, int const outWidth, int c
/*
Does this image have an alpha channel?
Uses colourspace interpretation with number of channels to guess this.
Uses colour space interpretation with number of channels to guess this.
*/
static bool
sharp_image_has_alpha(VipsImage *image) {
@ -427,9 +423,9 @@ class ResizeWorker : public NanAsyncWorker {
// Fixed width and height
double xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
double yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
factor = baton->crop ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
factor = (baton->canvas == CROP) ? std::min(xfactor, yfactor) : std::max(xfactor, yfactor);
// if max is set, we need to compute the real size of the thumb image
if (baton->max) {
if (baton->canvas == MAX) {
if (xfactor > yfactor) {
baton->height = round(static_cast<double>(inputHeight) / xfactor);
} else {
@ -502,22 +498,21 @@ class ResizeWorker : public NanAsyncWorker {
}
g_object_unref(in);
// Flatten
// Flatten image to remove alpha channel
VipsImage *flattened = vips_image_new();
// We skip non-four-band images as we haven't tested two-channel PNGs with
// alpha channel (yet).
// See: https://github.com/lovell/sharp/pull/91#issuecomment-56496548
if (baton->flatten && sharp_image_has_alpha(shrunk_on_load) && shrunk_on_load->Bands == 4) {
if (baton->flatten && sharp_image_has_alpha(shrunk_on_load)) {
// Background colour
VipsArrayDouble *background = vips_array_double_newv(
3, // vector size
baton->background_red,
baton->background_green,
baton->background_blue
3, // Ignore alpha channel as we're about to remove it
baton->background[0],
baton->background[1],
baton->background[2]
);
if (vips_flatten(shrunk_on_load, &flattened, "background", background, NULL)) {
vips_area_unref(reinterpret_cast<VipsArea*>(background));
return resize_error(baton, shrunk_on_load);
};
vips_area_unref(reinterpret_cast<VipsArea*>(background));
} else {
vips_copy(shrunk_on_load, &flattened, NULL);
}
@ -536,11 +531,11 @@ class ResizeWorker : public NanAsyncWorker {
// Convert to greyscale (linear, therefore after gamma encoding, if any)
if (baton->greyscale) {
VipsImage *greyscale = vips_image_new();
if (vips_colourspace(shrunk_on_load, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
return resize_error(baton, shrunk_on_load);
if (vips_colourspace(flattened, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
return resize_error(baton, flattened);
}
g_object_unref(shrunk_on_load);
shrunk_on_load = greyscale;
g_object_unref(flattened);
flattened = greyscale;
}
VipsImage *shrunk = vips_image_new();
@ -560,10 +555,10 @@ class ResizeWorker : public NanAsyncWorker {
}
double residualx = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth);
double residualy = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight);
if (baton->crop || baton->max) {
residual = std::max(residualx, residualy);
} else {
if (baton->canvas == EMBED) {
residual = std::min(residualx, residualy);
} else {
residual = std::max(residualx, residualy);
}
} else {
vips_copy(flattened, &shrunk, NULL);
@ -600,7 +595,56 @@ class ResizeWorker : public NanAsyncWorker {
// Crop/embed
VipsImage *canvased = vips_image_new();
if (rotated->Xsize != baton->width || rotated->Ysize != baton->height) {
if (baton->crop || baton->max) {
if (baton->canvas == EMBED) {
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !sharp_image_has_alpha(rotated)) {
// Create single-channel transparency
VipsImage *black = vips_image_new();
if (vips_black(&black, rotated->Xsize, rotated->Ysize, "bands", 1, NULL)) {
g_object_unref(black);
return resize_error(baton, rotated);
}
// Invert to become non-transparent
VipsImage *alphaChannel = vips_image_new();
if (vips_invert(black, &alphaChannel, NULL)) {
g_object_unref(black);
g_object_unref(alphaChannel);
return resize_error(baton, rotated);
}
g_object_unref(black);
// Append alpha channel to existing image
VipsImage *joined = vips_image_new();
if (vips_bandjoin2(rotated, alphaChannel, &joined, NULL)) {
g_object_unref(alphaChannel);
g_object_unref(joined);
return resize_error(baton, rotated);
}
g_object_unref(alphaChannel);
g_object_unref(rotated);
rotated = joined;
}
// Create background
VipsArrayDouble *background;
if (baton->background[3] < 255.0) {
background = vips_array_double_newv(
4, baton->background[0], baton->background[1], baton->background[2], baton->background[3]
);
} else {
background = vips_array_double_newv(
3, baton->background[0], baton->background[1], baton->background[2]
);
}
// Embed
int left = (baton->width - rotated->Xsize) / 2;
int top = (baton->height - rotated->Ysize) / 2;
if (vips_embed(rotated, &canvased, left, top, baton->width, baton->height,
"extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL
)) {
vips_area_unref(reinterpret_cast<VipsArea*>(background));
return resize_error(baton, rotated);
}
vips_area_unref(reinterpret_cast<VipsArea*>(background));
} else {
// Crop/max
int left;
int top;
@ -610,13 +654,6 @@ class ResizeWorker : public NanAsyncWorker {
if (vips_extract_area(rotated, &canvased, left, top, width, height, NULL)) {
return resize_error(baton, rotated);
}
} else {
// Embed
int left = (baton->width - rotated->Xsize) / 2;
int top = (baton->height - rotated->Ysize) / 2;
if (vips_embed(rotated, &canvased, left, top, baton->width, baton->height, "extend", baton->extend, NULL)) {
return resize_error(baton, rotated);
}
}
} else {
vips_copy(rotated, &canvased, NULL);
@ -671,21 +708,21 @@ class ResizeWorker : public NanAsyncWorker {
VipsImage *output = cached;
if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == JPEG)) {
// Write JPEG to buffer
if (vips_jpegsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
if (vips_jpegsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return resize_error(baton, output);
}
baton->output_format = "jpeg";
} else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == PNG)) {
// Write PNG to buffer
if (vips_pngsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
if (vips_pngsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
"compression", baton->compression_level, "interlace", baton->progressive, NULL)) {
return resize_error(baton, output);
}
baton->output_format = "png";
} else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == WEBP)) {
// Write WEBP to buffer
if (vips_webpsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->withMetadata,
if (vips_webpsave_buffer(output, &baton->buffer_out, &baton->buffer_out_len, "strip", !baton->with_metadata,
"Q", baton->quality, NULL)) {
return resize_error(baton, output);
}
@ -698,28 +735,28 @@ class ResizeWorker : public NanAsyncWorker {
bool match_input = !(output_jpeg || output_png || output_webp || output_tiff);
if (output_jpeg || (match_input && inputImageType == JPEG)) {
// Write JPEG to file
if (vips_jpegsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
if (vips_jpegsave(output, baton->output.c_str(), "strip", !baton->with_metadata,
"Q", baton->quality, "optimize_coding", TRUE, "interlace", baton->progressive, NULL)) {
return resize_error(baton, output);
}
baton->output_format = "jpeg";
} else if (output_png || (match_input && inputImageType == PNG)) {
// Write PNG to file
if (vips_pngsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
"compression", baton->compressionLevel, "interlace", baton->progressive, NULL)) {
if (vips_pngsave(output, baton->output.c_str(), "strip", !baton->with_metadata,
"compression", baton->compression_level, "interlace", baton->progressive, NULL)) {
return resize_error(baton, output);
}
baton->output_format = "png";
} else if (output_webp || (match_input && inputImageType == WEBP)) {
// Write WEBP to file
if (vips_webpsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
if (vips_webpsave(output, baton->output.c_str(), "strip", !baton->with_metadata,
"Q", baton->quality, NULL)) {
return resize_error(baton, output);
}
baton->output_format = "webp";
} else if (output_tiff || (match_input && inputImageType == TIFF)) {
// Write TIFF to file
if (vips_tiffsave(output, baton->output.c_str(), "strip", !baton->withMetadata,
if (vips_tiffsave(output, baton->output.c_str(), "strip", !baton->with_metadata,
"compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) {
return resize_error(baton, output);
}
@ -785,6 +822,7 @@ NAN_METHOD(resize) {
// Input filename
baton->file_in = *String::Utf8Value(options->Get(NanNew<String>("fileIn"))->ToString());
baton->access_method = options->Get(NanNew<String>("sequentialRead"))->BooleanValue() ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
// Input Buffer object
if (options->Get(NanNew<String>("bufferIn"))->IsObject()) {
Local<Object> buffer = options->Get(NanNew<String>("bufferIn"))->ToObject();
@ -794,40 +832,35 @@ NAN_METHOD(resize) {
// Output image dimensions
baton->width = options->Get(NanNew<String>("width"))->Int32Value();
baton->height = options->Get(NanNew<String>("height"))->Int32Value();
// Canvas options
// Canvas option
Local<String> canvas = options->Get(NanNew<String>("canvas"))->ToString();
if (canvas->Equals(NanNew<String>("c"))) {
baton->crop = true;
} else if (canvas->Equals(NanNew<String>("w"))) {
baton->extend = VIPS_EXTEND_WHITE;
} else if (canvas->Equals(NanNew<String>("b"))) {
baton->extend = VIPS_EXTEND_BLACK;
baton->canvas = CROP;
} else if (canvas->Equals(NanNew<String>("m"))) {
baton->max = true;
baton->canvas = MAX;
} else if (canvas->Equals(NanNew<String>("e"))) {
baton->canvas = EMBED;
}
// Flatten
baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
// Background
baton->background_red = options->Get(NanNew<String>("backgroundRed"))->NumberValue();
baton->background_green = options->Get(NanNew<String>("backgroundGreen"))->NumberValue();
baton->background_blue = options->Get(NanNew<String>("backgroundBlue"))->NumberValue();
// Other options
// Background colour
Local<Array> background = Local<Array>::Cast(options->Get(NanNew<String>("background")));
for (int i = 0; i < 4; i++) {
baton->background[i] = background->Get(i)->NumberValue();
}
// Resize options
baton->without_enlargement = options->Get(NanNew<String>("withoutEnlargement"))->BooleanValue();
baton->gravity = options->Get(NanNew<String>("gravity"))->Int32Value();
baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
baton->interpolator = *String::Utf8Value(options->Get(NanNew<String>("interpolator"))->ToString());
// Operators
baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue();
baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
baton->without_enlargement = options->Get(NanNew<String>("withoutEnlargement"))->BooleanValue();
baton->access_method = options->Get(NanNew<String>("sequentialRead"))->BooleanValue() ? VIPS_ACCESS_SEQUENTIAL : VIPS_ACCESS_RANDOM;
baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
// Output options
baton->progressive = options->Get(NanNew<String>("progressive"))->BooleanValue();
baton->quality = options->Get(NanNew<String>("quality"))->Int32Value();
baton->compression_level = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
baton->with_metadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
// Output filename or __format for Buffer
baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString());

View File

@ -4,4 +4,4 @@ if ! type valgrind >/dev/null; then
fi
curl -O https://raw.githubusercontent.com/jcupitt/libvips/master/libvips.supp
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=libvips.supp --suppressions=sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible node unit.js
G_SLICE=always-malloc G_DEBUG=gc-friendly valgrind --suppressions=libvips.supp --suppressions=sharp.supp --leak-check=full --show-leak-kinds=definite,indirect,possible --num-callers=20 node unit.js

View File

@ -94,6 +94,45 @@ async.series([
done();
});
},
// Embed - JPEG within PNG, no alpha channel
function(done) {
sharp(inputJpg)
.embed()
.resize(320, 240)
.png()
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
sharp(data).metadata(function(err, metadata) {
if (err) throw err;
assert.strictEqual(3, metadata.channels);
done();
});
});
},
// Embed - JPEG within WebP, to include alpha channel
function(done) {
sharp(inputJpg)
.resize(320, 240)
.background({r: 0, g: 0, b: 0, a: 0})
.embed()
.webp()
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
sharp(data).metadata(function(err, metadata) {
if (err) throw err;
assert.strictEqual(4, metadata.channels);
done();
});
});
},
// Quality
function(done) {
sharp(inputJpg).resize(320, 240).quality(70).toBuffer(function(err, buffer70) {
@ -110,7 +149,7 @@ async.series([
},
// TIFF with dimensions known to cause rounding errors
function(done) {
sharp(inputTiff).resize(240, 320).embedBlack().jpeg().toBuffer(function(err, data, info) {
sharp(inputTiff).resize(240, 320).embed().jpeg().toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('jpeg', info.format);
@ -776,22 +815,15 @@ async.series([
done();
});
},
function(done) {
// Invalid `background` arguments
try {
sharp(inputPngWithTransparency).background(-1, -1, -1).flatten();
} catch (e) {
assert.strictEqual(e.message, "Invalid red value (0.0 to 255.0) -1");
done();
return;
}
assert.fail();
},
// Verify internal counters
function(done) {
var counters = sharp.counters();
assert.strictEqual(0, counters.queue);
assert.strictEqual(0, counters.process);
},
// Empty cache
function(done) {
sharp.cache(0);
done();
}
]);