Merge pull request #94 from gasi/background-flatten

Add `background` and `flatten` APIs
Lovell Fuller 2014-10-01 20:51:52 +01:00
commit 5cdfbba55c
6 changed files with 164 additions and 15 deletions

.gitignore

@ -14,3 +14,6 @@ build
node_modules
tests/fixtures/output.*
tests/libvips.supp
# Mac OS X
.DS_Store


@ -153,14 +153,17 @@ readableStream.pipe(pipeline);
sharp('input.png')
.rotate(180)
.resize(300)
.flatten()
.background('#ff6600')
.sharpen()
.withMetadata()
.quality(90)
.webp()
.toBuffer()
.then(function(outputBuffer) {
// outputBuffer contains 300px wide, upside down, sharpened,
// with metadata, 90% quality WebP image data
// outputBuffer contains upside down, 300px wide, alpha channel flattened
// onto orange background, sharpened, with metadata, 90% quality WebP image
// data
});
```
@ -241,6 +244,10 @@ An advanced setting that switches the libvips access method to `VIPS_ACCESS_SEQU
### Image transformation options
#### background(color) or background(r, g, b)
Set the background color for operations such as `flatten`. Pass any valid CSS color as the `color` string, e.g. `'#00ff00'` or `'hsl(120,100%,50%)'`, or pass `r`, `g` and `b` as numbers in the range `[0, 255]`. The background color defaults to black.
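As a minimal usage sketch (not part of this changeset, and assuming an `input.png` with transparency and an `output.jpg` destination), the two calling conventions are interchangeable:

```javascript
var sharp = require('sharp');

// CSS color string form
sharp('input.png')
  .background('#ff6600')
  .flatten()
  .toFile('output.jpg', function(err) {
    if (err) throw err;
  });

// Numeric r, g, b form, producing the same orange background
sharp('input.png')
  .background(255, 102, 0)
  .flatten()
  .toFile('output.jpg', function(err) {
    if (err) throw err;
  });
```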
#### resize(width, [height])
Scale output to `width` x `height`. By default, the resized image is cropped to the exact size specified.
@ -271,6 +278,10 @@ Embed the resized image on a white background of the exact size specified.
Embed the resized image on a black background of the exact size specified.
#### flatten()
Flatten the alpha transparency channel of an image onto the background color set via `background`.
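Outside the diff, a short sketch of the default behaviour: without a `background` call, transparent regions are flattened onto black. The `input-with-alpha.png` filename here is for illustration only:

```javascript
var sharp = require('sharp');

// No background() call, so the alpha channel is flattened onto the default black
sharp('input-with-alpha.png')
  .flatten()
  .resize(400, 300)
  .toFile('output.flatten-black.jpg', function(err, info) {
    if (err) throw err;
    console.log(info.format, info.width, info.height); // e.g. 'jpeg' 400 300
  });
```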
#### rotate([angle])
Rotate the output image by either an explicit angle or auto-orient based on the EXIF `Orientation` tag.
@ -503,6 +514,9 @@ This module would never have been possible without the help and code contributio
* [Jonathan Ong](https://github.com/jonathanong)
* [Chanon Sajjamanochai](https://github.com/chanon)
* [Juliano Julio](https://github.com/julianojulio)
* [Daniel Gasienica](https://github.com/gasi)
* [Julian Walker](https://github.com/julianwa)
* [Amit Pitaru](https://github.com/apitaru)
Thank you!


@ -1,6 +1,7 @@
/*jslint node: true */
'use strict';
var Color = require('color');
var util = require('util');
var stream = require('stream');
var Promise = require('bluebird');
@ -29,7 +30,13 @@ var Sharp = function(input) {
streamIn: false,
streamOut: false,
withMetadata: false,
output: '__input'
output: '__input',
// background
backgroundRed: 0,
backgroundGreen: 0,
backgroundBlue: 0,
// flatten
flatten: false
};
if (typeof input === 'string') {
// input=file
@ -79,6 +86,34 @@ Sharp.prototype._write = function(chunk, encoding, callback) {
// Crop this part of the resized image (Center/Centre, North, East, South, West)
module.exports.gravity = {'center': 0, 'centre': 0, 'north': 1, 'east': 2, 'south': 3, 'west': 4};
Sharp.prototype.background = function(color) {
if (arguments.length !== 1 && arguments.length !== 3) {
throw new Error('Invalid color. Expected `color` or `r, g, b`');
}
var normalize = function (name, color) {
if (isNaN(color) || color < 0 || 255 < color) {
throw new Error('Invalid ' + name + ' value (0.0 to 255.0) ' + color);
}
return color;
};
if (arguments.length === 1) {
var channels = Color(color);
this.options.backgroundRed = normalize('red', channels.red());
this.options.backgroundGreen = normalize('green', channels.green());
this.options.backgroundBlue = normalize('blue', channels.blue());
} else if (arguments.length === 3) {
this.options.backgroundRed = normalize('red', arguments[0]);
this.options.backgroundGreen = normalize('green', arguments[1]);
this.options.backgroundBlue = normalize('blue', arguments[2]);
} else {
throw new Error('Unreachable state');
}
return this;
};
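For context rather than as part of the diff: a sketch of how the `color` dependency (added to `package.json` below) resolves a CSS string into the channel values stored on `options`, assuming the 0.7.x getter API used above:

```javascript
var Color = require('color');

// 'hsl(120,100%,50%)' is pure green, i.e. rgb(0, 255, 0)
var channels = Color('hsl(120,100%,50%)');
console.log(channels.red(), channels.green(), channels.blue()); // 0 255 0
```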
Sharp.prototype.crop = function(gravity) {
this.options.canvas = 'c';
if (typeof gravity !== 'undefined') {
@ -102,6 +137,11 @@ Sharp.prototype.embedBlack = function() {
return this;
};
Sharp.prototype.flatten = function(background) {
this.options.flatten = true;
return this;
};
Sharp.prototype.max = function() {
this.options.canvas = 'm';
return this;


@ -6,7 +6,10 @@
"Pierre Inglebert <pierre.inglebert@gmail.com>",
"Jonathan Ong <jonathanrichardong@gmail.com>",
"Chanon Sajjamanochai <chanon.s@gmail.com>",
"Juliano Julio <julianojulio@gmail.com>"
"Juliano Julio <julianojulio@gmail.com>",
"Daniel Gasienica <daniel@gasienica.ch>",
"Julian Walker <julian@fiftythree.com>",
"Amit Pitaru <pitaru.amit@gmail.com>"
],
"description": "High performance Node.js module to resize JPEG, PNG and WebP images using the libvips library",
"scripts": {
@ -35,8 +38,9 @@
"stream"
],
"dependencies": {
"nan": "^1.3.0",
"bluebird": "^2.3.2"
"bluebird": "^2.3.2",
"color": "^0.7.1",
"nan": "^1.3.0"
},
"devDependencies": {
"imagemagick": "^0.1.3",


@ -37,6 +37,12 @@ struct resize_baton {
int angle;
std::string err;
bool withMetadata;
// background
double background_red;
double background_green;
double background_blue;
// flatten
bool flatten;
resize_baton():
buffer_in_len(0),
@ -49,7 +55,13 @@ struct resize_baton {
gamma(0.0),
progressive(false),
without_enlargement(false),
withMetadata(false) {}
withMetadata(false),
// background
background_red(0.0),
background_green(0.0),
background_blue(0.0),
// flatten
flatten(false) {}
};
typedef enum {
@ -490,14 +502,35 @@ class ResizeWorker : public NanAsyncWorker {
}
g_object_unref(in);
// Flatten
VipsImage *flattened = vips_image_new();
// We skip non-four-band images as we haven't tested two-channel PNGs with
// alpha channel (yet).
// See: https://github.com/lovell/sharp/pull/91#issuecomment-56496548
if (baton->flatten && sharp_image_has_alpha(shrunk_on_load) && shrunk_on_load->Bands == 4) {
VipsArrayDouble *background = vips_array_double_newv(
3, // vector size
baton->background_red,
baton->background_green,
baton->background_blue
);
if (vips_flatten(shrunk_on_load, &flattened, "background", background, NULL)) {
return resize_error(baton, shrunk_on_load);
};
} else {
vips_copy(shrunk_on_load, &flattened, NULL);
}
g_object_unref(shrunk_on_load);
// Gamma encoding (darken)
if (baton->gamma >= 1 && baton->gamma <= 3) {
VipsImage *gamma_encoded = vips_image_new();
if (vips_gamma(shrunk_on_load, &gamma_encoded, "exponent", 1.0 / baton->gamma, NULL)) {
return resize_error(baton, shrunk_on_load);
if (vips_gamma(flattened, &gamma_encoded, "exponent", 1.0 / baton->gamma, NULL)) {
return resize_error(baton, flattened);
}
g_object_unref(shrunk_on_load);
shrunk_on_load = gamma_encoded;
g_object_unref(flattened);
flattened = gamma_encoded;
}
// Convert to greyscale (linear, therefore after gamma encoding, if any)
@ -513,8 +546,8 @@ class ResizeWorker : public NanAsyncWorker {
VipsImage *shrunk = vips_image_new();
if (shrink > 1) {
// Use vips_shrink with the integral reduction
if (vips_shrink(shrunk_on_load, &shrunk, shrink, shrink, NULL)) {
return resize_error(baton, shrunk_on_load);
if (vips_shrink(flattened, &shrunk, shrink, shrink, NULL)) {
return resize_error(baton, flattened);
}
// Recalculate residual float based on dimensions of required vs shrunk images
double shrunkWidth = shrunk->Xsize;
@ -533,9 +566,9 @@ class ResizeWorker : public NanAsyncWorker {
residual = std::min(residualx, residualy);
}
} else {
vips_copy(shrunk_on_load, &shrunk, NULL);
vips_copy(flattened, &shrunk, NULL);
}
g_object_unref(shrunk_on_load);
g_object_unref(flattened);
// Use vips_affine with the remaining float part
VipsImage *affined = vips_image_new();
@ -772,6 +805,15 @@ NAN_METHOD(resize) {
} else if (canvas->Equals(NanNew<String>("m"))) {
baton->max = true;
}
// Flatten
baton->flatten = options->Get(NanNew<String>("flatten"))->BooleanValue();
// Background
baton->background_red = options->Get(NanNew<String>("backgroundRed"))->NumberValue();
baton->background_green = options->Get(NanNew<String>("backgroundGreen"))->NumberValue();
baton->background_blue = options->Get(NanNew<String>("backgroundBlue"))->NumberValue();
// Other options
baton->gravity = options->Get(NanNew<String>("gravity"))->Int32Value();
baton->sharpen = options->Get(NanNew<String>("sharpen"))->BooleanValue();
@ -785,6 +827,7 @@ NAN_METHOD(resize) {
baton->compressionLevel = options->Get(NanNew<String>("compressionLevel"))->Int32Value();
baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
baton->withMetadata = options->Get(NanNew<String>("withMetadata"))->BooleanValue();
// Output filename or __format for Buffer
baton->output = *String::Utf8Value(options->Get(NanNew<String>("output"))->ToString());


@ -546,6 +546,15 @@ async.series([
done();
});
},
function(done) {
sharp(inputPngWithTransparency).resize(320, 80).toFile(outputZoinks, function(err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
done();
});
},
function(done) {
sharp(inputWebP).resize(320, 80).toFile(outputZoinks, function(err, info) {
if (err) throw err;
@ -742,6 +751,42 @@ async.series([
done();
});
},
// Flattening
function(done) {
sharp(inputPngWithTransparency).flatten().resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-black.jpg'), function(err) {
if (err) throw err;
done();
});
},
function(done) {
sharp(inputPngWithTransparency).flatten().background(255, 102, 0).resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-rgb-orange.jpg'), function(err) {
if (err) throw err;
done();
});
},
function(done) {
sharp(inputPngWithTransparency).flatten().background('#ff6600').resize(400, 300).toFile(path.join(fixturesPath, 'output.flatten-hex-orange.jpg'), function(err) {
if (err) throw err;
done();
});
},
function(done) {
sharp(inputJpg).background('#ff0000').flatten().resize(500, 400).toFile(path.join(fixturesPath, 'output.flatten-input-jpg.jpg'), function(err) {
if (err) throw err;
done();
});
},
function(done) {
// Invalid `background` arguments
try {
sharp(inputPngWithTransparency).background(-1, -1, -1).flatten();
} catch (e) {
assert.strictEqual(e.message, "Invalid red value (0.0 to 255.0) -1");
done();
return;
}
assert.fail();
},
// Verify internal counters
function(done) {
var counters = sharp.counters();