Ensure 16-bit input images embed onto alpha background

Support gamma correction of images with alpha channel
Favour shrink over affine when reducing by integral factor
This commit is contained in:
Lovell Fuller 2016-01-30 22:23:17 +00:00
parent 41e50770d1
commit 2675b2265b
10 changed files with 70 additions and 15 deletions

View File

@ -2,9 +2,20 @@
### v0.13 - "*mind*" ### v0.13 - "*mind*"
* Switch from libvips' C to C++ bindings, requires upgrade to v8.2.2.
[#299](https://github.com/lovell/sharp/issues/299)
* Control number of open files in libvips' cache; breaks existing `cache` behaviour. * Control number of open files in libvips' cache; breaks existing `cache` behaviour.
[#315](https://github.com/lovell/sharp/issues/315) [#315](https://github.com/lovell/sharp/issues/315)
* Ensure 16-bit input images can be embedded onto a transparent background.
[#340](https://github.com/lovell/sharp/issues/340)
[@janaz](https://github.com/janaz)
* Small optimisation when reducing by an integral factor to favour shrink over affine.
* Add support for gamma correction of images with an alpha channel.
### v0.12 - "*look*" ### v0.12 - "*look*"
#### v0.12.2 - 16<sup>th</sup> January 2016 #### v0.12.2 - 16<sup>th</sup> January 2016

10
package.json Executable file → Normal file
View File

@ -47,11 +47,11 @@
"vips" "vips"
], ],
"dependencies": { "dependencies": {
"bluebird": "^3.1.1", "bluebird": "^3.1.5",
"color": "^0.11.1", "color": "^0.11.1",
"nan": "^2.2.0", "nan": "^2.2.0",
"semver": "^5.1.0", "semver": "^5.1.0",
"request": "^2.67.0", "request": "^2.69.0",
"tar": "^2.2.1" "tar": "^2.2.1"
}, },
"devDependencies": { "devDependencies": {
@ -60,10 +60,10 @@
"exif-reader": "^1.0.0", "exif-reader": "^1.0.0",
"icc": "^0.0.2", "icc": "^0.0.2",
"istanbul": "^0.4.2", "istanbul": "^0.4.2",
"mocha": "^2.3.4", "mocha": "^2.4.5",
"mocha-jshint": "^2.2.6", "mocha-jshint": "^2.3.0",
"node-cpplint": "^0.4.0", "node-cpplint": "^0.4.0",
"rimraf": "^2.5.0", "rimraf": "^2.5.1",
"bufferutil": "^1.2.1" "bufferutil": "^1.2.1"
}, },
"license": "Apache-2.0", "license": "Apache-2.0",

0
src/metadata.h Executable file → Normal file
View File

View File

@ -102,11 +102,25 @@ namespace sharp {
return image; return image;
} }
/*
 * Gamma encoding/decoding
 *
 * Applies image.gamma with the given exponent. When the image carries an
 * alpha channel, only the colour bands are gamma-adjusted; the alpha band
 * is split off beforehand and re-attached unchanged afterwards.
 */
VImage Gamma(VImage image, double const exponent) {
  if (!HasAlpha(image)) {
    // No alpha channel: gamma-correct the whole image directly
    return image.gamma(VImage::option()->set("exponent", exponent));
  }
  // Separate the trailing alpha band so it is not gamma-adjusted
  int const colourBands = image.bands() - 1;
  VImage colour = image.extract_band(0,
    VImage::option()->set("n", colourBands));
  VImage alpha = image[colourBands];
  // Gamma-correct the colour bands, then rejoin the untouched alpha
  return colour.gamma(VImage::option()->set("exponent", exponent)).bandjoin(alpha);
}
/* /*
* Gaussian blur (use sigma <0 for fast blur) * Gaussian blur (use sigma <0 for fast blur)
*/ */
VImage Blur(VImage image, double const sigma) { VImage Blur(VImage image, double const sigma) {
VImage blurred;
if (sigma < 0.0) { if (sigma < 0.0) {
// Fast, mild blur - averages neighbouring pixels // Fast, mild blur - averages neighbouring pixels
VImage blur = VImage::new_matrixv(3, 3, VImage blur = VImage::new_matrixv(3, 3,

View File

@ -18,6 +18,11 @@ namespace sharp {
*/ */
VImage Normalize(VImage image); VImage Normalize(VImage image);
/*
* Gamma encoding/decoding
*/
VImage Gamma(VImage image, double const exponent);
/* /*
* Gaussian blur. Use sigma of -1 for fast blur. * Gaussian blur. Use sigma of -1 for fast blur.
*/ */

View File

@ -43,6 +43,7 @@ using vips::VError;
using sharp::Composite; using sharp::Composite;
using sharp::Normalize; using sharp::Normalize;
using sharp::Gamma;
using sharp::Blur; using sharp::Blur;
using sharp::Sharpen; using sharp::Sharpen;
@ -425,7 +426,8 @@ class PipelineWorker : public AsyncWorker {
} }
// Calculate maximum alpha value based on input image pixel depth // Calculate maximum alpha value based on input image pixel depth
double maxAlpha = (image.format() == VIPS_FORMAT_USHORT) ? 65535.0 : 255.0; bool is16Bit = (image.format() == VIPS_FORMAT_USHORT);
double maxAlpha = is16Bit ? 65535.0 : 255.0;
// Flatten image to remove alpha channel // Flatten image to remove alpha channel
if (baton->flatten && HasAlpha(image)) { if (baton->flatten && HasAlpha(image)) {
@ -449,8 +451,8 @@ class PipelineWorker : public AsyncWorker {
} }
// Gamma encoding (darken) // Gamma encoding (darken)
if (baton->gamma >= 1 && baton->gamma <= 3 && !HasAlpha(image)) { if (baton->gamma >= 1 && baton->gamma <= 3) {
image = image.gamma(VImage::option()->set("exponent", 1.0 / baton->gamma)); image = Gamma(image, 1.0 / baton->gamma);
} }
// Convert to greyscale (linear, therefore after gamma encoding, if any) // Convert to greyscale (linear, therefore after gamma encoding, if any)
@ -541,10 +543,6 @@ class PipelineWorker : public AsyncWorker {
// Crop/embed // Crop/embed
if (image.width() != baton->width || image.height() != baton->height) { if (image.width() != baton->width || image.height() != baton->height) {
if (baton->canvas == Canvas::EMBED) { if (baton->canvas == Canvas::EMBED) {
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(VImage::black(image.width(), image.height()).invert());
}
// Scale up 8-bit values to match 16-bit input image // Scale up 8-bit values to match 16-bit input image
double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0; double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
// Create background colour // Create background colour
@ -557,6 +555,12 @@ class PipelineWorker : public AsyncWorker {
if (baton->background[3] < 255.0 || HasAlpha(image)) { if (baton->background[3] < 255.0 || HasAlpha(image)) {
background.push_back(baton->background[3] * multiplier); background.push_back(baton->background[3] * multiplier);
} }
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
VImage alpha = VImage::new_matrix(image.width(), image.height())
.new_from_image(baton->background[3] * multiplier);
image = image.bandjoin(alpha);
}
// Embed // Embed
int left = static_cast<int>(round((baton->width - image.width()) / 2)); int left = static_cast<int>(round((baton->width - image.width()) / 2));
int top = static_cast<int>(round((baton->height - image.height()) / 2)); int top = static_cast<int>(round((baton->height - image.height()) / 2));
@ -639,11 +643,17 @@ class PipelineWorker : public AsyncWorker {
// Reverse premultiplication after all transformations: // Reverse premultiplication after all transformations:
if (shouldPremultiplyAlpha) { if (shouldPremultiplyAlpha) {
image = image.unpremultiply(VImage::option()->set("max_alpha", maxAlpha)); image = image.unpremultiply(VImage::option()->set("max_alpha", maxAlpha));
// Cast pixel values to integer
if (is16Bit) {
image = image.cast(VIPS_FORMAT_USHORT);
} else {
image = image.cast(VIPS_FORMAT_UCHAR);
}
} }
// Gamma decoding (brighten) // Gamma decoding (brighten)
if (baton->gamma >= 1 && baton->gamma <= 3 && !HasAlpha(image)) { if (baton->gamma >= 1 && baton->gamma <= 3) {
image = image.gamma(VImage::option()->set("exponent", baton->gamma)); image = Gamma(image, baton->gamma);
} }
// Apply normalization - stretch luminance to cover full dynamic range // Apply normalization - stretch luminance to cover full dynamic range

0
src/pipeline.h Executable file → Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 755 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.1 KiB

After

Width:  |  Height:  |  Size: 4.5 KiB

View File

@ -80,6 +80,21 @@ describe('Embed', function() {
}); });
}); });
it('16-bit PNG with alpha channel onto RGBA', function(done) {
  // Embed a 16-bit PNG with transparency onto a fully transparent background
  // and verify the output dimensions, format and pixel similarity
  var pipeline = sharp(fixtures.inputPngWithTransparency16bit)
    .resize(32, 16)
    .embed()
    .background({r: 0, g: 0, b: 0, a: 0});
  pipeline.toBuffer(function(err, data, info) {
    if (err) throw err;
    assert.strictEqual(true, data.length > 0);
    assert.strictEqual('png', info.format);
    assert.strictEqual(32, info.width);
    assert.strictEqual(16, info.height);
    fixtures.assertSimilar(fixtures.expected('embed-16bit-rgba.png'), data, done);
  });
});
it('Enlarge and embed', function(done) { it('Enlarge and embed', function(done) {
sharp(fixtures.inputPngWithOneColor) sharp(fixtures.inputPngWithOneColor)
.embed() .embed()