Add support for clipping/cutting out (#435) (#448)

USAGE: overlayWith('overlayimage.png', { cutout: true })
Authored by Kleis Auke Wolthuizen on 2016-06-25 17:48:01 +02:00; committed by Lovell Fuller
parent f1ead06645
commit 2e9cd83ed2
21 changed files with 167 additions and 10 deletions
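
Expanding on the USAGE line above, a minimal sketch of how the new option is called from the JavaScript API (the input and output file names here are placeholders, not part of this commit):

var sharp = require('sharp');

// With cutout enabled, the overlay's alpha channel (or its single band) is used
// as a mask that cuts the base image out, rather than being composited on top.
sharp('input.jpg')
  .resize(80)
  .overlayWith('overlayimage.png', { cutout: true, gravity: 'northwest' })
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    console.log(info.format, info.width + 'x' + info.height);
  });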


@@ -93,6 +93,7 @@ var Sharp = function(input, options) {
overlayBufferIn: null,
overlayGravity: 0,
overlayTile: false,
overlayCutout: false,
// output options
formatOut: 'input',
fileOut: '',
@@ -361,9 +362,18 @@ Sharp.prototype.overlayWith = function(overlay, options) {
else if (isBoolean(options.tile)) {
this.options.overlayTile = options.tile;
} else {
throw new Error(' Invalid Value for tile ' + options.tile + 'Only Boolean Values allowed for overlay.tile.');
throw new Error('Invalid Value for tile ' + options.tile + ' Only Boolean Values allowed for overlay.tile.');
}
if (typeof options.cutout === 'undefined') {
this.options.overlayCutout = false;
}
else if (isBoolean(options.cutout)) {
this.options.overlayCutout = options.cutout;
} else {
throw new Error('Invalid Value for cutout ' + options.cutout + ' Only Boolean Values allowed for overlay.cutout.');
}
if (isInteger(options.gravity) && inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (isString(options.gravity) && isInteger(module.exports.gravity[options.gravity])) {
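
The validation added above mirrors the existing handling of overlay.tile: an omitted cutout leaves overlayCutout at false, a boolean value is stored, and anything else throws synchronously. A short sketch of the resulting behaviour (file names are placeholders):

var sharp = require('sharp');

sharp('in.jpg').overlayWith('mask.png', { cutout: true });   // accepted
sharp('in.jpg').overlayWith('mask.png', { cutout: false });  // accepted, same as omitting the option

try {
  sharp('in.jpg').overlayWith('mask.png', { cutout: 'yes' });
} catch (err) {
  // "Invalid Value for cutout yes Only Boolean Values allowed for overlay.cutout."
  console.error(err.message);
}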


@@ -24,7 +24,8 @@
"Felix Bünemann <Felix.Buenemann@gmail.com>",
"Samy Al Zahrani <samyalzahrany@gmail.com>",
"Chintan Thakkar <lemnisk8@gmail.com>",
"F. Orlando Galashan <frulo@gmx.de>"
"F. Orlando Galashan <frulo@gmx.de>",
"Kleis Auke Wolthuizen <info@kleisauke.nl>"
],
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"scripts": {


@@ -277,4 +277,17 @@ namespace sharp {
return std::make_tuple(left, top);
}
/*
Return the image alpha maximum. Useful for combining alpha bands. scRGB
images are 0 - 1 for image data, but the alpha is 0 - 255.
*/
int MaximumImageAlpha(VipsInterpretation interpretation) {
if(interpretation == VIPS_INTERPRETATION_RGB16 ||
interpretation == VIPS_INTERPRETATION_GREY16) {
return (65535);
} else {
return (255);
}
}
} // namespace sharp


@@ -108,6 +108,8 @@ namespace sharp {
std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
int const outWidth, int const outHeight, int const gravity);
int MaximumImageAlpha(VipsInterpretation interpretation);
} // namespace sharp
#endif // SRC_COMMON_H_


@@ -81,6 +81,65 @@ namespace sharp {
return outRGBPremultiplied.bandjoin(outAlphaNormalized * 255.0);
}
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage mask, VImage dst, const int gravity) {
using sharp::CalculateCrop;
using sharp::HasAlpha;
using sharp::MaximumImageAlpha;
bool maskHasAlpha = HasAlpha(mask);
if (!maskHasAlpha && mask.bands() > 1) {
throw VError("Overlay image must have an alpha channel or one band");
}
if (!HasAlpha(dst)) {
throw VError("Image to be overlaid must have an alpha channel");
}
if (mask.width() > dst.width() || mask.height() > dst.height()) {
throw VError("Overlay image must have same dimensions or smaller");
}
// Enlarge overlay mask, if required
if (mask.width() < dst.width() || mask.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), mask.width(), mask.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
}
// we use the mask alpha if it has alpha
if(maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));
}
// Split dst into an optional alpha
VImage dstAlpha = dst.extract_band(dst.bands() - 1, VImage::option()->set("n", 1));
// we use the dst non-alpha
dst = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
// the range of the mask and the image need to match .. one could be
// 16-bit, one 8-bit
int dstMax = MaximumImageAlpha(dst.interpretation());
int maskMax = MaximumImageAlpha(mask.interpretation());
// combine the new mask and the existing alpha ... there are
// many ways of doing this, mult is the simplest
mask = dstMax * ((mask / maskMax) * (dstAlpha / dstMax));
// append the mask to the image data ... the mask might be float now,
// we must cast the format down to match the image data
return dst.bandjoin(mask.cast(dst.format()));
}
/*
* Stretch luminance to cover full dynamic range.
*/
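
The band arithmetic at the end of Cutout above is simply the product of the two normalised alpha bands, rescaled to the destination's range, with MaximumImageAlpha supplying the 255 or 65535 ceiling for each side:

alpha_out = dstMax * ((mask / maskMax) * (dstAlpha / dstMax))
          = (mask * dstAlpha) / maskMax

so an 8-bit mask combined with a 16-bit alpha band still lands within the destination's 0-65535 range, and the final cast() brings the possibly-float result back to the image's band format.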


@@ -14,6 +14,11 @@ namespace sharp {
*/
VImage Composite(VImage src, VImage dst, const int gravity);
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage src, VImage dst, const int gravity);
/*
* Stretch luminance to cover full dynamic range.
*/


@@ -45,6 +45,7 @@ using vips::VOption;
using vips::VError;
using sharp::Composite;
using sharp::Cutout;
using sharp::Normalize;
using sharp::Gamma;
using sharp::Blur;
@@ -464,8 +465,9 @@ class PipelineWorker : public AsyncWorker {
bool shouldBlur = baton->blurSigma != 0.0;
bool shouldSharpen = baton->sharpenSigma != 0.0;
bool shouldThreshold = baton->threshold != 0;
bool shouldCutout = baton->overlayCutout;
bool shouldPremultiplyAlpha = HasAlpha(image) &&
(shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay);
(shouldAffineTransform || shouldBlur || shouldSharpen || (hasOverlay && !shouldCutout));
// Premultiply image alpha channel before all transformations to avoid
// dark fringing around bright pixels
@@ -699,10 +701,15 @@ class PipelineWorker : public AsyncWorker {
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0
baton->overlayGravity = 0;
}
// Ensure overlay is premultiplied sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();
// Composite images with given gravity
image = Composite(overlayImage, image, baton->overlayGravity);
if(shouldCutout) {
// 'cut out' the image, premultiplication is not required
image = Cutout(overlayImage, image, baton->overlayGravity);
} else {
// Ensure overlay is premultiplied sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();
// Composite images with given gravity
image = Composite(overlayImage, image, baton->overlayGravity);
}
}
// Reverse premultiplication after all transformations:
@@ -1086,6 +1093,7 @@ NAN_METHOD(pipeline) {
}
baton->overlayGravity = attrAs<int32_t>(options, "overlayGravity");
baton->overlayTile = attrAs<bool>(options, "overlayTile");
baton->overlayCutout = attrAs<bool>(options, "overlayCutout");
// Resize options
baton->withoutEnlargement = attrAs<bool>(options, "withoutEnlargement");
baton->crop = attrAs<int32_t>(options, "crop");


@@ -34,6 +34,7 @@ struct PipelineBaton {
size_t overlayBufferInLength;
int overlayGravity;
bool overlayTile;
bool overlayCutout;
int topOffsetPre;
int leftOffsetPre;
int widthPre;
@@ -99,6 +100,7 @@ struct PipelineBaton {
overlayBufferInLength(0),
overlayGravity(0),
overlayTile(false),
overlayCutout(false),
topOffsetPre(-1),
topOffsetPost(-1),
channels(0),

12 new binary test fixture images added (633 B to 842 B each); previews not shown.


@@ -264,7 +264,6 @@ describe('Overlays', function() {
});
});
it('With tile enabled and image rotated 90 degrees', function(done) {
var expected = fixtures.expected('overlay-tile-rotated90.jpg');
sharp(fixtures.inputJpg)
@@ -283,7 +282,6 @@ describe('Overlays', function() {
});
});
it('With tile enabled and image rotated 90 degrees and gravity northwest', function(done) {
var expected = fixtures.expected('overlay-tile-rotated90-gravity-northwest.jpg');
sharp(fixtures.inputJpg)
@@ -303,4 +301,63 @@ describe('Overlays', function() {
});
});
describe('Overlay with cutout enabled and gravity', function() {
Object.keys(sharp.gravity).forEach(function(gravity) {
it(gravity, function(done) {
var expected = fixtures.expected('overlay-cutout-gravity-' + gravity + '.jpg');
sharp(fixtures.inputJpg)
.resize(80)
.overlayWith(fixtures.inputPngWithTransparency16bit, {
cutout: true,
gravity: gravity
})
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(65, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});
});
it('With cutout enabled and image rotated 90 degrees', function(done) {
var expected = fixtures.expected('overlay-cutout-rotated90.jpg');
sharp(fixtures.inputJpg)
.rotate(90)
.resize(80)
.overlayWith(fixtures.inputPngWithTransparency16bit, {
cutout: true
})
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(98, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
it('With cutout enabled and image rotated 90 degrees and gravity northwest', function(done) {
var expected = fixtures.expected('overlay-cutout-rotated90-gravity-northwest.jpg');
sharp(fixtures.inputJpg)
.rotate(90)
.resize(80)
.overlayWith(fixtures.inputPngWithTransparency16bit, {
cutout: true,
gravity: 'northwest'
})
.toBuffer(function(err, data, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(80, info.width);
assert.strictEqual(98, info.height);
assert.strictEqual(3, info.channels);
fixtures.assertSimilar(expected, data, done);
});
});
});