Add composite op, supporting multiple images and blend modes #728

This commit is contained in:
Lovell Fuller 2019-03-09 21:28:45 +00:00
parent e3549ba28c
commit 7cafd4386c
16 changed files with 605 additions and 369 deletions

View File

@ -1,33 +1,41 @@
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> <!-- Generated by documentation.js. Update this documentation by updating the source code. -->
## overlayWith ## composite
Overlay (composite) an image over the processed (resized, extracted etc.) image. Composite image(s) over the processed (resized, extracted etc.) image.
The overlay image must be the same size or smaller than the processed image. The images to composite must be the same size or smaller than the processed image.
If both `top` and `left` options are provided, they take precedence over `gravity`. If both `top` and `left` options are provided, they take precedence over `gravity`.
If the overlay image contains an alpha channel then composition with premultiplication will occur. The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
`dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
`xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
`colour-dodge`, `color-dodge`, `colour-burn`, `color-burn`,
`hard-light`, `soft-light`, `difference`, `exclusion`.
More information about blend modes can be found at
[https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode][1]
and [https://www.cairographics.org/operators/][2]
### Parameters ### Parameters
- `overlay` **([Buffer][1] \| [String][2])?** Buffer containing image data or String containing the path to an image file. - `images` **[Array][3]&lt;[Object][4]>** Ordered list of images to composite
- `options` **[Object][3]?** - `images[].input` **([Buffer][5] \| [String][6])?** Buffer containing image data or String containing the path to an image file.
- `options.gravity` **[String][2]** gravity at which to place the overlay. (optional, default `'centre'`) - `images[].blend` **[String][6]** how to blend this image with the image below. (optional, default `'over'`)
- `options.top` **[Number][4]?** the pixel offset from the top edge. - `images[].gravity` **[String][6]** gravity at which to place the overlay. (optional, default `'centre'`)
- `options.left` **[Number][4]?** the pixel offset from the left edge. - `images[].top` **[Number][7]?** the pixel offset from the top edge.
- `options.tile` **[Boolean][5]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`) - `images[].left` **[Number][7]?** the pixel offset from the left edge.
- `options.cutout` **[Boolean][5]** set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. (optional, default `false`) - `images[].tile` **[Boolean][8]** set to true to repeat the overlay image across the entire image with the given `gravity`. (optional, default `false`)
- `options.density` **[Number][4]** number representing the DPI for vector overlay image. (optional, default `72`) - `images[].density` **[Number][7]** number representing the DPI for vector overlay image. (optional, default `72`)
- `options.raw` **[Object][3]?** describes overlay when using raw pixel data. - `images[].raw` **[Object][4]?** describes overlay when using raw pixel data.
- `options.raw.width` **[Number][4]?** - `images[].raw.width` **[Number][7]?**
- `options.raw.height` **[Number][4]?** - `images[].raw.height` **[Number][7]?**
- `options.raw.channels` **[Number][4]?** - `images[].raw.channels` **[Number][7]?**
- `options.create` **[Object][3]?** describes a blank overlay to be created. - `images[].create` **[Object][4]?** describes a blank overlay to be created.
- `options.create.width` **[Number][4]?** - `images[].create.width` **[Number][7]?**
- `options.create.height` **[Number][4]?** - `images[].create.height` **[Number][7]?**
- `options.create.channels` **[Number][4]?** 3-4 - `images[].create.channels` **[Number][7]?** 3-4
- `options.create.background` **([String][2] \| [Object][3])?** parsed by the [color][6] module to extract values for red, green, blue and alpha. - `images[].create.background` **([String][6] \| [Object][4])?** parsed by the [color][9] module to extract values for red, green, blue and alpha.
### Examples ### Examples
@ -36,7 +44,7 @@ sharp('input.png')
.rotate(180) .rotate(180)
.resize(300) .resize(300)
.flatten( { background: '#ff6600' } ) .flatten( { background: '#ff6600' } )
.overlayWith('overlay.png', { gravity: sharp.gravity.southeast } ) .composite([{ input: 'overlay.png', gravity: 'southeast' }])
.sharpen() .sharpen()
.withMetadata() .withMetadata()
.webp( { quality: 90 } ) .webp( { quality: 90 } )
@ -48,20 +56,26 @@ sharp('input.png')
}); });
``` ```
- Throws **[Error][7]** Invalid parameters - Throws **[Error][10]** Invalid parameters
Returns **Sharp** Returns **Sharp**
[1]: https://nodejs.org/api/buffer.html [1]: https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String [2]: https://www.cairographics.org/operators/
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object [3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Array
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number [4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean [5]: https://nodejs.org/api/buffer.html
[6]: https://www.npmjs.org/package/color [6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error [7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[9]: https://www.npmjs.org/package/color
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error

View File

@ -9,6 +9,9 @@ Requires libvips v8.7.4.
* Remove functions previously deprecated in v0.21.0: * Remove functions previously deprecated in v0.21.0:
`background`, `crop`, `embed`, `ignoreAspectRatio`, `max`, `min` and `withoutEnlargement`. `background`, `crop`, `embed`, `ignoreAspectRatio`, `max`, `min` and `withoutEnlargement`.
* Add `composite` operation supporting multiple images and blend modes; deprecate `overlayWith`.
[#728](https://github.com/lovell/sharp/issues/728)
### v0.21 - "*teeth*" ### v0.21 - "*teeth*"
Requires libvips v8.7.0. Requires libvips v8.7.0.

View File

@ -1,21 +1,66 @@
'use strict'; 'use strict';
const deprecate = require('util').deprecate;
const is = require('./is'); const is = require('./is');
/** /**
* Overlay (composite) an image over the processed (resized, extracted etc.) image. * Blend modes.
* @member
* @private
*/
/*
 * Lookup table mapping every accepted `blend` option value to the
 * libvips VipsBlendMode nickname it resolves to. Most entries map to
 * themselves; the American spellings `color-dodge` and `color-burn`
 * alias the British `colour-*` names used by libvips.
 */
const blend = {
  'clear': 'clear',
  'source': 'source',
  'over': 'over',
  'in': 'in',
  'out': 'out',
  'atop': 'atop',
  'dest': 'dest',
  'dest-over': 'dest-over',
  'dest-in': 'dest-in',
  'dest-out': 'dest-out',
  'dest-atop': 'dest-atop',
  'xor': 'xor',
  'add': 'add',
  'saturate': 'saturate',
  'multiply': 'multiply',
  'screen': 'screen',
  'overlay': 'overlay',
  'darken': 'darken',
  'lighten': 'lighten',
  'colour-dodge': 'colour-dodge',
  'color-dodge': 'colour-dodge',
  'colour-burn': 'colour-burn',
  'color-burn': 'colour-burn',
  'hard-light': 'hard-light',
  'soft-light': 'soft-light',
  'difference': 'difference',
  'exclusion': 'exclusion'
};
/**
* Composite image(s) over the processed (resized, extracted etc.) image.
* *
* The overlay image must be the same size or smaller than the processed image. * The images to composite must be the same size or smaller than the processed image.
* If both `top` and `left` options are provided, they take precedence over `gravity`. * If both `top` and `left` options are provided, they take precedence over `gravity`.
* *
* If the overlay image contains an alpha channel then composition with premultiplication will occur. * The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
* `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
* `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
* `colour-dodge`, `color-dodge`, `colour-burn`, `color-burn`,
* `hard-light`, `soft-light`, `difference`, `exclusion`.
*
* More information about blend modes can be found at
* https://libvips.github.io/libvips/API/current/libvips-conversion.html#VipsBlendMode
* and https://www.cairographics.org/operators/
* *
* @example * @example
* sharp('input.png') * sharp('input.png')
* .rotate(180) * .rotate(180)
* .resize(300) * .resize(300)
* .flatten( { background: '#ff6600' } ) * .flatten( { background: '#ff6600' } )
* .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } ) * .composite([{ input: 'overlay.png', gravity: 'southeast' }])
* .sharpen() * .sharpen()
* .withMetadata() * .withMetadata()
* .webp( { quality: 90 } ) * .webp( { quality: 90 } )
@ -26,70 +71,104 @@ const is = require('./is');
* // sharpened, with metadata, 90% quality WebP image data. Phew! * // sharpened, with metadata, 90% quality WebP image data. Phew!
* }); * });
* *
* @param {(Buffer|String)} [overlay] - Buffer containing image data or String containing the path to an image file. * @param {Object[]} images - Ordered list of images to composite
* @param {Object} [options] * @param {Buffer|String} [images[].input] - Buffer containing image data or String containing the path to an image file.
* @param {String} [options.gravity='centre'] - gravity at which to place the overlay. * @param {String} [images[].blend='over'] - how to blend this image with the image below.
* @param {Number} [options.top] - the pixel offset from the top edge. * @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
* @param {Number} [options.left] - the pixel offset from the left edge. * @param {Number} [images[].top] - the pixel offset from the top edge.
* @param {Boolean} [options.tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`. * @param {Number} [images[].left] - the pixel offset from the left edge.
* @param {Boolean} [options.cutout=false] - set to true to apply only the alpha channel of the overlay image to the input image, giving the appearance of one image being cut out of another. * @param {Boolean} [images[].tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
* @param {Number} [options.density=72] - number representing the DPI for vector overlay image. * @param {Number} [images[].density=72] - number representing the DPI for vector overlay image.
* @param {Object} [options.raw] - describes overlay when using raw pixel data. * @param {Object} [images[].raw] - describes overlay when using raw pixel data.
* @param {Number} [options.raw.width] * @param {Number} [images[].raw.width]
* @param {Number} [options.raw.height] * @param {Number} [images[].raw.height]
* @param {Number} [options.raw.channels] * @param {Number} [images[].raw.channels]
* @param {Object} [options.create] - describes a blank overlay to be created. * @param {Object} [images[].create] - describes a blank overlay to be created.
* @param {Number} [options.create.width] * @param {Number} [images[].create.width]
* @param {Number} [options.create.height] * @param {Number} [images[].create.height]
* @param {Number} [options.create.channels] - 3-4 * @param {Number} [images[].create.channels] - 3-4
* @param {String|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. * @param {String|Object} [images[].create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @returns {Sharp} * @returns {Sharp}
* @throws {Error} Invalid parameters * @throws {Error} Invalid parameters
*/ */
function overlayWith (overlay, options) { function composite (images) {
this.options.overlay = this._createInputDescriptor(overlay, options, { if (!Array.isArray(images)) {
allowStream: false throw is.invalidParameterError('images to composite', 'array', images);
}
this.options.composite = images.map(image => {
if (!is.object(image)) {
throw is.invalidParameterError('image to composite', 'object', image);
}
const { raw, density } = image;
const inputOptions = (raw || density) ? { raw, density } : undefined;
const composite = {
input: this._createInputDescriptor(image.input, inputOptions, { allowStream: false }),
blend: 'over',
tile: false,
left: -1,
top: -1,
gravity: 0
};
if (is.defined(image.blend)) {
if (is.string(blend[image.blend])) {
composite.blend = blend[image.blend];
} else {
throw is.invalidParameterError('blend', 'valid blend name', image.blend);
}
}
if (is.defined(image.tile)) {
if (is.bool(image.tile)) {
composite.tile = image.tile;
} else {
throw is.invalidParameterError('tile', 'boolean', image.tile);
}
}
if (is.defined(image.left)) {
if (is.integer(image.left) && image.left >= 0) {
composite.left = image.left;
} else {
throw is.invalidParameterError('left', 'positive integer', image.left);
}
}
if (is.defined(image.top)) {
if (is.integer(image.top) && image.top >= 0) {
composite.top = image.top;
} else {
throw is.invalidParameterError('top', 'positive integer', image.top);
}
}
if (composite.left !== composite.top && Math.min(composite.left, composite.top) === -1) {
throw new Error('Expected both left and top to be set');
}
if (is.defined(image.gravity)) {
if (is.integer(image.gravity) && is.inRange(image.gravity, 0, 8)) {
composite.gravity = image.gravity;
} else if (is.string(image.gravity) && is.integer(this.constructor.gravity[image.gravity])) {
composite.gravity = this.constructor.gravity[image.gravity];
} else {
throw is.invalidParameterError('gravity', 'valid gravity', image.gravity);
}
}
return composite;
}); });
if (is.object(options)) {
if (is.defined(options.tile)) {
if (is.bool(options.tile)) {
this.options.overlayTile = options.tile;
} else {
throw new Error('Invalid overlay tile ' + options.tile);
}
}
if (is.defined(options.cutout)) {
if (is.bool(options.cutout)) {
this.options.overlayCutout = options.cutout;
} else {
throw new Error('Invalid overlay cutout ' + options.cutout);
}
}
if (is.defined(options.left) || is.defined(options.top)) {
if (is.integer(options.left) && options.left >= 0 && is.integer(options.top) && options.top >= 0) {
this.options.overlayXOffset = options.left;
this.options.overlayYOffset = options.top;
} else {
throw new Error('Invalid overlay left ' + options.left + ' and/or top ' + options.top);
}
}
if (is.defined(options.gravity)) {
if (is.integer(options.gravity) && is.inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (is.string(options.gravity) && is.integer(this.constructor.gravity[options.gravity])) {
this.options.overlayGravity = this.constructor.gravity[options.gravity];
} else {
throw new Error('Unsupported overlay gravity ' + options.gravity);
}
}
}
return this; return this;
} }
/**
 * Deprecated shim: forwards a legacy overlayWith call to composite,
 * translating the old `cutout` option into the 'dest-in' blend mode.
 * @deprecated
 * @private
 */
function overlayWith (input, options) {
  let blend = 'over';
  if (is.object(options) && options.cutout) {
    blend = 'dest-in';
  }
  return this.composite([Object.assign({ input, blend }, options)]);
}
/** /**
* Decorate the Sharp prototype with composite-related functions. * Decorate the Sharp prototype with composite-related functions.
* @private * @private
*/ */
module.exports = function (Sharp) { module.exports = function (Sharp) {
Sharp.prototype.overlayWith = overlayWith; Sharp.prototype.composite = composite;
Sharp.prototype.overlayWith = deprecate(overlayWith, 'overlayWith(input, options) is deprecated, use composite([{ input, ...options }]) instead');
Sharp.blend = blend;
}; };

View File

@ -145,12 +145,7 @@ const Sharp = function (input, options) {
removeAlpha: false, removeAlpha: false,
ensureAlpha: false, ensureAlpha: false,
colourspace: 'srgb', colourspace: 'srgb',
// overlay composite: [],
overlayGravity: 0,
overlayXOffset: -1,
overlayYOffset: -1,
overlayTile: false,
overlayCutout: false,
// output // output
fileOut: '', fileOut: '',
formatOut: 'input', formatOut: 'input',

View File

@ -50,130 +50,6 @@ namespace sharp {
return image; return image;
} }
/*
Composite overlayImage over image at given position
Assumes alpha channels are already premultiplied and will be unpremultiplied after
*/
VImage Composite(VImage image, VImage overlayImage, int const left, int const top) {
if (HasAlpha(overlayImage)) {
// Alpha composite
if (overlayImage.width() < image.width() || overlayImage.height() < image.height()) {
// Enlarge overlay
std::vector<double> const background { 0.0, 0.0, 0.0, 0.0 };
overlayImage = overlayImage.embed(left, top, image.width(), image.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
return AlphaComposite(image, overlayImage);
} else {
if (HasAlpha(image)) {
// Add alpha channel to overlayImage so channels match
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
}
return image.insert(overlayImage, left, top);
}
}
VImage AlphaComposite(VImage dst, VImage src) {
// Split src into non-alpha and alpha channels
VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1));
VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0);
// Split dst into non-alpha and alpha channels
VImage dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
VImage dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0);
//
// Compute normalized output alpha channel:
//
// References:
// - http://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending
// - https://github.com/libvips/ruby-vips/issues/28#issuecomment-9014826
//
// out_a = src_a + dst_a * (1 - src_a)
// ^^^^^^^^^^^
// t0
VImage t0 = srcAlpha.linear(-1.0, 1.0);
VImage outAlphaNormalized = srcAlpha + dstAlpha * t0;
//
// Compute output RGB channels:
//
// Wikipedia:
// out_rgb = (src_rgb * src_a + dst_rgb * dst_a * (1 - src_a)) / out_a
// ^^^^^^^^^^^
// t0
//
// Omit division by `out_a` since `Compose` is supposed to output a
// premultiplied RGBA image as reversal of premultiplication is handled
// externally.
//
VImage outRGBPremultiplied = srcWithoutAlpha + dstWithoutAlpha * t0;
// Combine RGB and alpha channel into output image:
return outRGBPremultiplied.bandjoin(outAlphaNormalized * 255.0);
}
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage mask, VImage dst, const int gravity) {
using sharp::CalculateCrop;
using sharp::HasAlpha;
using sharp::MaximumImageAlpha;
bool maskHasAlpha = HasAlpha(mask);
if (!maskHasAlpha && mask.bands() > 1) {
throw VError("Overlay image must have an alpha channel or one band");
}
if (!HasAlpha(dst)) {
throw VError("Image to be overlaid must have an alpha channel");
}
if (mask.width() > dst.width() || mask.height() > dst.height()) {
throw VError("Overlay image must have same dimensions or smaller");
}
// Enlarge overlay mask, if required
if (mask.width() < dst.width() || mask.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), mask.width(), mask.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background));
}
// we use the mask alpha if it has alpha
if (maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));;
}
// Split dst into an optional alpha
VImage dstAlpha = dst.extract_band(dst.bands() - 1, VImage::option()->set("n", 1));
// we use the dst non-alpha
dst = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
// the range of the mask and the image need to match .. one could be
// 16-bit, one 8-bit
double const dstMax = MaximumImageAlpha(dst.interpretation());
double const maskMax = MaximumImageAlpha(mask.interpretation());
// combine the new mask and the existing alpha ... there are
// many ways of doing this, mult is the simplest
mask = dstMax * ((mask / maskMax) * (dstAlpha / dstMax));
// append the mask to the image data ... the mask might be float now,
// we must cast the format down to match the image data
return dst.bandjoin(mask.cast(dst.format()));
}
/* /*
* Tint an image using the specified chroma, preserving the original image luminance * Tint an image using the specified chroma, preserving the original image luminance
*/ */

View File

@ -35,27 +35,6 @@ namespace sharp {
*/ */
VImage EnsureAlpha(VImage image); VImage EnsureAlpha(VImage image);
/*
Alpha composite src over dst with given gravity.
Assumes alpha channels are already premultiplied and will be unpremultiplied after.
*/
VImage Composite(VImage src, VImage dst, const int gravity);
/*
Composite overlayImage over image at given position
*/
VImage Composite(VImage image, VImage overlayImage, int const x, int const y);
/*
Alpha composite overlayImage over image, assumes matching dimensions
*/
VImage AlphaComposite(VImage image, VImage overlayImage);
/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage src, VImage dst, const int gravity);
/* /*
* Tint an image using the specified chroma, preserving the original image luminance * Tint an image using the specified chroma, preserving the original image luminance
*/ */

View File

@ -343,30 +343,19 @@ class PipelineWorker : public Nan::AsyncWorker {
image = image.colourspace(VIPS_INTERPRETATION_B_W); image = image.colourspace(VIPS_INTERPRETATION_B_W);
} }
// Ensure image has an alpha channel when there is an overlay with an alpha channel
VImage overlayImage;
ImageType overlayImageType = ImageType::UNKNOWN;
bool shouldOverlayWithAlpha = FALSE;
if (baton->overlay != nullptr) {
std::tie(overlayImage, overlayImageType) = OpenInput(baton->overlay, baton->accessMethod);
if (HasAlpha(overlayImage)) {
shouldOverlayWithAlpha = !baton->overlayCutout;
if (!HasAlpha(image)) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
}
}
bool const shouldResize = xfactor != 1.0 || yfactor != 1.0; bool const shouldResize = xfactor != 1.0 || yfactor != 1.0;
bool const shouldBlur = baton->blurSigma != 0.0; bool const shouldBlur = baton->blurSigma != 0.0;
bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0; bool const shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
bool const shouldSharpen = baton->sharpenSigma != 0.0; bool const shouldSharpen = baton->sharpenSigma != 0.0;
bool const shouldApplyMedian = baton->medianSize > 0; bool const shouldApplyMedian = baton->medianSize > 0;
bool const shouldComposite = !baton->composite.empty();
if (shouldComposite && !HasAlpha(image)) {
image = sharp::EnsureAlpha(image);
}
bool const shouldPremultiplyAlpha = HasAlpha(image) && bool const shouldPremultiplyAlpha = HasAlpha(image) &&
(shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldOverlayWithAlpha); (shouldResize || shouldBlur || shouldConv || shouldSharpen || shouldComposite);
// Premultiply image alpha channel before all transformations to avoid // Premultiply image alpha channel before all transformations to avoid
// dark fringing around bright pixels // dark fringing around bright pixels
@ -544,72 +533,67 @@ class PipelineWorker : public Nan::AsyncWorker {
image = sharp::Sharpen(image, baton->sharpenSigma, baton->sharpenFlat, baton->sharpenJagged); image = sharp::Sharpen(image, baton->sharpenSigma, baton->sharpenFlat, baton->sharpenJagged);
} }
// Composite with overlay, if present // Composite
if (baton->overlay != nullptr) { if (shouldComposite) {
// Verify overlay image is within current dimensions for (Composite *composite : baton->composite) {
if (overlayImage.width() > image.width() || overlayImage.height() > image.height()) { VImage compositeImage;
throw vips::VError("Overlay image must have same dimensions or smaller"); ImageType compositeImageType = ImageType::UNKNOWN;
std::tie(compositeImage, compositeImageType) = OpenInput(composite->input, baton->accessMethod);
// Verify within current dimensions
if (compositeImage.width() > image.width() || compositeImage.height() > image.height()) {
throw vips::VError("Image to composite must have same dimensions or smaller");
} }
// Check if overlay is tiled // Check if overlay is tiled
if (baton->overlayTile) { if (composite->tile) {
int const overlayImageWidth = overlayImage.width();
int const overlayImageHeight = overlayImage.height();
int across = 0; int across = 0;
int down = 0; int down = 0;
// Use gravity in overlay // Use gravity in overlay
if (overlayImageWidth <= baton->width) { if (compositeImage.width() <= baton->width) {
across = static_cast<int>(ceil(static_cast<double>(image.width()) / overlayImageWidth)); across = static_cast<int>(ceil(static_cast<double>(image.width()) / compositeImage.width()));
} }
if (overlayImageHeight <= baton->height) { if (compositeImage.height() <= baton->height) {
down = static_cast<int>(ceil(static_cast<double>(image.height()) / overlayImageHeight)); down = static_cast<int>(ceil(static_cast<double>(image.height()) / compositeImage.height()));
} }
if (across != 0 || down != 0) { if (across != 0 || down != 0) {
int left; int left;
int top; int top;
overlayImage = overlayImage.replicate(across, down); compositeImage = compositeImage.replicate(across, down);
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) { if (composite->left >= 0 && composite->top >= 0) {
// the overlayX/YOffsets will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop( std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(), compositeImage.width(), compositeImage.height(), image.width(), image.height(),
baton->overlayXOffset, baton->overlayYOffset); composite->left, composite->top);
} else { } else {
// the overlayGravity will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop( std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity); compositeImage.width(), compositeImage.height(), image.width(), image.height(), composite->gravity);
} }
overlayImage = overlayImage.extract_area(left, top, image.width(), image.height()); compositeImage = compositeImage.extract_area(left, top, image.width(), image.height());
} }
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0 // gravity was used for extract_area, set it back to its default value of 0
baton->overlayGravity = 0; composite->gravity = 0;
} }
if (baton->overlayCutout) { // Ensure image to composite is sRGB with premultiplied alpha
// 'cut out' the image, premultiplication is not required compositeImage = compositeImage.colourspace(VIPS_INTERPRETATION_sRGB);
image = sharp::Cutout(overlayImage, image, baton->overlayGravity); if (!HasAlpha(compositeImage)) {
} else { compositeImage = sharp::EnsureAlpha(compositeImage);
// Ensure overlay is sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB);
// Ensure overlay matches premultiplication state
if (shouldPremultiplyAlpha) {
// Ensure overlay has alpha channel
if (!HasAlpha(overlayImage)) {
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
}
overlayImage = overlayImage.premultiply();
} }
compositeImage = compositeImage.premultiply();
// Calculate position
int left; int left;
int top; int top;
if (baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) { if (composite->left >= 0 && composite->top >= 0) {
// Composite images at given offsets // Composite image at given offsets
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(), std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayXOffset, baton->overlayYOffset); compositeImage.width(), compositeImage.height(), composite->left, composite->top);
} else { } else {
// Composite images with given gravity // Composite image with given gravity
std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(), std::tie(left, top) = sharp::CalculateCrop(image.width(), image.height(),
overlayImage.width(), overlayImage.height(), baton->overlayGravity); compositeImage.width(), compositeImage.height(), composite->gravity);
} }
image = sharp::Composite(image, overlayImage, left, top); // Composite
image = image.composite2(compositeImage, composite->mode, VImage::option()
->set("premultiplied", TRUE)
->set("x", left)
->set("y", top));
} }
} }
@ -1029,13 +1013,17 @@ class PipelineWorker : public Nan::AsyncWorker {
GetFromPersistent(index); GetFromPersistent(index);
return index + 1; return index + 1;
}); });
// Delete baton
delete baton->input; delete baton->input;
delete baton->overlay;
delete baton->boolean; delete baton->boolean;
for_each(baton->joinChannelIn.begin(), baton->joinChannelIn.end(), for (Composite *composite : baton->composite) {
[this](sharp::InputDescriptor *joinChannelIn) { delete composite->input;
delete joinChannelIn; delete composite;
}); }
for (sharp::InputDescriptor *input : baton->joinChannelIn) {
delete input;
}
delete baton; delete baton;
// Handle warnings // Handle warnings
@ -1182,14 +1170,21 @@ NAN_METHOD(pipeline) {
// Tint chroma // Tint chroma
baton->tintA = AttrTo<double>(options, "tintA"); baton->tintA = AttrTo<double>(options, "tintA");
baton->tintB = AttrTo<double>(options, "tintB"); baton->tintB = AttrTo<double>(options, "tintB");
// Overlay options // Composite
if (HasAttr(options, "overlay")) { v8::Local<v8::Array> compositeArray = Nan::Get(options, Nan::New("composite").ToLocalChecked())
baton->overlay = CreateInputDescriptor(AttrAs<v8::Object>(options, "overlay"), buffersToPersist); .ToLocalChecked().As<v8::Array>();
baton->overlayGravity = AttrTo<int32_t>(options, "overlayGravity"); int const compositeArrayLength = AttrTo<uint32_t>(compositeArray, "length");
baton->overlayXOffset = AttrTo<int32_t>(options, "overlayXOffset"); for (int i = 0; i < compositeArrayLength; i++) {
baton->overlayYOffset = AttrTo<int32_t>(options, "overlayYOffset"); v8::Local<v8::Object> compositeObject = Nan::Get(compositeArray, i).ToLocalChecked().As<v8::Object>();
baton->overlayTile = AttrTo<bool>(options, "overlayTile"); Composite *composite = new Composite;
baton->overlayCutout = AttrTo<bool>(options, "overlayCutout"); composite->input = CreateInputDescriptor(AttrAs<v8::Object>(compositeObject, "input"), buffersToPersist);
composite->mode = static_cast<VipsBlendMode>(
vips_enum_from_nick(nullptr, VIPS_TYPE_BLEND_MODE, AttrAsStr(compositeObject, "blend").data()));
composite->gravity = AttrTo<uint32_t>(compositeObject, "gravity");
composite->left = AttrTo<int32_t>(compositeObject, "left");
composite->top = AttrTo<int32_t>(compositeObject, "top");
composite->tile = AttrTo<bool>(compositeObject, "tile");
baton->composite.push_back(composite);
} }
// Resize options // Resize options
baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement"); baton->withoutEnlargement = AttrTo<bool>(options, "withoutEnlargement");

View File

@ -34,6 +34,23 @@ enum class Canvas {
IGNORE_ASPECT IGNORE_ASPECT
}; };
struct Composite {
sharp::InputDescriptor *input;
VipsBlendMode mode;
int gravity;
int left;
int top;
bool tile;
Composite():
input(nullptr),
mode(VIPS_BLEND_MODE_OVER),
gravity(0),
left(-1),
top(-1),
tile(false) {}
};
struct PipelineBaton { struct PipelineBaton {
sharp::InputDescriptor *input; sharp::InputDescriptor *input;
std::string iccProfilePath; std::string iccProfilePath;
@ -42,12 +59,7 @@ struct PipelineBaton {
std::string fileOut; std::string fileOut;
void *bufferOut; void *bufferOut;
size_t bufferOutLength; size_t bufferOutLength;
sharp::InputDescriptor *overlay; std::vector<Composite *> composite;
int overlayGravity;
int overlayXOffset;
int overlayYOffset;
bool overlayTile;
bool overlayCutout;
std::vector<sharp::InputDescriptor *> joinChannelIn; std::vector<sharp::InputDescriptor *> joinChannelIn;
int topOffsetPre; int topOffsetPre;
int leftOffsetPre; int leftOffsetPre;
@ -161,12 +173,6 @@ struct PipelineBaton {
input(nullptr), input(nullptr),
limitInputPixels(0), limitInputPixels(0),
bufferOutLength(0), bufferOutLength(0),
overlay(nullptr),
overlayGravity(0),
overlayXOffset(-1),
overlayYOffset(-1),
overlayTile(false),
overlayCutout(false),
topOffsetPre(-1), topOffsetPre(-1),
topOffsetPost(-1), topOffsetPost(-1),
channels(0), channels(0),

Binary file not shown.

After

Width:  |  Height:  |  Size: 175 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 222 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 197 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 197 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 194 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 192 B

298
test/unit/composite.js Normal file
View File

@ -0,0 +1,298 @@
'use strict';

const assert = require('assert');
const fixtures = require('../fixtures');
const sharp = require('../../');

// Semi-transparent primary colours used as rectangle fills below.
const red = { r: 255, g: 0, b: 0, alpha: 0.5 };
const green = { r: 0, g: 255, b: 0, alpha: 0.5 };
const blue = { r: 0, g: 0, b: 255, alpha: 0.5 };

// Build a sharp `create` input descriptor for a solid RGBA rectangle.
const solidRect = (width, height, background) => ({
  create: {
    width,
    height,
    channels: 4,
    background
  }
});

const redRect = solidRect(80, 60, red);
const greenRect = solidRect(40, 40, green);
const blueRect = solidRect(60, 40, blue);

// Subset of blend modes compared against pre-rendered expected fixtures.
const blends = [
  'over',
  'xor',
  'saturate',
  'dest-over'
];
// Tests for the composite operation (successor to overlayWith):
// blend modes, multiple images, gravity/offset placement, tiling and
// input validation.
describe('composite', () => {
  it('blend', () => Promise.all(
    blends.map(blend => {
      const filename = `composite.blend.${blend}.png`;
      const actual = fixtures.path(`output.${filename}`);
      const expected = fixtures.expected(filename);
      return sharp(redRect)
        .composite([{
          input: blueRect,
          blend
        }])
        .toFile(actual)
        .then(() => {
          fixtures.assertMaxColourDistance(actual, expected);
        });
    })
  ));

  it('multiple', () => {
    const filename = 'composite-multiple.png';
    const actual = fixtures.path(`output.${filename}`);
    const expected = fixtures.expected(filename);
    return sharp(redRect)
      .composite([{
        input: blueRect,
        gravity: 'northeast'
      }, {
        input: greenRect,
        gravity: 'southwest'
      }])
      .toFile(actual)
      .then(() => {
        fixtures.assertMaxColourDistance(actual, expected);
      });
  });

  it('zero offset', done => {
    sharp(fixtures.inputJpg)
      .resize(400)
      .composite([{
        input: fixtures.inputPngWithTransparency16bit,
        top: 0,
        left: 0
      }])
      .toBuffer((err, data, info) => {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(fixtures.expected('overlay-offset-0.jpg'), data, done);
      });
  });

  it('offset and gravity', done => {
    // left/top take precedence over gravity when both are provided.
    sharp(fixtures.inputJpg)
      .resize(400)
      .composite([{
        input: fixtures.inputPngWithTransparency16bit,
        left: 10,
        top: 10,
        gravity: 4
      }])
      .toBuffer((err, data, info) => {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity.jpg'), data, done);
      });
  });

  it('offset, gravity and tile', done => {
    sharp(fixtures.inputJpg)
      .resize(400)
      .composite([{
        input: fixtures.inputPngWithTransparency16bit,
        left: 10,
        top: 10,
        gravity: 4,
        tile: true
      }])
      .toBuffer((err, data, info) => {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(fixtures.expected('overlay-offset-with-gravity-tile.jpg'), data, done);
      });
  });

  it('offset and tile', done => {
    sharp(fixtures.inputJpg)
      .resize(400)
      .composite([{
        input: fixtures.inputPngWithTransparency16bit,
        left: 10,
        top: 10,
        tile: true
      }])
      .toBuffer((err, data, info) => {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(fixtures.expected('overlay-offset-with-tile.jpg'), data, done);
      });
  });

  it('cutout via dest-in', done => {
    sharp(fixtures.inputJpg)
      .resize(300, 300)
      .composite([{
        input: Buffer.from('<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'),
        density: 96,
        blend: 'dest-in',
        // NOTE(review): 'cutout' is not among the validated composite options
        // (see the validation suite below) — presumably a leftover from the
        // old overlayWith API; verify whether it can be removed.
        cutout: true
      }])
      .png()
      .toBuffer((err, data, info) => {
        if (err) throw err;
        assert.strictEqual('png', info.format);
        assert.strictEqual(300, info.width);
        assert.strictEqual(300, info.height);
        assert.strictEqual(4, info.channels);
        fixtures.assertSimilar(fixtures.expected('composite-cutout.png'), data, done);
      });
  });

  describe('numeric gravity', () => {
    Object.keys(sharp.gravity).forEach(gravity => {
      it(gravity, done => {
        sharp(fixtures.inputJpg)
          .resize(80)
          .composite([{
            input: fixtures.inputPngWithTransparency16bit,
            gravity: sharp.gravity[gravity]
          }])
          .toBuffer((err, data, info) => {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(fixtures.expected(`overlay-gravity-${gravity}.jpg`), data, done);
          });
      });
    });
  });

  describe('string gravity', () => {
    Object.keys(sharp.gravity).forEach(gravity => {
      it(gravity, done => {
        const expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
        sharp(fixtures.inputJpg)
          .resize(80)
          .composite([{
            input: fixtures.inputPngWithTransparency16bit,
            gravity: gravity
          }])
          .toBuffer((err, data, info) => {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(expected, data, done);
          });
      });
    });
  });

  describe('tile and gravity', () => {
    Object.keys(sharp.gravity).forEach(gravity => {
      it(gravity, done => {
        const expected = fixtures.expected('overlay-tile-gravity-' + gravity + '.jpg');
        sharp(fixtures.inputJpg)
          .resize(80)
          .composite([{
            input: fixtures.inputPngWithTransparency16bit,
            tile: true,
            gravity: gravity
          }])
          .toBuffer((err, data, info) => {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(expected, data, done);
          });
      });
    });
  });

  describe('validation', () => {
    it('missing images', () => {
      assert.throws(() => {
        sharp().composite();
      }, /Expected array for images to composite but received undefined of type undefined/);
    });
    it('invalid images', () => {
      assert.throws(() => {
        sharp().composite(['invalid']);
      }, /Expected object for image to composite but received invalid of type string/);
    });
    it('missing input', () => {
      assert.throws(() => {
        sharp().composite([{}]);
      }, /Unsupported input/);
    });
    it('invalid blend', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', blend: 'invalid' }]);
      }, /Expected valid blend name for blend but received invalid of type string/);
    });
    it('invalid tile', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', tile: 'invalid' }]);
      }, /Expected boolean for tile but received invalid of type string/);
    });
    it('invalid left', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', left: 0.5 }]);
      }, /Expected positive integer for left but received 0.5 of type number/);
    });
    it('invalid top', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', top: -1 }]);
      }, /Expected positive integer for top but received -1 of type number/);
    });
    it('left but no top', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', left: 1 }]);
      }, /Expected both left and top to be set/);
    });
    it('top but no left', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', top: 1 }]);
      }, /Expected both left and top to be set/);
    });
    it('invalid gravity', () => {
      assert.throws(() => {
        sharp().composite([{ input: 'test', gravity: 'invalid' }]);
      }, /Expected valid gravity for gravity but received invalid of type string/);
    });
  });
});

View File

@ -140,7 +140,6 @@ describe('Overlays', function () {
}); });
}); });
if (sharp.format.webp.input.file) {
it('Composite WebP onto JPEG', function (done) { it('Composite WebP onto JPEG', function (done) {
const paths = getPaths('overlay-jpeg-with-webp', 'jpg'); const paths = getPaths('overlay-jpeg-with-webp', 'jpg');
@ -153,24 +152,23 @@ describe('Overlays', function () {
done(); done();
}); });
}); });
}
it('Composite JPEG onto PNG, no premultiply', function (done) { it('Composite JPEG onto PNG, ensure premultiply', function (done) {
sharp(fixtures.inputPngOverlayLayer1) sharp(fixtures.inputPngOverlayLayer1)
.overlayWith(fixtures.inputJpgWithLandscapeExif1) .overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(false, info.premultiplied); assert.strictEqual(true, info.premultiplied);
done(); done();
}); });
}); });
it('Composite opaque JPEG onto JPEG, no premultiply', function (done) { it('Composite opaque JPEG onto JPEG, ensure premultiply', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.overlayWith(fixtures.inputJpgWithLandscapeExif1) .overlayWith(fixtures.inputJpgWithLandscapeExif1)
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual(false, info.premultiplied); assert.strictEqual(true, info.premultiplied);
done(); done();
}); });
}); });
@ -409,12 +407,6 @@ describe('Overlays', function () {
}); });
}); });
it('Overlay with invalid cutout option', function () {
assert.throws(function () {
sharp().overlayWith('ignore', { cutout: 1 });
});
});
it('Overlay with invalid tile option', function () { it('Overlay with invalid tile option', function () {
assert.throws(function () { assert.throws(function () {
sharp().overlayWith('ignore', { tile: 1 }); sharp().overlayWith('ignore', { tile: 1 });
@ -580,18 +572,17 @@ describe('Overlays', function () {
}); });
}); });
it('Composite JPEG onto JPEG, no premultiply', function (done) { it('Composite JPEG onto JPEG', function (done) {
sharp(fixtures.inputJpg) sharp(fixtures.inputJpg)
.resize(480, 320) .resize(480, 320)
.overlayWith(fixtures.inputJpgBooleanTest) .overlayWith(fixtures.inputJpgBooleanTest)
.png()
.toBuffer(function (err, data, info) { .toBuffer(function (err, data, info) {
if (err) throw err; if (err) throw err;
assert.strictEqual('png', info.format); assert.strictEqual('jpeg', info.format);
assert.strictEqual(480, info.width); assert.strictEqual(480, info.width);
assert.strictEqual(320, info.height); assert.strictEqual(320, info.height);
assert.strictEqual(3, info.channels); assert.strictEqual(3, info.channels);
assert.strictEqual(false, info.premultiplied); assert.strictEqual(true, info.premultiplied);
fixtures.assertSimilar(fixtures.expected('overlay-jpeg-with-jpeg.jpg'), data, done); fixtures.assertSimilar(fixtures.expected('overlay-jpeg-with-jpeg.jpg'), data, done);
}); });
}); });