overlayWith improvements: diff sizes/formats, gravity, buffer input
docs/api.md (19 changed lines)
@@ -365,13 +365,18 @@ The output image will still be web-friendly sRGB and contain three (identical) c

Enhance output image contrast by stretching its luminance to cover the full dynamic range. This typically reduces performance by 30%.

#### overlayWith(path)
#### overlayWith(image, [options])

_Experimental_

Overlay (composite) an image containing an alpha channel over the processed (resized, extracted etc.) image.
Alpha composite image at `path` over the processed (resized, extracted) image. The dimensions of the two images must match.
`image` is one of the following, and must be the same size or smaller than the processed image:

* `path` is a String containing the path to an image file with an alpha channel.
* Buffer containing PNG, WebP, GIF or SVG image data, or
* String containing the path to an image file, with most major transparency formats supported.

`options`, if present, is an Object with the following optional attributes:

* `gravity` is a String or an attribute of the `sharp.gravity` Object e.g. `sharp.gravity.north` at which to place the overlay, defaulting to `center`/`centre`.

```javascript
sharp('input.png')
@@ -379,7 +384,7 @@ sharp('input.png')
  .resize(300)
  .flatten()
  .background('#ff6600')
  .overlayWith('overlay.png')
  .overlayWith('overlay.png', { gravity: sharp.gravity.southeast } )
  .sharpen()
  .withMetadata()
  .quality(90)
@@ -387,8 +392,8 @@ sharp('input.png')
  .toBuffer()
  .then(function(outputBuffer) {
    // outputBuffer contains upside down, 300px wide, alpha channel flattened
    // onto orange background, composited with overlay.png, sharpened,
    // with metadata, 90% quality WebP image data. Phew!
    // onto orange background, composited with overlay.png with SE gravity,
    // sharpened, with metadata, 90% quality WebP image data. Phew!
  });
```

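For illustration, a minimal sketch of the new Buffer input described above (file names are hypothetical; the overlay is assumed to contain an alpha channel):

```javascript
var fs = require('fs');
var sharp = require('sharp');

// Read the overlay into memory, then composite it onto the resized image
// using the new Buffer input and a string-based gravity.
var overlay = fs.readFileSync('overlay.png');

sharp('input.png')
  .resize(300)
  .overlayWith(overlay, { gravity: 'southeast' })
  .toBuffer(function(err, data, info) {
    // data contains the resized image with overlay.png composited
    // in its bottom-right (southeast) corner
  });
```
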
@@ -1,5 +1,11 @@
# Changelog

### v0.14 - "*needle*"

* Improvements to overlayWith: differing sizes/formats, gravity, buffer input.
  [#239](https://github.com/lovell/sharp/issues/239)
  [@chrisriley](https://github.com/chrisriley)

### v0.13 - "*mind*"

#### v0.13.1 - 27<sup>th</sup> February 2016

index.js (46 changed lines)
@@ -84,7 +84,9 @@ var Sharp = function(input, options) {
    greyscale: false,
    normalize: 0,
    // overlay
    overlayPath: '',
    overlayFileIn: '',
    overlayBufferIn: null,
    overlayGravity: 0,
    // output options
    formatOut: 'input',
    fileOut: '',
@@ -106,13 +108,13 @@ var Sharp = function(input, options) {
      module.exports.queue.emit('change', queueLength);
    }
  };
  if (typeof input === 'string') {
  if (isString(input)) {
    // input=file
    this.options.fileIn = input;
  } else if (typeof input === 'object' && input instanceof Buffer) {
  } else if (isBuffer(input)) {
    // input=buffer
    this.options.bufferIn = input;
  } else if (typeof input === 'undefined' || input === null) {
  } else if (!isDefined(input)) {
    // input=stream
    this.options.streamIn = true;
  } else {
@@ -148,6 +150,12 @@ var isDefined = function(val) {
var isObject = function(val) {
  return typeof val === 'object';
};
var isBuffer = function(val) {
  return typeof val === 'object' && val instanceof Buffer;
};
var isString = function(val) {
  return typeof val === 'string' && val.length > 0;
};
var isInteger = function(val) {
  return typeof val === 'number' && !Number.isNaN(val) && val % 1 === 0;
};
@@ -232,11 +240,11 @@ module.exports.gravity = {

Sharp.prototype.crop = function(gravity) {
  this.options.canvas = 'crop';
  if (typeof gravity === 'undefined') {
  if (!isDefined(gravity)) {
    this.options.gravity = module.exports.gravity.center;
  } else if (typeof gravity === 'number' && !Number.isNaN(gravity) && gravity >= 0 && gravity <= 8) {
  } else if (isInteger(gravity) && inRange(gravity, 0, 8)) {
    this.options.gravity = gravity;
  } else if (typeof gravity === 'string' && typeof module.exports.gravity[gravity] === 'number') {
  } else if (isString(gravity) && isInteger(module.exports.gravity[gravity])) {
    this.options.gravity = module.exports.gravity[gravity];
  } else {
    throw new Error('Unsupported crop gravity ' + gravity);
@@ -316,14 +324,26 @@ Sharp.prototype.negate = function(negate) {
  return this;
};

Sharp.prototype.overlayWith = function(overlayPath) {
  if (typeof overlayPath !== 'string') {
    throw new Error('The overlay path must be a string');
/*
  Overlay with another image, using an optional gravity
*/
Sharp.prototype.overlayWith = function(overlay, options) {
  if (isString(overlay)) {
    this.options.overlayFileIn = overlay;
  } else if (isBuffer(overlay)) {
    this.options.overlayBufferIn = overlay;
  } else {
    throw new Error('Unsupported overlay ' + typeof overlay);
  }
  if (overlayPath === '') {
    throw new Error('The overlay path cannot be empty');
  if (isObject(options)) {
    if (isInteger(options.gravity) && inRange(options.gravity, 0, 8)) {
      this.options.overlayGravity = options.gravity;
    } else if (isString(options.gravity) && isInteger(module.exports.gravity[options.gravity])) {
      this.options.overlayGravity = module.exports.gravity[options.gravity];
    } else if (isDefined(options.gravity)) {
      throw new Error('Unsupported overlay gravity ' + options.gravity);
    }
  }
  this.options.overlayPath = overlayPath;
  return this;
};

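Based on the validation above, gravity can be supplied either as a `sharp.gravity` constant or as its string name, and any other defined value throws synchronously. A small sketch (hypothetical file names):

```javascript
var sharp = require('sharp');

// Both forms resolve to the same internal overlayGravity value.
sharp('input.png').overlayWith('overlay.png', { gravity: 'north' });
sharp('input.png').overlayWith('overlay.png', { gravity: sharp.gravity.north });

// Values outside 0-8 (and overlays that are neither String nor Buffer) throw.
try {
  sharp('input.png').overlayWith('overlay.png', { gravity: 9 });
} catch (err) {
  console.log(err.message); // 'Unsupported overlay gravity 9'
}
```
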
@@ -1,6 +1,6 @@
{
  "name": "sharp",
  "version": "0.13.1",
  "version": "0.14.0",
  "author": "Lovell Fuller <npm@lovell.info>",
  "contributors": [
    "Pierre Inglebert <pierre.inglebert@gmail.com>",

@@ -185,4 +185,45 @@ namespace sharp {
    }
  }

  /*
    Calculate the (left, top) coordinates of the output image
    within the input image, applying the given gravity.
  */
  std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
    int const outWidth, int const outHeight, int const gravity) {

    int left = 0;
    int top = 0;
    switch (gravity) {
      case 1: // North
        left = (inWidth - outWidth + 1) / 2;
        break;
      case 2: // East
        left = inWidth - outWidth;
        top = (inHeight - outHeight + 1) / 2;
        break;
      case 3: // South
        left = (inWidth - outWidth + 1) / 2;
        top = inHeight - outHeight;
        break;
      case 4: // West
        top = (inHeight - outHeight + 1) / 2;
        break;
      case 5: // Northeast
        left = inWidth - outWidth;
        break;
      case 6: // Southeast
        left = inWidth - outWidth;
        top = inHeight - outHeight;
      case 7: // Southwest
        top = inHeight - outHeight;
      case 8: // Northwest
        break;
      default: // Centre
        left = (inWidth - outWidth + 1) / 2;
        top = (inHeight - outHeight + 1) / 2;
    }
    return std::make_tuple(left, top);
  }

} // namespace sharp

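To make the placement arithmetic concrete, a small worked sketch (values are illustrative) of where a 100×50 overlay lands within a 300×200 base image, using the same integer maths as above:

```javascript
var inWidth = 300, inHeight = 200;   // base (dst) dimensions
var outWidth = 100, outHeight = 50;  // overlay (src) dimensions

// Centre (the default): halve the leftover space, rounding up.
var centreLeft = Math.floor((inWidth - outWidth + 1) / 2);   // 100
var centreTop = Math.floor((inHeight - outHeight + 1) / 2);  // 75

// Southeast (gravity 6): pin the overlay to the bottom-right edge.
var southeastLeft = inWidth - outWidth;   // 200
var southeastTop = inHeight - outHeight;  // 150

console.log(centreLeft, centreTop, southeastLeft, southeastTop); // 100 75 200 150
```
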
@@ -2,6 +2,8 @@
#define SRC_COMMON_H_

#include <string>
#include <tuple>

#include <vips/vips8>

using vips::VImage;
@@ -80,6 +82,13 @@ namespace sharp {
  */
  void FreeCallback(char* data, void* hint);

  /*
    Calculate the (left, top) coordinates of the output image
    within the input image, applying the given gravity.
  */
  std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
    int const outWidth, int const outHeight, int const gravity);

} // namespace sharp

#endif // SRC_COMMON_H_

@@ -4,34 +4,49 @@
#include "operations.h"

using vips::VImage;
using vips::VError;

namespace sharp {

  /*
    Alpha composite src over dst
    Assumes alpha channels are already premultiplied and will be unpremultiplied after
    Alpha composite src over dst with given gravity.
    Assumes alpha channels are already premultiplied and will be unpremultiplied after.
  */
  VImage Composite(VImage src, VImage dst) {
  VImage Composite(VImage src, VImage dst, const int gravity) {
    using sharp::CalculateCrop;
    using sharp::HasAlpha;

    // Split src into non-alpha and alpha
    if (!HasAlpha(src)) {
      throw VError("Overlay image must have an alpha channel");
    }
    if (!HasAlpha(dst)) {
      throw VError("Image to be overlaid must have an alpha channel");
    }
    if (src.width() > dst.width() || src.height() > dst.height()) {
      throw VError("Overlay image must have same dimensions or smaller");
    }

    // Enlarge overlay src, if required
    if (src.width() < dst.width() || src.height() < dst.height()) {
      // Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
      int left;
      int top;
      std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), src.width(), src.height(), gravity);
      // Embed onto transparent background
      std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
      src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
        ->set("extend", VIPS_EXTEND_BACKGROUND)
        ->set("background", background)
      );
    }

    // Split src into non-alpha and alpha channels
    VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1));
    VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0);

    // Split dst into non-alpha and alpha channels
    VImage dstWithoutAlpha;
    VImage dstAlpha;
    if (HasAlpha(dst)) {
      // Non-alpha: extract all-but-last channel
      dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
      // Alpha: Extract last channel
      dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0);
    } else {
      // Non-alpha: Copy reference
      dstWithoutAlpha = dst;
      // Alpha: Use blank, opaque (0xFF) image
      dstAlpha = VImage::black(dst.width(), dst.height()).invert();
    }
    VImage dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));
    VImage dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0);

    //
    // Compute normalized output alpha channel:

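The calculation this comment introduces is the standard premultiplied "over" operator; a minimal per-pixel sketch of that arithmetic (plain JavaScript, all values normalised to the range 0..1, not sharp's actual code):

```javascript
// Premultiplied 'over' compositing of a single pixel: src over dst.
// r, g and b are already multiplied by their own alpha.
function over(src, dst) {
  return {
    a: src.a + dst.a * (1 - src.a),
    r: src.r + dst.r * (1 - src.a),
    g: src.g + dst.g * (1 - src.a),
    b: src.b + dst.b * (1 - src.a)
  };
}

// A 50%-opaque red overlay over an opaque blue background:
console.log(over(
  { r: 0.5, g: 0, b: 0, a: 0.5 },  // premultiplied red at 50% alpha
  { r: 0, g: 0, b: 1, a: 1 }       // opaque blue
)); // { a: 1, r: 0.5, g: 0, b: 0.5 }
```
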
@@ -8,10 +8,10 @@ using vips::VImage;
namespace sharp {

  /*
    Composite images `src` and `dst` with premultiplied alpha channel and output
    image with premultiplied alpha.
    Alpha composite src over dst with given gravity.
    Assumes alpha channels are already premultiplied and will be unpremultiplied after.
  */
  VImage Composite(VImage src, VImage dst);
  VImage Composite(VImage src, VImage dst, const int gravity);

  /*
   * Stretch luminance to cover full dynamic range.

src/pipeline.cc (264 changed lines)
@@ -1,10 +1,12 @@
#include <tuple>
#include <algorithm>
#include <utility>
#include <cmath>
#include <tuple>
#include <utility>

#include <vips/vips8>

#include <node.h>
#include <node_buffer.h>
#include <vips/vips8>

#include "nan.h"

@@ -62,133 +64,22 @@ using sharp::IsWebp;
using sharp::IsTiff;
using sharp::IsDz;
using sharp::FreeCallback;
using sharp::CalculateCrop;
using sharp::counterProcess;
using sharp::counterQueue;

enum class Canvas {
  CROP,
  EMBED,
  MAX,
  MIN,
  IGNORE_ASPECT
};

struct PipelineBaton {
  std::string fileIn;
  char *bufferIn;
  size_t bufferInLength;
  std::string iccProfilePath;
  int limitInputPixels;
  std::string density;
  int rawWidth;
  int rawHeight;
  int rawChannels;
  std::string formatOut;
  std::string fileOut;
  void *bufferOut;
  size_t bufferOutLength;
  int topOffsetPre;
  int leftOffsetPre;
  int widthPre;
  int heightPre;
  int topOffsetPost;
  int leftOffsetPost;
  int widthPost;
  int heightPost;
  int width;
  int height;
  int channels;
  Canvas canvas;
  int gravity;
  std::string interpolator;
  double background[4];
  bool flatten;
  bool negate;
  double blurSigma;
  int sharpenRadius;
  double sharpenFlat;
  double sharpenJagged;
  int threshold;
  std::string overlayPath;
  double gamma;
  bool greyscale;
  bool normalize;
  int angle;
  bool rotateBeforePreExtract;
  bool flip;
  bool flop;
  bool progressive;
  bool withoutEnlargement;
  VipsAccess accessMethod;
  int quality;
  int compressionLevel;
  bool withoutAdaptiveFiltering;
  bool withoutChromaSubsampling;
  bool trellisQuantisation;
  bool overshootDeringing;
  bool optimiseScans;
  std::string err;
  bool withMetadata;
  int withMetadataOrientation;
  int tileSize;
  int tileOverlap;

  PipelineBaton():
    bufferInLength(0),
    limitInputPixels(0),
    density(""),
    rawWidth(0),
    rawHeight(0),
    rawChannels(0),
    formatOut(""),
    fileOut(""),
    bufferOutLength(0),
    topOffsetPre(-1),
    topOffsetPost(-1),
    channels(0),
    canvas(Canvas::CROP),
    gravity(0),
    flatten(false),
    negate(false),
    blurSigma(0.0),
    sharpenRadius(0),
    sharpenFlat(1.0),
    sharpenJagged(2.0),
    threshold(0),
    gamma(0.0),
    greyscale(false),
    normalize(false),
    angle(0),
    flip(false),
    flop(false),
    progressive(false),
    withoutEnlargement(false),
    quality(80),
    compressionLevel(6),
    withoutAdaptiveFiltering(false),
    withoutChromaSubsampling(false),
    trellisQuantisation(false),
    overshootDeringing(false),
    optimiseScans(false),
    withMetadata(false),
    withMetadataOrientation(-1),
    tileSize(256),
    tileOverlap(0) {
    background[0] = 0.0;
    background[1] = 0.0;
    background[2] = 0.0;
    background[3] = 255.0;
  }
};

class PipelineWorker : public AsyncWorker {

 public:
  PipelineWorker(Callback *callback, PipelineBaton *baton, Callback *queueListener, const Local<Object> &bufferIn) :
  PipelineWorker(Callback *callback, PipelineBaton *baton, Callback *queueListener,
    const Local<Object> &bufferIn, const Local<Object> &overlayBufferIn) :
    AsyncWorker(callback), baton(baton), queueListener(queueListener) {
    if (baton->bufferInLength > 0) {
      SaveToPersistent("bufferIn", bufferIn);
    }
    if (baton->overlayBufferInLength > 0) {
      SaveToPersistent("overlayBufferIn", overlayBufferIn);
    }
  }
  ~PipelineWorker() {}

@@ -508,11 +399,19 @@ class PipelineWorker : public AsyncWorker {
      }
    }

    // Ensure image has an alpha channel when there is an overlay
    bool hasOverlay = baton->overlayBufferInLength > 0 || !baton->overlayFileIn.empty();
    if (hasOverlay && !HasAlpha(image)) {
      double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
      image = image.bandjoin(
        VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
      );
    }

    bool shouldAffineTransform = xresidual != 0.0 || yresidual != 0.0;
    bool shouldBlur = baton->blurSigma != 0.0;
    bool shouldSharpen = baton->sharpenRadius != 0;
    bool shouldThreshold = baton->threshold != 0;
    bool hasOverlay = !baton->overlayPath.empty();
    bool shouldPremultiplyAlpha = HasAlpha(image) &&
      (shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay);

@@ -634,38 +533,41 @@ class PipelineWorker : public AsyncWorker {
    // Composite with overlay, if present
    if (hasOverlay) {
      VImage overlayImage;
      ImageType overlayImageType = DetermineImageType(baton->overlayPath.data());
      if (overlayImageType != ImageType::UNKNOWN) {
        overlayImage = VImage::new_from_file(
          baton->overlayPath.data(),
          VImage::option()->set("access", baton->accessMethod)
        );
      ImageType overlayImageType = ImageType::UNKNOWN;
      if (baton->overlayBufferInLength > 0) {
        // Overlay with image from buffer
        overlayImageType = DetermineImageType(baton->overlayBufferIn, baton->overlayBufferInLength);
        if (overlayImageType != ImageType::UNKNOWN) {
          try {
            overlayImage = VImage::new_from_buffer(baton->overlayBufferIn, baton->overlayBufferInLength,
              nullptr, VImage::option()->set("access", baton->accessMethod));
          } catch (...) {
            (baton->err).append("Overlay buffer has corrupt header");
            overlayImageType = ImageType::UNKNOWN;
          }
        } else {
          (baton->err).append("Overlay buffer contains unsupported image format");
        }
      } else {
        (baton->err).append("Overlay image is of an unsupported image format");
        // Overlay with image from file
        overlayImageType = DetermineImageType(baton->overlayFileIn.data());
        if (overlayImageType != ImageType::UNKNOWN) {
          try {
            overlayImage = VImage::new_from_file(baton->overlayFileIn.data(),
              VImage::option()->set("access", baton->accessMethod));
          } catch (...) {
            (baton->err).append("Overlay file has corrupt header");
            overlayImageType = ImageType::UNKNOWN;
          }
        }
      }
      if (overlayImageType == ImageType::UNKNOWN) {
        return Error();
      }
      if (image.format() != VIPS_FORMAT_UCHAR && image.format() != VIPS_FORMAT_FLOAT) {
        (baton->err).append("Expected image band format to be uchar or float: ");
        (baton->err).append(vips_enum_nick(VIPS_TYPE_BAND_FORMAT, image.format()));
        return Error();
      }
      if (overlayImage.format() != VIPS_FORMAT_UCHAR && overlayImage.format() != VIPS_FORMAT_FLOAT) {
        (baton->err).append("Expected overlay image band format to be uchar or float: ");
        (baton->err).append(vips_enum_nick(VIPS_TYPE_BAND_FORMAT, overlayImage.format()));
        return Error();
      }
      if (!HasAlpha(overlayImage)) {
        (baton->err).append("Overlay image must have an alpha channel");
        return Error();
      }
      if (overlayImage.width() != image.width() && overlayImage.height() != image.height()) {
        (baton->err).append("Overlay image must have same dimensions as resized image");
        return Error();
      }
      // Ensure overlay is sRGB and premutiplied
      // Ensure overlay is premultiplied sRGB
      overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();

      image = Composite(overlayImage, image);
      // Composite images with given gravity
      image = Composite(overlayImage, image, baton->overlayGravity);
    }

    // Reverse premultiplication after all transformations:
@@ -708,6 +610,9 @@ class PipelineWorker : public AsyncWorker {
      SetExifOrientation(image, baton->withMetadataOrientation);
    }

    // Number of channels used in output image
    baton->channels = image.bands();

    // Output
    if (baton->fileOut == "") {
      // Buffer output
@@ -728,6 +633,7 @@ class PipelineWorker : public AsyncWorker {
        area->free_fn = nullptr;
        vips_area_unref(area);
        baton->formatOut = "jpeg";
        baton->channels = std::min(baton->channels, 3);
      } else if (baton->formatOut == "png" || (baton->formatOut == "input" && inputImageType == ImageType::PNG)) {
        // Write PNG to buffer
        VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option()
@@ -800,6 +706,7 @@ class PipelineWorker : public AsyncWorker {
          ->set("interlace", baton->progressive)
        );
        baton->formatOut = "jpeg";
        baton->channels = std::min(baton->channels, 3);
      } else if (baton->formatOut == "png" || isPng || (matchInput && inputImageType == ImageType::PNG)) {
        // Write PNG to file
        image.pngsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
@@ -824,6 +731,7 @@ class PipelineWorker : public AsyncWorker {
          ->set("compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG)
        );
        baton->formatOut = "tiff";
        baton->channels = std::min(baton->channels, 3);
      } else if (baton->formatOut == "dz" || IsDz(baton->fileOut)) {
        // Write DZ to file
        image.dzsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
@@ -838,8 +746,6 @@ class PipelineWorker : public AsyncWorker {
        return Error();
      }
    }
    // Number of channels used in output image
    baton->channels = image.bands();
  } catch (VError const &err) {
    (baton->err).append(err.what());
  }
@@ -890,10 +796,13 @@ class PipelineWorker : public AsyncWorker {
      }
    }

    // Dispose of Persistent wrapper around input Buffer so it can be garbage collected
    // Dispose of Persistent wrapper around input Buffers so they can be garbage collected
    if (baton->bufferInLength > 0) {
      GetFromPersistent("bufferIn");
    }
    if (baton->overlayBufferInLength > 0) {
      GetFromPersistent("overlayBufferIn");
    }
    delete baton;

    // Decrement processing task counter
@@ -944,46 +853,6 @@ class PipelineWorker : public AsyncWorker {
    return std::make_tuple(rotate, flip, flop);
  }

  /*
    Calculate the (left, top) coordinates of the output image
    within the input image, applying the given gravity.
  */
  std::tuple<int, int>
  CalculateCrop(int const inWidth, int const inHeight, int const outWidth, int const outHeight, int const gravity) {
    int left = 0;
    int top = 0;
    switch (gravity) {
      case 1: // North
        left = (inWidth - outWidth + 1) / 2;
        break;
      case 2: // East
        left = inWidth - outWidth;
        top = (inHeight - outHeight + 1) / 2;
        break;
      case 3: // South
        left = (inWidth - outWidth + 1) / 2;
        top = inHeight - outHeight;
        break;
      case 4: // West
        top = (inHeight - outHeight + 1) / 2;
        break;
      case 5: // Northeast
        left = inWidth - outWidth;
        break;
      case 6: // Southeast
        left = inWidth - outWidth;
        top = inHeight - outHeight;
      case 7: // Southwest
        top = inHeight - outHeight;
      case 8: // Northwest
        break;
      default: // Centre
        left = (inWidth - outWidth + 1) / 2;
        top = (inHeight - outHeight + 1) / 2;
    }
    return std::make_tuple(left, top);
  }

  /*
    Calculate integral shrink given factor and interpolator window size
  */
@@ -1088,7 +957,14 @@ NAN_METHOD(pipeline) {
    baton->background[i] = To<int32_t>(Get(background, i).ToLocalChecked()).FromJust();
  }
  // Overlay options
  baton->overlayPath = attrAsStr(options, "overlayPath");
  baton->overlayFileIn = attrAsStr(options, "overlayFileIn");
  Local<Object> overlayBufferIn;
  if (node::Buffer::HasInstance(Get(options, New("overlayBufferIn").ToLocalChecked()).ToLocalChecked())) {
    overlayBufferIn = Get(options, New("overlayBufferIn").ToLocalChecked()).ToLocalChecked().As<Object>();
    baton->overlayBufferInLength = node::Buffer::Length(overlayBufferIn);
    baton->overlayBufferIn = node::Buffer::Data(overlayBufferIn);
  }
  baton->overlayGravity = attrAs<int32_t>(options, "overlayGravity");
  // Resize options
  baton->withoutEnlargement = attrAs<bool>(options, "withoutEnlargement");
  baton->gravity = attrAs<int32_t>(options, "gravity");
@@ -1131,7 +1007,7 @@ NAN_METHOD(pipeline) {

  // Join queue for worker thread
  Callback *callback = new Callback(info[1].As<Function>());
  AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, bufferIn));
  AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, bufferIn, overlayBufferIn));

  // Increment queued task counter
  g_atomic_int_inc(&counterQueue);

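Because of the alpha-channel handling above, a plain three-channel JPEG base can be overlaid directly: an opaque alpha band is joined on for compositing and the JPEG writer then caps the output back to three channels. A short sketch of what this looks like from JavaScript (hypothetical file names):

```javascript
var sharp = require('sharp');

sharp('photo.jpg')  // 3-channel input without alpha
  .resize(320)
  .overlayWith('badge.png', { gravity: 'northwest' })
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // The overlay was composited against a temporary opaque alpha band;
    // JPEG output drops back to 3 channels.
    console.log(info.format, info.channels); // 'jpeg' 3
  });
```
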
src/pipeline.h (121 changed lines)
@@ -5,4 +5,125 @@

NAN_METHOD(pipeline);

enum class Canvas {
  CROP,
  EMBED,
  MAX,
  MIN,
  IGNORE_ASPECT
};

struct PipelineBaton {
  std::string fileIn;
  char *bufferIn;
  size_t bufferInLength;
  std::string iccProfilePath;
  int limitInputPixels;
  std::string density;
  int rawWidth;
  int rawHeight;
  int rawChannels;
  std::string formatOut;
  std::string fileOut;
  void *bufferOut;
  size_t bufferOutLength;
  std::string overlayFileIn;
  char *overlayBufferIn;
  size_t overlayBufferInLength;
  int overlayGravity;
  int topOffsetPre;
  int leftOffsetPre;
  int widthPre;
  int heightPre;
  int topOffsetPost;
  int leftOffsetPost;
  int widthPost;
  int heightPost;
  int width;
  int height;
  int channels;
  Canvas canvas;
  int gravity;
  std::string interpolator;
  double background[4];
  bool flatten;
  bool negate;
  double blurSigma;
  int sharpenRadius;
  double sharpenFlat;
  double sharpenJagged;
  int threshold;
  double gamma;
  bool greyscale;
  bool normalize;
  int angle;
  bool rotateBeforePreExtract;
  bool flip;
  bool flop;
  bool progressive;
  bool withoutEnlargement;
  VipsAccess accessMethod;
  int quality;
  int compressionLevel;
  bool withoutAdaptiveFiltering;
  bool withoutChromaSubsampling;
  bool trellisQuantisation;
  bool overshootDeringing;
  bool optimiseScans;
  std::string err;
  bool withMetadata;
  int withMetadataOrientation;
  int tileSize;
  int tileOverlap;

  PipelineBaton():
    bufferInLength(0),
    limitInputPixels(0),
    density(""),
    rawWidth(0),
    rawHeight(0),
    rawChannels(0),
    formatOut(""),
    fileOut(""),
    bufferOutLength(0),
    overlayBufferInLength(0),
    overlayGravity(0),
    topOffsetPre(-1),
    topOffsetPost(-1),
    channels(0),
    canvas(Canvas::CROP),
    gravity(0),
    flatten(false),
    negate(false),
    blurSigma(0.0),
    sharpenRadius(0),
    sharpenFlat(1.0),
    sharpenJagged(2.0),
    threshold(0),
    gamma(0.0),
    greyscale(false),
    normalize(false),
    angle(0),
    flip(false),
    flop(false),
    progressive(false),
    withoutEnlargement(false),
    quality(80),
    compressionLevel(6),
    withoutAdaptiveFiltering(false),
    withoutChromaSubsampling(false),
    trellisQuantisation(false),
    overshootDeringing(false),
    optimiseScans(false),
    withMetadata(false),
    withMetadataOrientation(-1),
    tileSize(256),
    tileOverlap(0) {
    background[0] = 0.0;
    background[1] = 0.0;
    background[2] = 0.0;
    background[3] = 255.0;
  }
};

#endif // SRC_PIPELINE_H_

BIN test/fixtures/expected/overlay-gravity-center.jpg (new file, 2.0 KiB)
BIN test/fixtures/expected/overlay-gravity-centre.jpg (new file, 2.0 KiB)
BIN test/fixtures/expected/overlay-gravity-east.jpg (new file, 2.1 KiB)
BIN test/fixtures/expected/overlay-gravity-north.jpg (new file, 2.1 KiB)
BIN test/fixtures/expected/overlay-gravity-northeast.jpg (new file, 2.2 KiB)
BIN test/fixtures/expected/overlay-gravity-northwest.jpg (new file, 2.1 KiB)
BIN test/fixtures/expected/overlay-gravity-south.jpg (new file, 2.0 KiB)
BIN test/fixtures/expected/overlay-gravity-southeast.jpg (new file, 2.1 KiB)
BIN test/fixtures/expected/overlay-gravity-southwest.jpg (new file, 2.1 KiB)
BIN test/fixtures/expected/overlay-gravity-west.jpg (new file, 2.1 KiB)

@@ -1,5 +1,6 @@
'use strict';

var fs = require('fs');
var assert = require('assert');
var fixtures = require('../fixtures');
var sharp = require('../../index');
@@ -17,7 +18,7 @@ var getPaths = function(baseName, extension) {

// Test
describe('Overlays', function() {
  it('Overlay transparent PNG on solid background', function(done) {
  it('Overlay transparent PNG file on solid background', function(done) {
    var paths = getPaths('alpha-layer-01');

    sharp(fixtures.inputPngOverlayLayer0)
@@ -29,6 +30,18 @@ describe('Overlays', function() {
    });
  });

  it('Overlay transparent PNG Buffer on solid background', function(done) {
    var paths = getPaths('alpha-layer-01');

    sharp(fixtures.inputPngOverlayLayer0)
      .overlayWith(fs.readFileSync(fixtures.inputPngOverlayLayer1))
      .toFile(paths.actual, function (error) {
        if (error) return done(error);
        fixtures.assertMaxColourDistance(paths.actual, paths.expected);
        done();
      });
  });

  it('Overlay low-alpha transparent PNG on solid background', function(done) {
    var paths = getPaths('alpha-layer-01-low-alpha');

@@ -141,18 +154,19 @@ describe('Overlays', function() {
      });
  }

  it('Fail when compositing images with different dimensions', function(done) {
    sharp(fixtures.inputJpg)
      .overlayWith(fixtures.inputPngWithGreyAlpha)
  it('Fail when overlay does not contain alpha channel', function(done) {
    sharp(fixtures.inputPngOverlayLayer1)
      .overlayWith(fixtures.inputJpg)
      .toBuffer(function(error) {
        assert.strictEqual(true, error instanceof Error);
        done();
      });
  });

  it('Fail when compositing non-PNG image', function(done) {
    sharp(fixtures.inputPngOverlayLayer1)
      .overlayWith(fixtures.inputJpg)
  it('Fail when overlay is larger', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320)
      .overlayWith(fixtures.inputPngOverlayLayer1)
      .toBuffer(function(error) {
        assert.strictEqual(true, error instanceof Error);
        done();
@@ -170,4 +184,62 @@ describe('Overlays', function() {
      sharp().overlayWith(1);
    });
  });

  it('Fail with unsupported gravity', function() {
    assert.throws(function() {
      sharp()
        .overlayWith(fixtures.inputPngOverlayLayer1, {
          gravity: 9
        });
    });
  });

  it('Empty options', function() {
    assert.doesNotThrow(function() {
      sharp().overlayWith(fixtures.inputPngOverlayLayer1, {});
    });
  });

  describe('Overlay with numeric gravity', function() {
    Object.keys(sharp.gravity).forEach(function(gravity) {
      it(gravity, function(done) {
        var expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
        sharp(fixtures.inputJpg)
          .resize(80)
          .overlayWith(fixtures.inputPngWithTransparency16bit, {
            gravity: gravity
          })
          .toBuffer(function(err, data, info) {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(expected, data, done);
          });
      });
    });
  });

  describe('Overlay with string-based gravity', function() {
    Object.keys(sharp.gravity).forEach(function(gravity) {
      it(gravity, function(done) {
        var expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
        sharp(fixtures.inputJpg)
          .resize(80)
          .overlayWith(fixtures.inputPngWithTransparency16bit, {
            gravity: sharp.gravity[gravity]
          })
          .toBuffer(function(err, data, info) {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(expected, data, done);
          });
      });
    });
  });

});