Mirror of https://github.com/lovell/sharp.git, synced 2025-12-19 07:15:08 +01:00
Switch default interpolator to bicubic #289

Only use gaussian blur for non-linear interpolators
Improves performance of bilinear by ~15%
Add liborc to the packaged build to improve bicubic perf
Add examples of the various interpolation methods
Add bilinear vs bicubic to perf tests
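The interpolators named here are libvips interpolators. As a point of reference (not part of this commit), a minimal standalone sketch that creates the interpolators being compared and queries their window sizes, the same call that InterpolatorWindowSize() wraps in the diff below:

#include <vips/vips.h>
#include <cstdio>

// Sketch: create each interpolator discussed in this commit and print its
// window size; a larger window generally trades speed for quality.
int main(int argc, char **argv) {
  if (VIPS_INIT(argv[0])) return 1;
  char const *names[] = { "bilinear", "bicubic", "nohalo" };
  for (char const *name : names) {
    VipsInterpolate *interpolator = vips_interpolate_new(name);
    if (interpolator == nullptr) continue;
    printf("%s: window size %d\n", name, vips_interpolate_get_window_size(interpolator));
    g_object_unref(interpolator);
  }
  vips_shutdown();
  return 0;
}

Assuming pkg-config can find libvips, this should build with something like: g++ example.cc $(pkg-config --cflags --libs vips)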
@@ -78,7 +78,7 @@ namespace sharp {
 Initialise and return a VipsImage from a buffer. Supports JPEG, PNG, WebP and TIFF.
 */
 VipsImage* InitImage(void *buffer, size_t const length, VipsAccess const access) {
-return vips_image_new_from_buffer(buffer, length, NULL, "access", access, NULL);
+return vips_image_new_from_buffer(buffer, length, nullptr, "access", access, nullptr);
 }

 /*
@@ -87,7 +87,7 @@ namespace sharp {
 ImageType DetermineImageType(char const *file) {
 ImageType imageType = ImageType::UNKNOWN;
 char const *load = vips_foreign_find_load(file);
-if (load != NULL) {
+if (load != nullptr) {
 std::string loader = load;
 if (EndsWith(loader, "JpegFile")) {
 imageType = ImageType::JPEG;
@@ -110,7 +110,7 @@ namespace sharp {
 Initialise and return a VipsImage from a file.
 */
 VipsImage* InitImage(char const *file, VipsAccess const access) {
-return vips_image_new_from_file(file, "access", access, NULL);
+return vips_image_new_from_file(file, "access", access, nullptr);
 }

 /*
@@ -169,7 +169,7 @@ namespace sharp {
 */
 int InterpolatorWindowSize(char const *name) {
 VipsInterpolate *interpolator = vips_interpolate_new(name);
-if (interpolator == NULL) {
+if (interpolator == nullptr) {
 return -1;
 }
 int window_size = vips_interpolate_get_window_size(interpolator);
@@ -181,7 +181,7 @@ namespace sharp {
 Called when a Buffer undergoes GC, required to support mixed runtime libraries in Windows
 */
 void FreeCallback(char* data, void* hint) {
-if (data != NULL) {
+if (data != nullptr) {
 g_free(data);
 }
 }
@@ -81,13 +81,13 @@ class MetadataWorker : public AsyncWorker {
 g_atomic_int_dec_and_test(&counterQueue);

 ImageType imageType = ImageType::UNKNOWN;
-VipsImage *image = NULL;
+VipsImage *image = nullptr;
 if (baton->bufferInLength > 0) {
 // From buffer
 imageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
 if (imageType != ImageType::UNKNOWN) {
 image = InitImage(baton->bufferIn, baton->bufferInLength, VIPS_ACCESS_RANDOM);
-if (image == NULL) {
+if (image == nullptr) {
 (baton->err).append("Input buffer has corrupt header");
 imageType = ImageType::UNKNOWN;
 }
@@ -96,10 +96,10 @@ class MetadataWorker : public AsyncWorker {
 }
 } else {
 // From file
-imageType = DetermineImageType(baton->fileIn.c_str());
+imageType = DetermineImageType(baton->fileIn.data());
 if (imageType != ImageType::UNKNOWN) {
-image = InitImage(baton->fileIn.c_str(), VIPS_ACCESS_RANDOM);
-if (image == NULL) {
+image = InitImage(baton->fileIn.data(), VIPS_ACCESS_RANDOM);
+if (image == nullptr) {
 (baton->err).append("Input file has corrupt header");
 imageType = ImageType::UNKNOWN;
 }
@@ -107,7 +107,7 @@ class MetadataWorker : public AsyncWorker {
 (baton->err).append("Input file is of an unsupported image format");
 }
 }
-if (image != NULL && imageType != ImageType::UNKNOWN) {
+if (image != nullptr && imageType != ImageType::UNKNOWN) {
 // Image type
 switch (imageType) {
 case ImageType::JPEG: baton->format = "jpeg"; break;
@@ -161,7 +161,7 @@ class MetadataWorker : public AsyncWorker {
 Local<Value> argv[2] = { Null(), Null() };
 if (!baton->err.empty()) {
 // Error
-argv[0] = Error(baton->err.c_str());
+argv[0] = Error(baton->err.data());
 } else {
 // Metadata Object
 Local<Object> info = New<Object>();
src/operations.cc: 58 changed lines (Executable file → Normal file)
@@ -14,11 +14,11 @@ namespace sharp {

 // Split src into non-alpha and alpha
 VipsImage *srcWithoutAlpha;
-if (vips_extract_band(src, &srcWithoutAlpha, 0, "n", src->Bands - 1, NULL))
+if (vips_extract_band(src, &srcWithoutAlpha, 0, "n", src->Bands - 1, nullptr))
 return -1;
 vips_object_local(context, srcWithoutAlpha);
 VipsImage *srcAlpha;
-if (vips_extract_band(src, &srcAlpha, src->Bands - 1, "n", 1, NULL))
+if (vips_extract_band(src, &srcAlpha, src->Bands - 1, "n", 1, nullptr))
 return -1;
 vips_object_local(context, srcAlpha);

@@ -27,12 +27,12 @@ namespace sharp {
 VipsImage *dstAlpha;
 if (HasAlpha(dst)) {
 // Non-alpha: extract all-but-last channel
-if (vips_extract_band(dst, &dstWithoutAlpha, 0, "n", dst->Bands - 1, NULL)) {
+if (vips_extract_band(dst, &dstWithoutAlpha, 0, "n", dst->Bands - 1, nullptr)) {
 return -1;
 }
 vips_object_local(context, dstWithoutAlpha);
 // Alpha: Extract last channel
-if (vips_extract_band(dst, &dstAlpha, dst->Bands - 1, "n", 1, NULL)) {
+if (vips_extract_band(dst, &dstAlpha, dst->Bands - 1, "n", 1, nullptr)) {
 return -1;
 }
 vips_object_local(context, dstAlpha);
@@ -41,11 +41,11 @@ namespace sharp {
 dstWithoutAlpha = dst;
 // Alpha: Use blank, opaque (0xFF) image
 VipsImage *black;
-if (vips_black(&black, dst->Xsize, dst->Ysize, NULL)) {
+if (vips_black(&black, dst->Xsize, dst->Ysize, nullptr)) {
 return -1;
 }
 vips_object_local(context, black);
-if (vips_invert(black, &dstAlpha, NULL)) {
+if (vips_invert(black, &dstAlpha, nullptr)) {
 return -1;
 }
 vips_object_local(context, dstAlpha);
@@ -53,12 +53,12 @@ namespace sharp {

 // Compute normalized input alpha channels:
 VipsImage *srcAlphaNormalized;
-if (vips_linear1(srcAlpha, &srcAlphaNormalized, 1.0 / 255.0, 0.0, NULL))
+if (vips_linear1(srcAlpha, &srcAlphaNormalized, 1.0 / 255.0, 0.0, nullptr))
 return -1;
 vips_object_local(context, srcAlphaNormalized);

 VipsImage *dstAlphaNormalized;
-if (vips_linear1(dstAlpha, &dstAlphaNormalized, 1.0 / 255.0, 0.0, NULL))
+if (vips_linear1(dstAlpha, &dstAlphaNormalized, 1.0 / 255.0, 0.0, nullptr))
 return -1;
 vips_object_local(context, dstAlphaNormalized);

@@ -75,17 +75,17 @@ namespace sharp {
 // ^^^^^^^^^^^^^^^^^^^
 // t1
 VipsImage *t0;
-if (vips_linear1(srcAlphaNormalized, &t0, -1.0, 1.0, NULL))
+if (vips_linear1(srcAlphaNormalized, &t0, -1.0, 1.0, nullptr))
 return -1;
 vips_object_local(context, t0);

 VipsImage *t1;
-if (vips_multiply(dstAlphaNormalized, t0, &t1, NULL))
+if (vips_multiply(dstAlphaNormalized, t0, &t1, nullptr))
 return -1;
 vips_object_local(context, t1);

 VipsImage *outAlphaNormalized;
-if (vips_add(srcAlphaNormalized, t1, &outAlphaNormalized, NULL))
+if (vips_add(srcAlphaNormalized, t1, &outAlphaNormalized, nullptr))
 return -1;
 vips_object_local(context, outAlphaNormalized);

@@ -102,23 +102,23 @@ namespace sharp {
 // externally.
 //
 VipsImage *t2;
-if (vips_multiply(dstWithoutAlpha, t0, &t2, NULL))
+if (vips_multiply(dstWithoutAlpha, t0, &t2, nullptr))
 return -1;
 vips_object_local(context, t2);

 VipsImage *outRGBPremultiplied;
-if (vips_add(srcWithoutAlpha, t2, &outRGBPremultiplied, NULL))
+if (vips_add(srcWithoutAlpha, t2, &outRGBPremultiplied, nullptr))
 return -1;
 vips_object_local(context, outRGBPremultiplied);

 // Denormalize output alpha channel:
 VipsImage *outAlpha;
-if (vips_linear1(outAlphaNormalized, &outAlpha, 255.0, 0.0, NULL))
+if (vips_linear1(outAlphaNormalized, &outAlpha, 255.0, 0.0, nullptr))
 return -1;
 vips_object_local(context, outAlpha);

 // Combine RGB and alpha channel into output image:
-return vips_bandjoin2(outRGBPremultiplied, outAlpha, out, NULL);
+return vips_bandjoin2(outRGBPremultiplied, outAlpha, out, nullptr);
 }

 /*
@@ -132,25 +132,25 @@ namespace sharp {
 }
 // Convert to LAB colourspace
 VipsImage *lab;
-if (vips_colourspace(image, &lab, VIPS_INTERPRETATION_LAB, NULL)) {
+if (vips_colourspace(image, &lab, VIPS_INTERPRETATION_LAB, nullptr)) {
 return -1;
 }
 vips_object_local(context, lab);
 // Extract luminance
 VipsImage *luminance;
-if (vips_extract_band(lab, &luminance, 0, "n", 1, NULL)) {
+if (vips_extract_band(lab, &luminance, 0, "n", 1, nullptr)) {
 return -1;
 }
 vips_object_local(context, luminance);
 // Extract chroma
 VipsImage *chroma;
-if (vips_extract_band(lab, &chroma, 1, "n", 2, NULL)) {
+if (vips_extract_band(lab, &chroma, 1, "n", 2, nullptr)) {
 return -1;
 }
 vips_object_local(context, chroma);
 // Find luminance range
 VipsImage *stats;
-if (vips_stats(luminance, &stats, NULL)) {
+if (vips_stats(luminance, &stats, nullptr)) {
 return -1;
 }
 vips_object_local(context, stats);
@@ -161,19 +161,19 @@ namespace sharp {
 double a = -(min * f);
 // Scale luminance
 VipsImage *luminance100;
-if (vips_linear1(luminance, &luminance100, f, a, NULL)) {
+if (vips_linear1(luminance, &luminance100, f, a, nullptr)) {
 return -1;
 }
 vips_object_local(context, luminance100);
 // Join scaled luminance to chroma
 VipsImage *normalizedLab;
-if (vips_bandjoin2(luminance100, chroma, &normalizedLab, NULL)) {
+if (vips_bandjoin2(luminance100, chroma, &normalizedLab, nullptr)) {
 return -1;
 }
 vips_object_local(context, normalizedLab);
 // Convert to original colourspace
 VipsImage *normalized;
-if (vips_colourspace(normalizedLab, &normalized, typeBeforeNormalize, NULL)) {
+if (vips_colourspace(normalizedLab, &normalized, typeBeforeNormalize, nullptr)) {
 return -1;
 }
 vips_object_local(context, normalized);
@@ -181,13 +181,13 @@ namespace sharp {
 if (HasAlpha(image)) {
 // Extract original alpha channel
 VipsImage *alpha;
-if (vips_extract_band(image, &alpha, image->Bands - 1, "n", 1, NULL)) {
+if (vips_extract_band(image, &alpha, image->Bands - 1, "n", 1, nullptr)) {
 return -1;
 }
 vips_object_local(context, alpha);
 // Join alpha channel to normalised image
 VipsImage *normalizedAlpha;
-if (vips_bandjoin2(normalized, alpha, &normalizedAlpha, NULL)) {
+if (vips_bandjoin2(normalized, alpha, &normalizedAlpha, nullptr)) {
 return -1;
 }
 vips_object_local(context, normalizedAlpha);
@@ -215,19 +215,19 @@ namespace sharp {
 1.0, 1.0, 1.0);
 vips_image_set_double(blur, "scale", 9);
 vips_object_local(context, blur);
-if (vips_conv(image, &blurred, blur, NULL)) {
+if (vips_conv(image, &blurred, blur, nullptr)) {
 return -1;
 }
 } else {
 // Slower, accurate Gaussian blur
 // Create Gaussian function for standard deviation
 VipsImage *gaussian;
-if (vips_gaussmat(&gaussian, sigma, 0.2, "separable", TRUE, "integer", TRUE, NULL)) {
+if (vips_gaussmat(&gaussian, sigma, 0.2, "separable", TRUE, "integer", TRUE, nullptr)) {
 return -1;
 }
 vips_object_local(context, gaussian);
 // Apply Gaussian function
-if (vips_convsep(image, &blurred, gaussian, "precision", VIPS_PRECISION_INTEGER, NULL)) {
+if (vips_convsep(image, &blurred, gaussian, "precision", VIPS_PRECISION_INTEGER, nullptr)) {
 return -1;
 }
 }
@@ -249,12 +249,12 @@ namespace sharp {
 -1.0, -1.0, -1.0);
 vips_image_set_double(sharpen, "scale", 24);
 vips_object_local(context, sharpen);
-if (vips_conv(image, &sharpened, sharpen, NULL)) {
+if (vips_conv(image, &sharpened, sharpen, nullptr)) {
 return -1;
 }
 } else {
 // Slow, accurate sharpen in LAB colour space, with control over flat vs jagged areas
-if (vips_sharpen(image, &sharpened, "radius", radius, "m1", flat, "m2", jagged, NULL)) {
+if (vips_sharpen(image, &sharpened, "radius", radius, "m1", flat, "m2", jagged, nullptr)) {
 return -1;
 }
 }
src/pipeline.cc: 212 changed lines
@@ -197,13 +197,13 @@ class PipelineWorker : public AsyncWorker {

 // Input
 ImageType inputImageType = ImageType::UNKNOWN;
-VipsImage *image = NULL;
+VipsImage *image = nullptr;
 if (baton->bufferInLength > 0) {
 // From buffer
 inputImageType = DetermineImageType(baton->bufferIn, baton->bufferInLength);
 if (inputImageType != ImageType::UNKNOWN) {
 image = InitImage(baton->bufferIn, baton->bufferInLength, baton->accessMethod);
-if (image == NULL) {
+if (image == nullptr) {
 // Could not read header data
 (baton->err).append("Input buffer has corrupt header");
 inputImageType = ImageType::UNKNOWN;
@@ -213,10 +213,10 @@ class PipelineWorker : public AsyncWorker {
 }
 } else {
 // From file
-inputImageType = DetermineImageType(baton->fileIn.c_str());
+inputImageType = DetermineImageType(baton->fileIn.data());
 if (inputImageType != ImageType::UNKNOWN) {
-image = InitImage(baton->fileIn.c_str(), baton->accessMethod);
-if (image == NULL) {
+image = InitImage(baton->fileIn.data(), baton->accessMethod);
+if (image == nullptr) {
 (baton->err).append("Input file has corrupt header");
 inputImageType = ImageType::UNKNOWN;
 }
@@ -224,7 +224,7 @@ class PipelineWorker : public AsyncWorker {
 (baton->err).append("Input file is of an unsupported image format");
 }
 }
-if (image == NULL || inputImageType == ImageType::UNKNOWN) {
+if (image == nullptr || inputImageType == ImageType::UNKNOWN) {
 return Error();
 }
 vips_object_local(hook, image);
@@ -252,7 +252,7 @@ class PipelineWorker : public AsyncWorker {
 // Rotate pre-extract
 if (baton->rotateBeforePreExtract && rotation != Angle::D0) {
 VipsImage *rotated;
-if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) {
+if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), nullptr)) {
 return Error();
 }
 vips_object_local(hook, rotated);
@@ -263,7 +263,7 @@ class PipelineWorker : public AsyncWorker {
 // Pre extraction
 if (baton->topOffsetPre != -1) {
 VipsImage *extractedPre;
-if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, NULL)) {
+if (vips_extract_area(image, &extractedPre, baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre, nullptr)) {
 return Error();
 }
 vips_object_local(hook, extractedPre);
@@ -281,7 +281,7 @@ class PipelineWorker : public AsyncWorker {
 }

 // Get window size of interpolator, used for determining shrink vs affine
-int interpolatorWindowSize = InterpolatorWindowSize(baton->interpolator.c_str());
+int interpolatorWindowSize = InterpolatorWindowSize(baton->interpolator.data());
 if (interpolatorWindowSize < 0) {
 return Error();
 }
@@ -400,11 +400,11 @@ class PipelineWorker : public AsyncWorker {
 // Reload input using shrink-on-load
 VipsImage *shrunkOnLoad;
 if (baton->bufferInLength > 1) {
-if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
+if (vips_jpegload_buffer(baton->bufferIn, baton->bufferInLength, &shrunkOnLoad, "shrink", shrink_on_load, nullptr)) {
 return Error();
 }
 } else {
-if (vips_jpegload((baton->fileIn).c_str(), &shrunkOnLoad, "shrink", shrink_on_load, NULL)) {
+if (vips_jpegload((baton->fileIn).data(), &shrunkOnLoad, "shrink", shrink_on_load, nullptr)) {
 return Error();
 }
 }
@@ -416,7 +416,7 @@ class PipelineWorker : public AsyncWorker {
 if (HasProfile(image)) {
 // Convert to sRGB using embedded profile
 VipsImage *transformed;
-if (!vips_icc_transform(image, &transformed, srgbProfile.c_str(), "embedded", TRUE, NULL)) {
+if (!vips_icc_transform(image, &transformed, srgbProfile.data(), "embedded", TRUE, nullptr)) {
 // Embedded profile can fail, so only update references on success
 vips_object_local(hook, transformed);
 image = transformed;
@@ -425,7 +425,7 @@ class PipelineWorker : public AsyncWorker {
 // Convert to sRGB using default "USWebCoatedSWOP" CMYK profile
 std::string cmykProfile = baton->iccProfilePath + "USWebCoatedSWOP.icc";
 VipsImage *transformed;
-if (vips_icc_transform(image, &transformed, srgbProfile.c_str(), "input_profile", cmykProfile.c_str(), NULL)) {
+if (vips_icc_transform(image, &transformed, srgbProfile.data(), "input_profile", cmykProfile.data(), nullptr)) {
 return Error();
 }
 vips_object_local(hook, transformed);
@@ -442,7 +442,7 @@ class PipelineWorker : public AsyncWorker {
 baton->background[2]
 );
 VipsImage *flattened;
-if (vips_flatten(image, &flattened, "background", background, NULL)) {
+if (vips_flatten(image, &flattened, "background", background, nullptr)) {
 vips_area_unref(reinterpret_cast<VipsArea*>(background));
 return Error();
 }
@@ -454,7 +454,7 @@ class PipelineWorker : public AsyncWorker {
 // Gamma encoding (darken)
 if (baton->gamma >= 1 && baton->gamma <= 3 && !HasAlpha(image)) {
 VipsImage *gammaEncoded;
-if (vips_gamma(image, &gammaEncoded, "exponent", 1.0 / baton->gamma, NULL)) {
+if (vips_gamma(image, &gammaEncoded, "exponent", 1.0 / baton->gamma, nullptr)) {
 return Error();
 }
 vips_object_local(hook, gammaEncoded);
@@ -464,7 +464,7 @@ class PipelineWorker : public AsyncWorker {
 // Convert to greyscale (linear, therefore after gamma encoding, if any)
 if (baton->greyscale) {
 VipsImage *greyscale;
-if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, NULL)) {
+if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, nullptr)) {
 return Error();
 }
 vips_object_local(hook, greyscale);
@@ -474,7 +474,7 @@ class PipelineWorker : public AsyncWorker {
 if (xshrink > 1 || yshrink > 1) {
 VipsImage *shrunk;
 // Use vips_shrink with the integral reduction
-if (vips_shrink(image, &shrunk, xshrink, yshrink, NULL)) {
+if (vips_shrink(image, &shrunk, xshrink, yshrink, nullptr)) {
 return Error();
 }
 vips_object_local(hook, shrunk);
@@ -510,7 +510,7 @@ class PipelineWorker : public AsyncWorker {
 // See: http://entropymine.com/imageworsener/resizealpha/
 if (shouldPremultiplyAlpha) {
 VipsImage *imagePremultiplied;
-if (vips_premultiply(image, &imagePremultiplied, NULL)) {
+if (vips_premultiply(image, &imagePremultiplied, nullptr)) {
 (baton->err).append("Failed to premultiply alpha channel.");
 return Error();
 }
@@ -520,46 +520,48 @@ class PipelineWorker : public AsyncWorker {

 // Use vips_affine with the remaining float part
 if (shouldAffineTransform) {
+// Create interpolator
+VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.data());
+if (interpolator == nullptr) {
+return Error();
+}
+vips_object_local(hook, interpolator);
 // Use average of x and y residuals to compute sigma for Gaussian blur
 double residual = (xresidual + yresidual) / 2.0;
-// Apply Gaussian blur before large affine reductions
-if (residual < 1.0) {
+// Apply Gaussian blur before large affine reductions with non-linear interpolators
+if (residual < 1.0 && (
+baton->interpolator == "bicubic" ||
+baton->interpolator == "locallyBoundedBicubic" ||
+baton->interpolator == "nohalo"
+)) {
 // Calculate standard deviation
 double sigma = ((1.0 / residual) - 0.4) / 3.0;
 if (sigma >= 0.3) {
-// Create Gaussian function for standard deviation
-VipsImage *gaussian;
-if (vips_gaussmat(&gaussian, sigma, 0.2, "separable", TRUE, "integer", TRUE, NULL)) {
-return Error();
-}
-vips_object_local(hook, gaussian);
 // Sequential input requires a small linecache before use of convolution
 if (baton->accessMethod == VIPS_ACCESS_SEQUENTIAL) {
 VipsImage *lineCached;
-if (vips_linecache(image, &lineCached, "access", VIPS_ACCESS_SEQUENTIAL, "tile_height", 1, "threaded", TRUE, NULL)) {
+if (vips_linecache(image, &lineCached, "access", VIPS_ACCESS_SEQUENTIAL,
+"tile_height", 1, "threaded", TRUE, nullptr)
+) {
 return Error();
 }
 vips_object_local(hook, lineCached);
 image = lineCached;
 }
-// Apply Gaussian function
+// Apply Gaussian blur
 VipsImage *blurred;
-if (vips_convsep(image, &blurred, gaussian, "precision", VIPS_PRECISION_INTEGER, NULL)) {
+if (vips_gaussblur(image, &blurred, sigma, nullptr)) {
 return Error();
 }
 vips_object_local(hook, blurred);
 image = blurred;
 }
 }
-// Create interpolator - "bilinear" (default), "bicubic" or "nohalo"
-VipsInterpolate *interpolator = vips_interpolate_new(baton->interpolator.c_str());
-if (interpolator == NULL) {
-return Error();
-}
-vips_object_local(hook, interpolator);
 // Perform affine transformation
 VipsImage *affined;
-if (vips_affine(image, &affined, xresidual, 0.0, 0.0, yresidual, "interpolate", interpolator, NULL)) {
+if (vips_affine(image, &affined, xresidual, 0.0, 0.0, yresidual,
+"interpolate", interpolator, nullptr)
+) {
 return Error();
 }
 vips_object_local(hook, affined);
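For clarity, the decision the hunk above introduces, restated as a standalone sketch with a worked value; the helper name is illustrative and not part of sharp, and residual is the leftover floating-point scale remaining after the integral vips_shrink:

#include <string>

// Blur before the affine reduction only when the residual scale is below 1.0
// and a non-linear interpolator is selected; "bilinear" now skips the blur.
static bool ShouldBlurBeforeAffine(std::string const &interpolator, double residual) {
  bool nonLinear = interpolator == "bicubic" ||
    interpolator == "locallyBoundedBicubic" ||
    interpolator == "nohalo";
  return residual < 1.0 && nonLinear;
}

// Worked example: residual = 0.5 gives sigma = ((1.0 / 0.5) - 0.4) / 3.0 = 0.533,
// above the 0.3 threshold, so vips_gaussblur() runs for "bicubic", while the
// whole branch is skipped for "bilinear", which is the source of the ~15%
// bilinear speed-up claimed in the commit message.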
@@ -569,7 +571,7 @@ class PipelineWorker : public AsyncWorker {
 // Rotate
 if (!baton->rotateBeforePreExtract && rotation != Angle::D0) {
 VipsImage *rotated;
-if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), NULL)) {
+if (vips_rot(image, &rotated, static_cast<VipsAngle>(rotation), nullptr)) {
 return Error();
 }
 vips_object_local(hook, rotated);
@@ -580,7 +582,7 @@ class PipelineWorker : public AsyncWorker {
 // Flip (mirror about Y axis)
 if (baton->flip) {
 VipsImage *flipped;
-if (vips_flip(image, &flipped, VIPS_DIRECTION_VERTICAL, NULL)) {
+if (vips_flip(image, &flipped, VIPS_DIRECTION_VERTICAL, nullptr)) {
 return Error();
 }
 vips_object_local(hook, flipped);
@@ -591,7 +593,7 @@ class PipelineWorker : public AsyncWorker {
 // Flop (mirror about X axis)
 if (baton->flop) {
 VipsImage *flopped;
-if (vips_flip(image, &flopped, VIPS_DIRECTION_HORIZONTAL, NULL)) {
+if (vips_flip(image, &flopped, VIPS_DIRECTION_HORIZONTAL, nullptr)) {
 return Error();
 }
 vips_object_local(hook, flopped);
@@ -606,7 +608,7 @@ class PipelineWorker : public AsyncWorker {
 if (image->Type != VIPS_INTERPRETATION_sRGB) {
 // Convert to sRGB colour space
 VipsImage *colourspaced;
-if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, NULL)) {
+if (vips_colourspace(image, &colourspaced, VIPS_INTERPRETATION_sRGB, nullptr)) {
 return Error();
 }
 vips_object_local(hook, colourspaced);
@@ -616,19 +618,19 @@ class PipelineWorker : public AsyncWorker {
 if (baton->background[3] < 255.0 && !HasAlpha(image)) {
 // Create single-channel transparency
 VipsImage *black;
-if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, NULL)) {
+if (vips_black(&black, image->Xsize, image->Ysize, "bands", 1, nullptr)) {
 return Error();
 }
 vips_object_local(hook, black);
 // Invert to become non-transparent
 VipsImage *alpha;
-if (vips_invert(black, &alpha, NULL)) {
+if (vips_invert(black, &alpha, nullptr)) {
 return Error();
 }
 vips_object_local(hook, alpha);
 // Append alpha channel to existing image
 VipsImage *joined;
-if (vips_bandjoin2(image, alpha, &joined, NULL)) {
+if (vips_bandjoin2(image, alpha, &joined, nullptr)) {
 return Error();
 }
 vips_object_local(hook, joined);
@@ -650,7 +652,7 @@ class PipelineWorker : public AsyncWorker {
 int top = (baton->height - image->Ysize) / 2;
 VipsImage *embedded;
 if (vips_embed(image, &embedded, left, top, baton->width, baton->height,
-"extend", VIPS_EXTEND_BACKGROUND, "background", background, NULL
+"extend", VIPS_EXTEND_BACKGROUND, "background", background, nullptr
 )) {
 vips_area_unref(reinterpret_cast<VipsArea*>(background));
 return Error();
@@ -666,7 +668,7 @@ class PipelineWorker : public AsyncWorker {
 int width = std::min(image->Xsize, baton->width);
 int height = std::min(image->Ysize, baton->height);
 VipsImage *extracted;
-if (vips_extract_area(image, &extracted, left, top, width, height, NULL)) {
+if (vips_extract_area(image, &extracted, left, top, width, height, nullptr)) {
 return Error();
 }
 vips_object_local(hook, extracted);
@@ -678,7 +680,7 @@ class PipelineWorker : public AsyncWorker {
 if (baton->topOffsetPost != -1) {
 VipsImage *extractedPost;
 if (vips_extract_area(image, &extractedPost,
-baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, NULL
+baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost, nullptr
 )) {
 return Error();
 }
@@ -706,12 +708,12 @@ class PipelineWorker : public AsyncWorker {

 // Composite with overlay, if present
 if (hasOverlay) {
-VipsImage *overlayImage = NULL;
+VipsImage *overlayImage = nullptr;
 ImageType overlayImageType = ImageType::UNKNOWN;
-overlayImageType = DetermineImageType(baton->overlayPath.c_str());
+overlayImageType = DetermineImageType(baton->overlayPath.data());
 if (overlayImageType != ImageType::UNKNOWN) {
-overlayImage = InitImage(baton->overlayPath.c_str(), baton->accessMethod);
-if (overlayImage == NULL) {
+overlayImage = InitImage(baton->overlayPath.data(), baton->accessMethod);
+if (overlayImage == nullptr) {
 (baton->err).append("Overlay image has corrupt header");
 return Error();
 } else {
@@ -742,15 +744,15 @@ class PipelineWorker : public AsyncWorker {

 // Ensure overlay is sRGB
 VipsImage *overlayImageRGB;
-if (vips_colourspace(overlayImage, &overlayImageRGB, VIPS_INTERPRETATION_sRGB, NULL)) {
+if (vips_colourspace(overlayImage, &overlayImageRGB, VIPS_INTERPRETATION_sRGB, nullptr)) {
 return Error();
 }
 vips_object_local(hook, overlayImageRGB);

 // Premultiply overlay
 VipsImage *overlayImagePremultiplied;
-if (vips_premultiply(overlayImageRGB, &overlayImagePremultiplied, NULL)) {
-(baton->err).append("Failed to premultiply alpha channel of overlay image.");
+if (vips_premultiply(overlayImageRGB, &overlayImagePremultiplied, nullptr)) {
+(baton->err).append("Failed to premultiply alpha channel of overlay image");
 return Error();
 }
 vips_object_local(hook, overlayImagePremultiplied);
@@ -767,8 +769,8 @@ class PipelineWorker : public AsyncWorker {
 // Reverse premultiplication after all transformations:
 if (shouldPremultiplyAlpha) {
 VipsImage *imageUnpremultiplied;
-if (vips_unpremultiply(image, &imageUnpremultiplied, NULL)) {
-(baton->err).append("Failed to unpremultiply alpha channel.");
+if (vips_unpremultiply(image, &imageUnpremultiplied, nullptr)) {
+(baton->err).append("Failed to unpremultiply alpha channel");
 return Error();
 }
 vips_object_local(hook, imageUnpremultiplied);
@@ -778,7 +780,7 @@ class PipelineWorker : public AsyncWorker {
 // Gamma decoding (brighten)
 if (baton->gamma >= 1 && baton->gamma <= 3 && !HasAlpha(image)) {
 VipsImage *gammaDecoded;
-if (vips_gamma(image, &gammaDecoded, "exponent", baton->gamma, NULL)) {
+if (vips_gamma(image, &gammaDecoded, "exponent", baton->gamma, nullptr)) {
 return Error();
 }
 vips_object_local(hook, gammaDecoded);
@@ -798,7 +800,7 @@ class PipelineWorker : public AsyncWorker {
 if (image->Type != VIPS_INTERPRETATION_sRGB) {
 // Switch interpretation to sRGB
 VipsImage *rgb;
-if (vips_colourspace(image, &rgb, VIPS_INTERPRETATION_sRGB, NULL)) {
+if (vips_colourspace(image, &rgb, VIPS_INTERPRETATION_sRGB, nullptr)) {
 return Error();
 }
 vips_object_local(hook, rgb);
@@ -806,7 +808,7 @@ class PipelineWorker : public AsyncWorker {
 // Transform colours from embedded profile to sRGB profile
 if (baton->withMetadata && HasProfile(image)) {
 VipsImage *profiled;
-if (vips_icc_transform(image, &profiled, srgbProfile.c_str(), "embedded", TRUE, NULL)) {
+if (vips_icc_transform(image, &profiled, srgbProfile.data(), "embedded", TRUE, nullptr)) {
 return Error();
 }
 vips_object_local(hook, profiled);
@@ -822,28 +824,42 @@ class PipelineWorker : public AsyncWorker {
 // Output
 if (baton->output == "__jpeg" || (baton->output == "__input" && inputImageType == ImageType::JPEG)) {
 // Write JPEG to buffer
-if (vips_jpegsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
-"Q", baton->quality, "optimize_coding", TRUE, "no_subsample", baton->withoutChromaSubsampling,
+if (vips_jpegsave_buffer(
+image, &baton->bufferOut, &baton->bufferOutLength,
+"strip", !baton->withMetadata,
+"Q", baton->quality,
+"optimize_coding", TRUE,
+"no_subsample", baton->withoutChromaSubsampling,
 "trellis_quant", baton->trellisQuantisation,
 "overshoot_deringing", baton->overshootDeringing,
 "optimize_scans", baton->optimiseScans,
-"interlace", baton->progressive, NULL)) {
+"interlace", baton->progressive,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "jpeg";
 } else if (baton->output == "__png" || (baton->output == "__input" && inputImageType == ImageType::PNG)) {
-// Select PNG row filter
-int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
 // Write PNG to buffer
-if (vips_pngsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
-"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
+if (vips_pngsave_buffer(
+image, &baton->bufferOut, &baton->bufferOutLength,
+"strip", !baton->withMetadata,
+"compression", baton->compressionLevel,
+"interlace", baton->progressive,
+"filter", baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "png";
 } else if (baton->output == "__webp" || (baton->output == "__input" && inputImageType == ImageType::WEBP)) {
 // Write WEBP to buffer
-if (vips_webpsave_buffer(image, &baton->bufferOut, &baton->bufferOutLength, "strip", !baton->withMetadata,
-"Q", baton->quality, NULL)) {
+if (vips_webpsave_buffer(
+image, &baton->bufferOut, &baton->bufferOutLength,
+"strip", !baton->withMetadata,
+"Q", baton->quality,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "webp";
@@ -852,7 +868,7 @@ class PipelineWorker : public AsyncWorker {
 if (baton->greyscale || image->Type == VIPS_INTERPRETATION_B_W) {
 // Extract first band for greyscale image
 VipsImage *grey;
-if (vips_extract_band(image, &grey, 0, NULL)) {
+if (vips_extract_band(image, &grey, 0, nullptr)) {
 return Error();
 }
 vips_object_local(hook, grey);
@@ -861,7 +877,7 @@ class PipelineWorker : public AsyncWorker {
 if (image->BandFmt != VIPS_FORMAT_UCHAR) {
 // Cast pixels to uint8 (unsigned char)
 VipsImage *uchar;
-if (vips_cast(image, &uchar, VIPS_FORMAT_UCHAR, NULL)) {
+if (vips_cast(image, &uchar, VIPS_FORMAT_UCHAR, nullptr)) {
 return Error();
 }
 vips_object_local(hook, uchar);
@@ -869,7 +885,7 @@ class PipelineWorker : public AsyncWorker {
 }
 // Get raw image data
 baton->bufferOut = vips_image_write_to_memory(image, &baton->bufferOutLength);
-if (baton->bufferOut == NULL) {
+if (baton->bufferOut == nullptr) {
 (baton->err).append("Could not allocate enough memory for raw output");
 return Error();
 }
@@ -883,42 +899,66 @@ class PipelineWorker : public AsyncWorker {
 bool matchInput = !(outputJpeg || outputPng || outputWebp || outputTiff || outputDz);
 if (outputJpeg || (matchInput && inputImageType == ImageType::JPEG)) {
 // Write JPEG to file
-if (vips_jpegsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
-"Q", baton->quality, "optimize_coding", TRUE, "no_subsample", baton->withoutChromaSubsampling,
+if (vips_jpegsave(
+image, baton->output.data(),
+"strip", !baton->withMetadata,
+"Q", baton->quality,
+"optimize_coding", TRUE,
+"no_subsample", baton->withoutChromaSubsampling,
 "trellis_quant", baton->trellisQuantisation,
 "overshoot_deringing", baton->overshootDeringing,
 "optimize_scans", baton->optimiseScans,
-"interlace", baton->progressive, NULL)) {
+"interlace", baton->progressive,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "jpeg";
 } else if (outputPng || (matchInput && inputImageType == ImageType::PNG)) {
-// Select PNG row filter
-int filter = baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL;
 // Write PNG to file
-if (vips_pngsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
-"compression", baton->compressionLevel, "interlace", baton->progressive, "filter", filter, NULL)) {
+if (vips_pngsave(
+image, baton->output.data(),
+"strip", !baton->withMetadata,
+"compression", baton->compressionLevel,
+"interlace", baton->progressive,
+"filter", baton->withoutAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_NONE : VIPS_FOREIGN_PNG_FILTER_ALL,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "png";
 } else if (outputWebp || (matchInput && inputImageType == ImageType::WEBP)) {
 // Write WEBP to file
-if (vips_webpsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
-"Q", baton->quality, NULL)) {
+if (vips_webpsave(
+image, baton->output.data(),
+"strip", !baton->withMetadata,
+"Q", baton->quality,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "webp";
 } else if (outputTiff || (matchInput && inputImageType == ImageType::TIFF)) {
 // Write TIFF to file
-if (vips_tiffsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
-"compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG, "Q", baton->quality, NULL)) {
+if (vips_tiffsave(
+image, baton->output.data(),
+"strip", !baton->withMetadata,
+"compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG,
+"Q", baton->quality,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "tiff";
 } else if (outputDz) {
 // Write DZ to file
-if (vips_dzsave(image, baton->output.c_str(), "strip", !baton->withMetadata,
-"tile_size", baton->tileSize, "overlap", baton->tileOverlap, NULL)) {
+if (vips_dzsave(
+image, baton->output.data(),
+"strip", !baton->withMetadata,
+"tile_size", baton->tileSize,
+"overlap", baton->tileOverlap,
+nullptr
+)) {
 return Error();
 }
 baton->outputFormat = "dz";
@@ -937,10 +977,10 @@ class PipelineWorker : public AsyncWorker {
 void HandleOKCallback () {
 HandleScope();

-Local<Value> argv[3] = { Null(), Null(), Null() };
+Local<Value> argv[3] = { Null(), Null(), Null() };
 if (!baton->err.empty()) {
 // Error
-argv[0] = Nan::Error(baton->err.c_str());
+argv[0] = Nan::Error(baton->err.data());
 } else {
 int width = baton->width;
 int height = baton->height;
@@ -969,7 +1009,7 @@ class PipelineWorker : public AsyncWorker {
 } else {
 // Add file size to info
 GStatBuf st;
-g_stat(baton->output.c_str(), &st);
+g_stat(baton->output.data(), &st);
 Set(info, New("size").ToLocalChecked(), New<Uint32>(static_cast<uint32_t>(st.st_size)));
 argv[1] = info;
 }
src/utilities.cc: 20 changed lines (Executable file → Normal file)
@@ -168,11 +168,11 @@ NAN_METHOD(_maxColourDistance) {
 VipsObject *hook = reinterpret_cast<VipsObject*>(vips_image_new());

 // Open input files
-VipsImage *image1 = NULL;
+VipsImage *image1 = nullptr;
 ImageType imageType1 = DetermineImageType(*Utf8String(info[0]));
 if (imageType1 != ImageType::UNKNOWN) {
 image1 = InitImage(*Utf8String(info[0]), VIPS_ACCESS_SEQUENTIAL);
-if (image1 == NULL) {
+if (image1 == nullptr) {
 g_object_unref(hook);
 return ThrowError("Input file 1 has corrupt header");
 } else {
@@ -182,11 +182,11 @@ NAN_METHOD(_maxColourDistance) {
 g_object_unref(hook);
 return ThrowError("Input file 1 is of an unsupported image format");
 }
-VipsImage *image2 = NULL;
+VipsImage *image2 = nullptr;
 ImageType imageType2 = DetermineImageType(*Utf8String(info[1]));
 if (imageType2 != ImageType::UNKNOWN) {
 image2 = InitImage(*Utf8String(info[1]), VIPS_ACCESS_SEQUENTIAL);
-if (image2 == NULL) {
+if (image2 == nullptr) {
 g_object_unref(hook);
 return ThrowError("Input file 2 has corrupt header");
 } else {
@@ -211,13 +211,13 @@ NAN_METHOD(_maxColourDistance) {
 // Premultiply and remove alpha
 if (HasAlpha(image1)) {
 VipsImage *imagePremultiplied1;
-if (vips_premultiply(image1, &imagePremultiplied1, NULL)) {
+if (vips_premultiply(image1, &imagePremultiplied1, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }
 vips_object_local(hook, imagePremultiplied1);
 VipsImage *imagePremultipliedNoAlpha1;
-if (vips_extract_band(image1, &imagePremultipliedNoAlpha1, 1, "n", image1->Bands - 1, NULL)) {
+if (vips_extract_band(image1, &imagePremultipliedNoAlpha1, 1, "n", image1->Bands - 1, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }
@@ -226,13 +226,13 @@ NAN_METHOD(_maxColourDistance) {
 }
 if (HasAlpha(image2)) {
 VipsImage *imagePremultiplied2;
-if (vips_premultiply(image2, &imagePremultiplied2, NULL)) {
+if (vips_premultiply(image2, &imagePremultiplied2, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }
 vips_object_local(hook, imagePremultiplied2);
 VipsImage *imagePremultipliedNoAlpha2;
-if (vips_extract_band(image2, &imagePremultipliedNoAlpha2, 1, "n", image2->Bands - 1, NULL)) {
+if (vips_extract_band(image2, &imagePremultipliedNoAlpha2, 1, "n", image2->Bands - 1, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }
@@ -241,14 +241,14 @@ NAN_METHOD(_maxColourDistance) {
 }
 // Calculate colour distance
 VipsImage *difference;
-if (vips_dE00(image1, image2, &difference, NULL)) {
+if (vips_dE00(image1, image2, &difference, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }
 vips_object_local(hook, difference);
 // Extract maximum distance
 double maxColourDistance;
-if (vips_max(difference, &maxColourDistance, NULL)) {
+if (vips_max(difference, &maxColourDistance, nullptr)) {
 g_object_unref(hook);
 return ThrowError(vips_error_buffer());
 }