Mirror of https://github.com/lovell/sharp.git (synced 2025-07-09 10:30:15 +02:00)
Merge pull request #194 from bkw/normalize
Add normalize() to use full luminance range.
commit be39297f3b
@@ -486,6 +486,10 @@ This is a linear operation. If the input image is in a non-linear colour space s
The output image will still be web-friendly sRGB and contain three (identical) channels.

#### normalize() / normalise()

Stretch histogram to cover full dynamic range before output to enhance contrast.

### Output options

#### jpeg()
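For illustration only (not part of the commit), a minimal sketch of how the option documented above might be used from Node.js; the file names are placeholders:

```javascript
// Hypothetical usage sketch: stretch a low-contrast photo to the full
// luminance range while resizing. Input/output paths are placeholders.
var sharp = require('sharp');

sharp('low-contrast-input.jpg')
  .resize(640, 480)
  .normalize()          // .normalise() is an identical alias
  .toFile('output.jpg', function (err, info) {
    if (err) throw err;
    console.log(info);  // details of the written file
  });
```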
index.js

@@ -59,6 +59,7 @@ var Sharp = function(input) {
    sharpenJagged: 2,
    gamma: 0,
    greyscale: false,
    normalize: 0,
    // output options
    output: '__input',
    progressive: false,

@@ -333,6 +334,15 @@ Sharp.prototype.gamma = function(gamma) {
  return this;
};

/*
  Normalize histogram
*/
Sharp.prototype.normalize = function(normalize) {
  this.options.normalize = (typeof normalize === 'boolean') ? normalize : true;
  return this;
};
Sharp.prototype.normalise = Sharp.prototype.normalize;

/*
  Convert to greyscale
*/
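As a quick aside (not part of the diff), the setter above treats anything other than an explicit boolean as true, and normalise() shares the same prototype function; the input path is a placeholder:

```javascript
var sharp = require('sharp');

var image = sharp('input.jpg');   // placeholder input

image.normalize();        // no argument: options.normalize becomes true
image.normalize(true);    // explicit boolean is used as-is
image.normalize(false);   // the only way to switch it back off
image.normalise();        // alias, same prototype function
```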
package.json

@@ -15,7 +15,8 @@
    "Maurus Cuelenaere <mcuelenaere@gmail.com>",
    "Linus Unnebäck <linus@folkdatorn.se>",
    "Victor Mateevitsi <mvictoras@gmail.com>",
    "Alaric Holloway <alaric.holloway@gmail.com>",
    "Bernhard K. Weisshuhn <bkw@codingforce.com>"
  ],
  "description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library",
  "scripts": {
@@ -83,6 +83,7 @@ struct ResizeBaton {
  double sharpenJagged;
  double gamma;
  bool greyscale;
  bool normalize;
  int angle;
  bool rotateBeforePreExtract;
  bool flip;

@@ -115,6 +116,7 @@ struct ResizeBaton {
    sharpenJagged(2.0),
    gamma(0.0),
    greyscale(false),
    normalize(false),
    angle(0),
    flip(false),
    flop(false),

@@ -694,6 +696,86 @@ class ResizeWorker : public NanAsyncWorker {
      image = gammaDecoded;
    }

    // Apply normalization
    if (baton->normalize) {
      VipsInterpretation typeBeforeNormalize = image->Type;
      if (typeBeforeNormalize == VIPS_INTERPRETATION_RGB) {
        typeBeforeNormalize = VIPS_INTERPRETATION_sRGB;
      }

      // normalize the luminance band in LAB space:
      VipsImage *lab;
      if (vips_colourspace(image, &lab, VIPS_INTERPRETATION_LAB, NULL)) {
        return Error();
      }
      vips_object_local(hook, lab);

      VipsImage *luminance;
      if (vips_extract_band(lab, &luminance, 0, "n", 1, NULL)) {
        return Error();
      }
      vips_object_local(hook, luminance);

      VipsImage *chroma;
      if (vips_extract_band(lab, &chroma, 1, "n", 2, NULL)) {
        return Error();
      }
      vips_object_local(hook, chroma);

      VipsImage *stats;
      if (vips_stats(luminance, &stats, NULL)) {
        return Error();
      }
      vips_object_local(hook, stats);
      double min = *VIPS_MATRIX(stats, 0, 0);
      double max = *VIPS_MATRIX(stats, 1, 0);

      VipsImage *normalized;
      if (min == max) {
        // Range of zero: create black image
        if (vips_black(&normalized, image->Xsize, image->Ysize, "bands", 1, NULL)) {
          return Error();
        }
        vips_object_local(hook, normalized);
      } else {
        double f = 100.0 / (max - min);
        double a = -(min * f);

        VipsImage *luminance100;
        if (vips_linear1(luminance, &luminance100, f, a, NULL)) {
          return Error();
        }
        vips_object_local(hook, luminance100);

        VipsImage *normalizedLab;
        if (vips_bandjoin2(luminance100, chroma, &normalizedLab, NULL)) {
          return Error();
        }
        vips_object_local(hook, normalizedLab);
        if (vips_colourspace(normalizedLab, &normalized, typeBeforeNormalize, NULL)) {
          return Error();
        }
        vips_object_local(hook, normalized);
      }

      if (HasAlpha(image)) {
        VipsImage *alpha;
        if (vips_extract_band(image, &alpha, image->Bands - 1, "n", 1, NULL)) {
          return Error();
        }
        vips_object_local(hook, alpha);

        VipsImage *normalizedAlpha;
        if (vips_bandjoin2(normalized, alpha, &normalizedAlpha, NULL)) {
          return Error();
        }
        vips_object_local(hook, normalizedAlpha);
        image = normalizedAlpha;
      } else {
        image = normalized;
      }
    }

    // Convert image to sRGB, if not already
    if (image->Type != VIPS_INTERPRETATION_sRGB) {
      // Switch interpretation to sRGB
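To make the linear stretch above concrete: vips_linear1() computes L' = f * L + a for every pixel of the extracted lightness band, and with f = 100 / (max - min) and a = -(min * f) the darkest pixel maps to 0 and the brightest to 100, the full range of the L channel in LAB. A small illustrative sketch (not part of the commit, sample values invented):

```javascript
// Per-pixel mapping applied by vips_linear1(luminance, f, a) above.
function stretchLuminance(l, min, max) {
  var f = 100 / (max - min);
  var a = -(min * f);
  return f * l + a;
}

// A low-contrast image whose lightness only spans 30..70:
console.log(stretchLuminance(30, 30, 70)); // 0   (darkest pixel)
console.log(stretchLuminance(50, 30, 70)); // 50  (midpoint)
console.log(stretchLuminance(70, 30, 70)); // 100 (brightest pixel)
```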
@@ -762,10 +844,10 @@ class ResizeWorker : public NanAsyncWorker {
#if (VIPS_MAJOR_VERSION >= 8 || (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 42))
    } else if (baton->output == "__raw") {
      // Write raw, uncompressed image data to buffer
      if (baton->greyscale || image->Type == VIPS_INTERPRETATION_B_W) {
        // Extract first band for greyscale image
        VipsImage *grey;
        if (vips_extract_band(image, &grey, 0, NULL)) {
          return Error();
        }
        vips_object_local(hook, grey);
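A small usage sketch (not part of the commit) of the raw greyscale path this hunk fixes: once the single grey band is extracted, a raw buffer should contain one byte per pixel. The input path is a placeholder and the length check is an assumption based on the band extraction above:

```javascript
var sharp = require('sharp');

sharp('input.jpg')                // placeholder input
  .greyscale()
  .raw()
  .toBuffer(function (err, data, info) {
    if (err) throw err;
    // Single band extracted, so expect one byte per pixel
    console.log(data.length === info.width * info.height);
  });
```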
@@ -1082,6 +1164,7 @@ NAN_METHOD(resize) {
  baton->sharpenJagged = options->Get(NanNew<String>("sharpenJagged"))->NumberValue();
  baton->gamma = options->Get(NanNew<String>("gamma"))->NumberValue();
  baton->greyscale = options->Get(NanNew<String>("greyscale"))->BooleanValue();
  baton->normalize = options->Get(NanNew<String>("normalize"))->BooleanValue();
  baton->angle = options->Get(NanNew<String>("angle"))->Int32Value();
  baton->rotateBeforePreExtract = options->Get(NanNew<String>("rotateBeforePreExtract"))->BooleanValue();
  baton->flip = options->Get(NanNew<String>("flip"))->BooleanValue();
BIN test/fixtures/2x2_fdcce6.png (new file, 76 B; binary file not shown)
BIN test/fixtures/grey-8bit-alpha.png (new file, 23 KiB; binary file not shown)
test/fixtures/index.js

@@ -15,9 +15,12 @@ module.exports = {
  inputJpgWithCmykProfile: getPath('Channel_digital_image_CMYK_color.jpg'), // http://en.wikipedia.org/wiki/File:Channel_digital_image_CMYK_color.jpg
  inputJpgWithCmykNoProfile: getPath('Channel_digital_image_CMYK_color_no_profile.jpg'),
  inputJpgWithCorruptHeader: getPath('corrupt-header.jpg'),
  inputJpgWithLowContrast: getPath('low-contrast.jpg'), // http://www.flickr.com/photos/grizdave/2569067123/

  inputPng: getPath('50020484-00001.png'), // http://c.searspartsdirect.com/lis_png/PLDM/50020484-00001.png
  inputPngWithTransparency: getPath('blackbug.png'), // public domain
  inputPngWithGreyAlpha: getPath('grey-8bit-alpha.png'),
  inputPngWithOneColor: getPath('2x2_fdcce6.png'),

  inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
  inputTiff: getPath('G31D.TIF'), // http://www.fileformat.info/format/tiff/sample/e6c9a6e5253348f4aef6d17b534360ab/index.htm
BIN test/fixtures/low-contrast.jpg (new file, 27 KiB; binary file not shown)
test/unit/normalize.js (new executable file, 136 lines)

@@ -0,0 +1,136 @@
'use strict';

var assert = require('assert');

var sharp = require('../../index');
var fixtures = require('../fixtures');

sharp.cache(0);

describe('Normalization', function () {

  it('uses the same prototype for both spellings', function () {
    assert.strictEqual(sharp.prototype.normalize, sharp.prototype.normalise);
  });

  it('spreads rgb image values between 0 and 255', function(done) {
    sharp(fixtures.inputJpgWithLowContrast)
      .normalize()
      .raw()
      .toBuffer(function (err, data, info) {
        if (err) throw err;
        var min = 255, max = 0, i;
        for (i = 0; i < data.length; i += 3) {
          min = Math.min(min, data[i], data[i + 1], data[i + 2]);
          max = Math.max(max, data[i], data[i + 1], data[i + 2]);
        }
        assert.strictEqual(0, min);
        assert.strictEqual(255, max);
        return done();
      });
  });

  it('spreads grayscaled image values between 0 and 255', function(done) {
    sharp(fixtures.inputJpgWithLowContrast)
      .gamma()
      .greyscale()
      .normalize()
      .raw()
      .toBuffer(function (err, data, info) {
        if (err) throw err;
        var min = 255, max = 0, i;
        for (i = 0; i < data.length; i++) {
          min = Math.min(min, data[i]);
          max = Math.max(max, data[i]);
        }
        assert.strictEqual(0, min);
        assert.strictEqual(255, max);
        return done();
      });
  });

  it('stretches greyscale images with alpha channel', function (done) {
    sharp(fixtures.inputPngWithGreyAlpha)
      .normalize()
      .raw()
      .toBuffer(function (err, data, info) {
        // raw toBuffer does not return the alpha channel (yet?)
        var min = 255, max = 0, i;
        for (i = 0; i < data.length; i++) {
          min = Math.min(min, data[i]);
          max = Math.max(max, data[i]);
        }
        assert.strictEqual(0, min);
        assert.strictEqual(255, max);
        return done();
      });
  });

  it('keeps an existing alpha channel', function (done) {
    sharp(fixtures.inputPngWithTransparency)
      .normalize()
      .toBuffer(function (err, data, info) {
        sharp(data)
          .metadata()
          .then(function (metadata) {
            assert.strictEqual(4, metadata.channels);
            assert.strictEqual(true, metadata.hasAlpha);
            assert.strictEqual('srgb', metadata.space);
          })
          .finally(done);
      });
  });

  it('keeps the alpha channel of greyscale images intact', function (done) {
    sharp(fixtures.inputPngWithGreyAlpha)
      .normalize()
      .toBuffer(function (err, data, info) {
        sharp(data)
          .metadata()
          .then(function (metadata) {
            assert.strictEqual(true, metadata.hasAlpha);
            // because of complications with greyscale
            // we return everything in srgb for now.
            //
            // assert.strictEqual(2, metadata.channels);
            // assert.strictEqual('b-w', metadata.space);
            assert.strictEqual(4, metadata.channels);
            assert.strictEqual('srgb', metadata.space);
          })
          .finally(done);
      });
  });

  it('returns a black image for images with only one color', function (done) {
    sharp(fixtures.inputPngWithOneColor)
      .normalize()
      .toBuffer()
      .bind({})
      .then(function (imageData) {
        this.imageData = imageData;
        return sharp(imageData)
          .metadata();
      })
      .then(function (metadata) {
        assert.strictEqual(false, metadata.hasAlpha);
        // because of complications with greyscale
        // we return everything in srgb for now.
        //
        // assert.strictEqual(1, metadata.channels);
        // assert.strictEqual('b-w', metadata.space);
        assert.strictEqual(3, metadata.channels);
        assert.strictEqual('srgb', metadata.space);
      })
      .then(function () {
        return sharp(this.imageData)
          .raw()
          .toBuffer();
      })
      .then(function (rawData) {
        // var blackBuffer = new Buffer([0,0,0,0]);
        var blackBuffer = new Buffer([0,0,0, 0,0,0, 0,0,0, 0,0,0]);
        assert.strictEqual(blackBuffer.toString(), rawData.toString());
      })
      .finally(done);
  });
});