mirror of https://github.com/lovell/sharp.git

Implements greyscale thresholding

Commit 3af62446fc (parent 5dfeaa9fd1)
.editorconfig  (new file, 12 lines)
@@ -0,0 +1,12 @@
+# http://editorconfig.org
+root = true
+
+[*]
+indent_style = space
+indent_size = 2
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.md]
+trim_trailing_whitespace = false
@@ -328,6 +328,12 @@ When a `radius` is provided, performs a slower, more accurate sharpen of the L c
 * `flat`, if present, is a Number representing the level of sharpening to apply to "flat" areas, defaulting to a value of 1.0.
 * `jagged`, if present, is a Number representing the level of sharpening to apply to "jagged" areas, defaulting to a value of 2.0.
 
+#### threshold([threshold])
+
+Converts all pixels in the image to greyscale white or black. Any pixel greater than or equal to the threshold (0..255) will be white. All others will be black.
+
+* `threshold`, if present, is a Number representing the level at or above which pixels will be forced to white.
+
 #### gamma([gamma])
 
 Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of `1/gamma` then increasing the encoding (brighten) post-resize at a factor of `gamma`.
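For context, a minimal usage sketch of the threshold() API documented above; the input and output paths are placeholders, not part of the commit.

```js
// Minimal sketch of the threshold() API described above.
// 'input.jpg' and 'output.jpg' are placeholder paths.
var sharp = require('sharp');

sharp('input.jpg')
  .resize(320, 240)
  .threshold(128) // pixels with a greyscale value >= 128 become white, all others black
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    console.log(info.format, info.width, info.height);
  });
```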
index.js  (31 lines changed)
@@ -62,6 +62,7 @@ var Sharp = function(input) {
   sharpenRadius: 0,
   sharpenFlat: 1,
   sharpenJagged: 2,
+  threshold: 0,
   gamma: 0,
   greyscale: false,
   normalize: 0,
@@ -142,7 +143,18 @@ Sharp.prototype._write = function(chunk, encoding, callback) {
 };
 
 // Crop this part of the resized image (Center/Centre, North, East, South, West)
-module.exports.gravity = {'center': 0, 'centre': 0, 'north': 1, 'east': 2, 'south': 3, 'west': 4, 'northeast': 5, 'southeast': 6, 'southwest': 7, 'northwest': 8};
+module.exports.gravity = {
+  'center': 0,
+  'centre': 0,
+  'north': 1,
+  'east': 2,
+  'south': 3,
+  'west': 4,
+  'northeast': 5,
+  'southeast': 6,
+  'southwest': 7,
+  'northwest': 8
+};
 
 Sharp.prototype.crop = function(gravity) {
   this.options.canvas = 'crop';
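As an aside, a hedged sketch of how these gravity constants are typically passed to crop(); the file paths are placeholders.

```js
// Example use of the gravity constants with crop(); paths are placeholders.
var sharp = require('sharp');

sharp('input.jpg')
  .resize(320, 240)
  .crop(sharp.gravity.north) // keep the top edge of the image when cropping
  .toFile('output.jpg', function(err) {
    if (err) throw err;
  });
```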
@@ -328,6 +340,19 @@ Sharp.prototype.sharpen = function(radius, flat, jagged) {
   return this;
 };
 
+Sharp.prototype.threshold = function(threshold) {
+  if (typeof threshold === 'undefined') {
+    this.options.threshold = 128;
+  } else if (typeof threshold === 'boolean') {
+    this.options.threshold = threshold ? 128 : 0;
+  } else if (typeof threshold === 'number' && !Number.isNaN(threshold) && (threshold % 1 === 0) && threshold >= 0 && threshold <= 255) {
+    this.options.threshold = threshold;
+  } else {
+    throw new Error('Invalid threshold (0 to 255) ' + threshold);
+  }
+  return this;
+};
+
 /*
   Set the interpolator to use for the affine transformation
 */
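To summarise the argument handling above, a few illustrative calls; the behaviour is read directly from the validation logic, and the input path is a placeholder.

```js
// Behaviour implied by the validation above; 'input.jpg' is a placeholder.
var image = sharp('input.jpg');

image.threshold();      // no argument: defaults to 128
image.threshold(true);  // boolean true: treated as 128
image.threshold(false); // boolean false: 0, i.e. thresholding is disabled
image.threshold(40);    // integer in 0..255: used as-is
// image.threshold(300); // out of range: throws 'Invalid threshold (0 to 255) 300'
```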
@@ -478,7 +503,7 @@ Sharp.prototype.withMetadata = function(withMetadata) {
       typeof withMetadata.orientation === 'number' &&
       !Number.isNaN(withMetadata.orientation) &&
       withMetadata.orientation % 1 === 0 &&
-      withMetadata.orientation >=0 &&
+      withMetadata.orientation >= 0 &&
       withMetadata.orientation <= 7
     ) {
       this.options.withMetadataOrientation = withMetadata.orientation;
@@ -504,7 +529,7 @@ Sharp.prototype.tile = function(size, overlap) {
   }
   // Overlap of tiles, in pixels
   if (typeof overlap !== 'undefined' && overlap !== null) {
-    if (!Number.isNaN(overlap) && overlap % 1 === 0 && overlap >=0 && overlap <= 8192) {
+    if (!Number.isNaN(overlap) && overlap % 1 === 0 && overlap >= 0 && overlap <= 8192) {
       if (overlap > this.options.tileSize) {
         throw new Error('Tile overlap ' + overlap + ' cannot be larger than tile size ' + this.options.tileSize);
       }
@@ -17,7 +17,8 @@
     "Victor Mateevitsi <mvictoras@gmail.com>",
     "Alaric Holloway <alaric.holloway@gmail.com>",
     "Bernhard K. Weisshuhn <bkw@codingforce.com>",
-    "Chris Riley <criley@primedia.com>"
+    "Chris Riley <criley@primedia.com>",
+    "David Carley <dacarley@gmail.com>"
   ],
   "description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library",
   "scripts": {
@@ -262,4 +262,21 @@ namespace sharp {
     *out = sharpened;
     return 0;
   }
+
+  int Threshold(VipsObject *context, VipsImage *image, VipsImage **out, int threshold) {
+    VipsImage *greyscale;
+    if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, nullptr)) {
+      return -1;
+    }
+    vips_object_local(context, greyscale);
+    image = greyscale;
+
+    VipsImage *thresholded;
+    if (vips_moreeq_const1(image, &thresholded, threshold, nullptr)) {
+      return -1;
+    }
+    vips_object_local(context, thresholded);
+    *out = thresholded;
+    return 0;
+  }
 } // namespace sharp
@@ -24,6 +24,11 @@ namespace sharp {
   */
   int Sharpen(VipsObject *context, VipsImage *image, VipsImage **out, int radius, double flat, double jagged);
 
+  /*
+   * Perform thresholding on an image. If the image is not greyscale, will convert before thresholding.
+   * Pixels with a greyscale value greater-than-or-equal-to `threshold` will be pure white. All others will be pure black.
+   */
+  int Threshold(VipsObject *context, VipsImage *image, VipsImage **out, int threshold);
 } // namespace sharp
 
 #endif // SRC_OPERATIONS_H_
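Conceptually, the operation declared above reduces to a per-pixel comparison; a minimal JavaScript sketch of that mapping, assuming 8-bit greyscale values:

```js
// Conceptual per-pixel behaviour of Threshold(), assuming 8-bit greyscale values.
function thresholdPixel(value, threshold) {
  return value >= threshold ? 255 : 0; // pure white if >= threshold, otherwise pure black
}
```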
@@ -40,6 +40,7 @@ using sharp::Composite;
 using sharp::Normalize;
 using sharp::Blur;
 using sharp::Sharpen;
+using sharp::Threshold;
 
 using sharp::ImageType;
 using sharp::DetermineImageType;
@@ -104,6 +105,7 @@ struct PipelineBaton {
   int sharpenRadius;
   double sharpenFlat;
   double sharpenJagged;
+  int threshold;
   std::string overlayPath;
   double gamma;
   bool greyscale;
@@ -142,6 +144,7 @@ struct PipelineBaton {
     sharpenRadius(0),
     sharpenFlat(1.0),
     sharpenJagged(2.0),
+    threshold(0),
     gamma(0.0),
     greyscale(false),
     normalize(false),
@@ -502,6 +505,7 @@ class PipelineWorker : public AsyncWorker {
     bool shouldAffineTransform = xresidual != 0.0 || yresidual != 0.0;
     bool shouldBlur = baton->blurSigma != 0.0;
     bool shouldSharpen = baton->sharpenRadius != 0;
+    bool shouldThreshold = baton->threshold != 0;
     bool hasOverlay = !baton->overlayPath.empty();
     bool shouldPremultiplyAlpha = HasAlpha(image) && (shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay);
 
@@ -686,6 +690,15 @@ class PipelineWorker : public AsyncWorker {
       image = extractedPost;
     }
 
+    // Threshold - must happen before blurring, due to the utility of blurring after thresholding
+    if (shouldThreshold) {
+      VipsImage *thresholded;
+      if (Threshold(hook, image, &thresholded, baton->threshold)) {
+        return Error();
+      }
+      image = thresholded;
+    }
+
     // Blur
     if (shouldBlur) {
       VipsImage *blurred;
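The comment above explains the ordering: thresholding runs before blurring so that a blur can soften the hard black/white edges it produces. A hedged sketch of that combination through the JavaScript API; paths are placeholders.

```js
// Threshold then blur: the pipeline applies threshold before blur,
// so the hard black/white edges are softened. Paths are placeholders.
var sharp = require('sharp');

sharp('input.jpg')
  .threshold(128)
  .blur(3)
  .toFile('output.jpg', function(err) {
    if (err) throw err;
  });
```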
@@ -1216,6 +1229,7 @@ NAN_METHOD(pipeline) {
   baton->sharpenRadius = To<int32_t>(Get(options, New("sharpenRadius").ToLocalChecked()).ToLocalChecked()).FromJust();
   baton->sharpenFlat = To<double>(Get(options, New("sharpenFlat").ToLocalChecked()).ToLocalChecked()).FromJust();
   baton->sharpenJagged = To<double>(Get(options, New("sharpenJagged").ToLocalChecked()).ToLocalChecked()).FromJust();
+  baton->threshold = To<int32_t>(Get(options, New("threshold").ToLocalChecked()).ToLocalChecked()).FromJust();
   baton->gamma = To<int32_t>(Get(options, New("gamma").ToLocalChecked()).ToLocalChecked()).FromJust();
   baton->greyscale = To<bool>(Get(options, New("greyscale").ToLocalChecked()).ToLocalChecked()).FromJust();
   baton->normalize = To<bool>(Get(options, New("normalize").ToLocalChecked()).ToLocalChecked()).FromJust();
BIN  test/fixtures/expected/threshold-1.jpg  (vendored, new file, 882 B; binary file not shown)
BIN  test/fixtures/expected/threshold-128-alpha.png  (vendored, new file, 2.1 KiB; binary file not shown)
BIN  test/fixtures/expected/threshold-128-transparency.png  (vendored, new file, 1.1 KiB; binary file not shown)
BIN  test/fixtures/expected/threshold-128-transparency.webp  (vendored, new file, 12 KiB; binary file not shown)
BIN  test/fixtures/expected/threshold-128.jpg  (vendored, new file, 14 KiB; binary file not shown)
BIN  test/fixtures/expected/threshold-40.jpg  (vendored, new file, 16 KiB; binary file not shown)
test/unit/threshold.js  (new file, 105 lines)
@@ -0,0 +1,105 @@
+'use strict';
+
+var assert = require('assert');
+
+var sharp = require('../../index');
+var fixtures = require('../fixtures');
+
+sharp.cache(0);
+
+describe('Threshold', function() {
+  it('threshold 1 jpeg', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(320, 240)
+      .threshold(1)
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('jpeg', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-1.jpg'), data, done);
+      });
+  });
+
+  it('threshold 40 jpeg', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(320, 240)
+      .threshold(40)
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('jpeg', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-40.jpg'), data, done);
+      });
+  });
+
+  it('threshold 128', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(320, 240)
+      .threshold(128)
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('jpeg', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-128.jpg'), data, done);
+      });
+  });
+
+  it('threshold default jpeg', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(320, 240)
+      .threshold()
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('jpeg', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-128.jpg'), data, done);
+      });
+  });
+
+  it('threshold default png transparency', function(done) {
+    sharp(fixtures.inputPngWithTransparency)
+      .resize(320, 240)
+      .threshold()
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('png', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-128-transparency.png'), data, done);
+      });
+  });
+
+  it('threshold default png alpha', function(done) {
+    sharp(fixtures.inputPngWithGreyAlpha)
+      .resize(320, 240)
+      .threshold()
+      .toBuffer(function(err, data, info) {
+        assert.strictEqual('png', info.format);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(240, info.height);
+        fixtures.assertSimilar(fixtures.expected('threshold-128-alpha.png'), data, done);
+      });
+  });
+
+  if (sharp.format.webp.output.file) {
+    it('threshold default webp transparency', function(done) {
+      sharp(fixtures.inputWebPWithTransparency)
+        .threshold()
+        .toBuffer(function(err, data, info) {
+          assert.strictEqual('webp', info.format);
+          fixtures.assertSimilar(fixtures.expected('threshold-128-transparency.webp'), data, done);
+        });
+    });
+  }
+
+  it('invalid threshold -1', function() {
+    assert.throws(function() {
+      sharp(fixtures.inputJpg).threshold(-1);
+    });
+  });
+
+  it('invalid threshold 256', function() {
+    assert.throws(function() {
+      sharp(fixtures.inputJpg).threshold(256);
+    });
+  });
+});