diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..5760be58 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +# http://editorconfig.org +root = true + +[*] +indent_style = space +indent_size = 2 +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/docs/api.md b/docs/api.md index 52c6c456..74339d03 100644 --- a/docs/api.md +++ b/docs/api.md @@ -328,6 +328,12 @@ When a `radius` is provided, performs a slower, more accurate sharpen of the L c * `flat`, if present, is a Number representing the level of sharpening to apply to "flat" areas, defaulting to a value of 1.0. * `jagged`, if present, is a Number representing the level of sharpening to apply to "jagged" areas, defaulting to a value of 2.0. +#### threshold([threshold]) + +Converts all pixels in the image to greyscale white or black. Any pixel greater-than-or-equal-to the threshold (0..255) will be white. All others will be black. + +* `threshold`, if present, is a Number, representing the level at or above which pixels will be forced to white. + #### gamma([gamma]) Apply a gamma correction by reducing the encoding (darken) pre-resize at a factor of `1/gamma` then increasing the encoding (brighten) post-resize at a factor of `gamma`. 
diff --git a/index.js b/index.js index c2585f3b..e13244aa 100644 --- a/index.js +++ b/index.js @@ -62,6 +62,7 @@ var Sharp = function(input) { sharpenRadius: 0, sharpenFlat: 1, sharpenJagged: 2, + threshold: 0, gamma: 0, greyscale: false, normalize: 0, @@ -142,7 +143,18 @@ Sharp.prototype._write = function(chunk, encoding, callback) { }; // Crop this part of the resized image (Center/Centre, North, East, South, West) -module.exports.gravity = {'center': 0, 'centre': 0, 'north': 1, 'east': 2, 'south': 3, 'west': 4, 'northeast': 5, 'southeast': 6, 'southwest': 7, 'northwest': 8}; +module.exports.gravity = { + 'center': 0, + 'centre': 0, + 'north': 1, + 'east': 2, + 'south': 3, + 'west': 4, + 'northeast': 5, + 'southeast': 6, + 'southwest': 7, + 'northwest': 8 +}; Sharp.prototype.crop = function(gravity) { this.options.canvas = 'crop'; @@ -328,6 +340,19 @@ Sharp.prototype.sharpen = function(radius, flat, jagged) { return this; }; +Sharp.prototype.threshold = function(threshold) { + if (typeof threshold === 'undefined') { + this.options.threshold = 128; + } else if (typeof threshold === 'boolean') { + this.options.threshold = threshold ? 
128 : 0; + } else if (typeof threshold === 'number' && !Number.isNaN(threshold) && (threshold % 1 === 0) && threshold >= 0 && threshold <= 255) { + this.options.threshold = threshold; + } else { + throw new Error('Invalid threshold (0 to 255) ' + threshold); + } + return this; +}; + /* Set the interpolator to use for the affine transformation */ @@ -478,7 +503,7 @@ Sharp.prototype.withMetadata = function(withMetadata) { typeof withMetadata.orientation === 'number' && !Number.isNaN(withMetadata.orientation) && withMetadata.orientation % 1 === 0 && - withMetadata.orientation >=0 && + withMetadata.orientation >= 0 && withMetadata.orientation <= 7 ) { this.options.withMetadataOrientation = withMetadata.orientation; @@ -504,7 +529,7 @@ Sharp.prototype.tile = function(size, overlap) { } // Overlap of tiles, in pixels if (typeof overlap !== 'undefined' && overlap !== null) { - if (!Number.isNaN(overlap) && overlap % 1 === 0 && overlap >=0 && overlap <= 8192) { + if (!Number.isNaN(overlap) && overlap % 1 === 0 && overlap >= 0 && overlap <= 8192) { if (overlap > this.options.tileSize) { throw new Error('Tile overlap ' + overlap + ' cannot be larger than tile size ' + this.options.tileSize); } diff --git a/package.json b/package.json index 1c3e33fa..b49d47f4 100644 --- a/package.json +++ b/package.json @@ -17,7 +17,8 @@ "Victor Mateevitsi ", "Alaric Holloway ", "Bernhard K. 
Weisshuhn ", - "Chris Riley " + "Chris Riley ", + "David Carley " ], "description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library", "scripts": { diff --git a/src/operations.cc b/src/operations.cc index f2e42549..b00c1463 100644 --- a/src/operations.cc +++ b/src/operations.cc @@ -262,4 +262,21 @@ namespace sharp { *out = sharpened; return 0; } + + int Threshold(VipsObject *context, VipsImage *image, VipsImage **out, int threshold) { + VipsImage *greyscale; + if (vips_colourspace(image, &greyscale, VIPS_INTERPRETATION_B_W, nullptr)) { + return -1; + } + vips_object_local(context, greyscale); + image = greyscale; + + VipsImage *thresholded; + if (vips_moreeq_const1(image, &thresholded, threshold, nullptr)) { + return -1; + } + vips_object_local(context, thresholded); + *out = thresholded; + return 0; + } } // namespace sharp diff --git a/src/operations.h b/src/operations.h index ca70346c..cabe4a2e 100755 --- a/src/operations.h +++ b/src/operations.h @@ -24,6 +24,11 @@ namespace sharp { */ int Sharpen(VipsObject *context, VipsImage *image, VipsImage **out, int radius, double flat, double jagged); + /* + * Perform thresholding on an image. If the image is not greyscale, will convert before thresholding. + * Pixels with a greyscale value greater-than-or-equal-to `threshold` will be pure white. All others will be pure black. 
+ */ + int Threshold(VipsObject *context, VipsImage *image, VipsImage **out, int threshold); } // namespace sharp #endif // SRC_OPERATIONS_H_ diff --git a/src/pipeline.cc b/src/pipeline.cc index d9629c32..df3a98ff 100644 --- a/src/pipeline.cc +++ b/src/pipeline.cc @@ -40,6 +40,7 @@ using sharp::Composite; using sharp::Normalize; using sharp::Blur; using sharp::Sharpen; +using sharp::Threshold; using sharp::ImageType; using sharp::DetermineImageType; @@ -104,6 +105,7 @@ struct PipelineBaton { int sharpenRadius; double sharpenFlat; double sharpenJagged; + int threshold; std::string overlayPath; double gamma; bool greyscale; @@ -142,6 +144,7 @@ struct PipelineBaton { sharpenRadius(0), sharpenFlat(1.0), sharpenJagged(2.0), + threshold(0), gamma(0.0), greyscale(false), normalize(false), @@ -502,6 +505,7 @@ class PipelineWorker : public AsyncWorker { bool shouldAffineTransform = xresidual != 0.0 || yresidual != 0.0; bool shouldBlur = baton->blurSigma != 0.0; bool shouldSharpen = baton->sharpenRadius != 0; + bool shouldThreshold = baton->threshold != 0; bool hasOverlay = !baton->overlayPath.empty(); bool shouldPremultiplyAlpha = HasAlpha(image) && (shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay); @@ -686,6 +690,15 @@ class PipelineWorker : public AsyncWorker { image = extractedPost; } + // Threshold - must happen before blurring, due to the utility of blurring after thresholding + if (shouldThreshold) { + VipsImage *thresholded; + if (Threshold(hook, image, &thresholded, baton->threshold)) { + return Error(); + } + image = thresholded; + } + // Blur if (shouldBlur) { VipsImage *blurred; @@ -1216,6 +1229,7 @@ NAN_METHOD(pipeline) { baton->sharpenRadius = To(Get(options, New("sharpenRadius").ToLocalChecked()).ToLocalChecked()).FromJust(); baton->sharpenFlat = To(Get(options, New("sharpenFlat").ToLocalChecked()).ToLocalChecked()).FromJust(); baton->sharpenJagged = To(Get(options, New("sharpenJagged").ToLocalChecked()).ToLocalChecked()).FromJust(); + 
baton->threshold = To(Get(options, New("threshold").ToLocalChecked()).ToLocalChecked()).FromJust(); baton->gamma = To(Get(options, New("gamma").ToLocalChecked()).ToLocalChecked()).FromJust(); baton->greyscale = To(Get(options, New("greyscale").ToLocalChecked()).ToLocalChecked()).FromJust(); baton->normalize = To(Get(options, New("normalize").ToLocalChecked()).ToLocalChecked()).FromJust(); diff --git a/test/fixtures/expected/threshold-1.jpg b/test/fixtures/expected/threshold-1.jpg new file mode 100644 index 00000000..2e6ad457 Binary files /dev/null and b/test/fixtures/expected/threshold-1.jpg differ diff --git a/test/fixtures/expected/threshold-128-alpha.png b/test/fixtures/expected/threshold-128-alpha.png new file mode 100644 index 00000000..5e900fea Binary files /dev/null and b/test/fixtures/expected/threshold-128-alpha.png differ diff --git a/test/fixtures/expected/threshold-128-transparency.png b/test/fixtures/expected/threshold-128-transparency.png new file mode 100644 index 00000000..a21e9173 Binary files /dev/null and b/test/fixtures/expected/threshold-128-transparency.png differ diff --git a/test/fixtures/expected/threshold-128-transparency.webp b/test/fixtures/expected/threshold-128-transparency.webp new file mode 100644 index 00000000..c86a254f Binary files /dev/null and b/test/fixtures/expected/threshold-128-transparency.webp differ diff --git a/test/fixtures/expected/threshold-128.jpg b/test/fixtures/expected/threshold-128.jpg new file mode 100644 index 00000000..40752777 Binary files /dev/null and b/test/fixtures/expected/threshold-128.jpg differ diff --git a/test/fixtures/expected/threshold-40.jpg b/test/fixtures/expected/threshold-40.jpg new file mode 100644 index 00000000..edc5795d Binary files /dev/null and b/test/fixtures/expected/threshold-40.jpg differ diff --git a/test/unit/threshold.js b/test/unit/threshold.js new file mode 100644 index 00000000..61a62a32 --- /dev/null +++ b/test/unit/threshold.js @@ -0,0 +1,105 @@ +'use strict'; + +var assert 
= require('assert'); + +var sharp = require('../../index'); +var fixtures = require('../fixtures'); + +sharp.cache(0); + +describe('Threshold', function() { + it('threshold 1 jpeg', function(done) { + sharp(fixtures.inputJpg) + .resize(320, 240) + .threshold(1) + .toBuffer(function(err, data, info) { + assert.strictEqual('jpeg', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-1.jpg'), data, done); + }); + }); + + it('threshold 40 jpeg', function(done) { + sharp(fixtures.inputJpg) + .resize(320, 240) + .threshold(40) + .toBuffer(function(err, data, info) { + assert.strictEqual('jpeg', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-40.jpg'), data, done); + }); + }); + + it('threshold 128', function(done) { + sharp(fixtures.inputJpg) + .resize(320, 240) + .threshold(128) + .toBuffer(function(err, data, info) { + assert.strictEqual('jpeg', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-128.jpg'), data, done); + }); + }); + + it('threshold default jpeg', function(done) { + sharp(fixtures.inputJpg) + .resize(320, 240) + .threshold() + .toBuffer(function(err, data, info) { + assert.strictEqual('jpeg', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-128.jpg'), data, done); + }); + }); + + it('threshold default png transparency', function(done) { + sharp(fixtures.inputPngWithTransparency) + .resize(320, 240) + .threshold() + .toBuffer(function(err, data, info) { + assert.strictEqual('png', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-128-transparency.png'), data, done); + }); + }); + 
+ it('threshold default png alpha', function(done) { + sharp(fixtures.inputPngWithGreyAlpha) + .resize(320, 240) + .threshold() + .toBuffer(function(err, data, info) { + assert.strictEqual('png', info.format); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('threshold-128-alpha.png'), data, done); + }); + }); + + if (sharp.format.webp.output.file) { + it('threshold default webp transparency', function(done) { + sharp(fixtures.inputWebPWithTransparency) + .threshold() + .toBuffer(function(err, data, info) { + assert.strictEqual('webp', info.format); + fixtures.assertSimilar(fixtures.expected('threshold-128-transparency.webp'), data, done); + }); + }); + } + + it('invalid threshold -1', function() { + assert.throws(function() { + sharp(fixtures.inputJpg).threshold(-1); + }); + }); + + it('invalid threshold 256', function() { + assert.throws(function() { + sharp(fixtures.inputJpg).threshold(256); + }); + }); +});