diff --git a/docs/api.md b/docs/api.md
index c3afa78e..2dc4d89c 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -60,7 +60,7 @@ Fast access to image metadata without decoding any compressed image data.
 * `format`: Name of decoder used to decompress image data e.g. `jpeg`, `png`, `webp`, `gif`, `svg`
 * `width`: Number of pixels wide
 * `height`: Number of pixels high
-* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L522)
+* `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568)
 * `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK
 * `density`: Number of pixels per inch (DPI), if present
 * `hasProfile`: Boolean indicating the presence of an embedded ICC profile
@@ -445,7 +445,7 @@ Convert to 8-bit greyscale; 256 shades of grey.
 
 This is a linear operation. If the input image is in a non-linear colour space such as sRGB, use `gamma()` with `greyscale()` for the best results.
 
-The output image will still be web-friendly sRGB and contain three (identical) channels.
+By default the output image will be web-friendly sRGB and contain three (identical) colour channels. This may be overridden by other sharp operations such as `toColourspace('b-w')`, which will produce an output image containing one colour channel. An alpha channel may be present and will be unchanged by the operation.
 
 #### normalize() / normalise()
@@ -490,11 +490,17 @@ sharp('input.png')
   });
 ```
 
+#### toColourspace(colourspace) / toColorspace(colorspace)
+
+Set the output colourspace. By default the output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
+
+`colourspace` is a string or `sharp.colourspace` enum that identifies the output colourspace. String arguments use vips colour space interpretation names e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568)
+
 #### extractChannel(channel)
 
 Extract a single channel from a multi-channel image.
 
-* `channel` is a zero-indexed integral Number representing the band number to extract. `red`, `green` or `blue` are also accepted as an alternative to `0`, `1` or `2` respectively.
+`channel` is a zero-indexed integral Number representing the band number to extract. `red`, `green` or `blue` are also accepted as an alternative to `0`, `1` or `2` respectively.
 
 ```javascript
 sharp(input)
@@ -505,6 +511,22 @@ sharp(input)
   });
 ```
 
+#### joinChannel(channels, [options])
+
+Join one or more channels to the image. The meaning of the added channels depends on the output colourspace, set with `toColourspace()`. By default the output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
+
+`channels` is one of:
+* a single file path
+* an array of file paths
+* a single buffer
+* an array of buffers
+
+Note that channel ordering follows vips convention:
+* sRGB: 0: Red, 1: Green, 2: Blue, 3: Alpha
+* CMYK: 0: Cyan, 1: Magenta, 2: Yellow, 3: Black, 4: Alpha
+
+Buffers may be any of the image formats supported by sharp: JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data. When adding raw pixel data, the `options` object should contain a `raw` attribute, which follows the format of the attribute of the same name in the `sharp()` constructor. See `sharp()` for details.
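+
+For example, the following sketch joins two extra greyscale channels onto a single-channel input to produce a three-channel sRGB output. The filenames `red.png`, `green.png` and `blue.png` are placeholders and are assumed to be single-channel images of identical dimensions.
+
+```javascript
+// Band 0 comes from the constructor input; bands 1 and 2 from the joined files.
+sharp('red.png')
+  .joinChannel(['green.png', 'blue.png'])
+  .toColourspace('srgb')
+  .toBuffer(function(err, data, info) {
+    // info.channels === 3
+  });
+```
+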
See `raw()` for pixel ordering. + #### bandbool(operation) Perform a bitwise boolean operation on all input image channels (bands) to produce a single channel output image. diff --git a/index.js b/index.js index 5f85fee2..9cc908c9 100644 --- a/index.js +++ b/index.js @@ -86,6 +86,7 @@ var Sharp = function(input, options) { normalize: 0, booleanBufferIn: null, booleanFileIn: '', + joinChannelIn: [], // overlay overlayGravity: 0, overlayXOffset : -1, @@ -109,6 +110,7 @@ var Sharp = function(input, options) { tileSize: 256, tileOverlap: 0, extractChannel: -1, + colourspace: 'srgb', // Function to notify of queue length changes queueListener: function(queueLength) { module.exports.queue.emit('change', queueLength); @@ -421,6 +423,20 @@ Sharp.prototype.overlayWith = function(overlay, options) { return this; }; +/* + Add another color channel to the image +*/ +Sharp.prototype.joinChannel = function(images, options) { + if (Array.isArray(images)) { + images.forEach(function(image) { + this.options.joinChannelIn.push(this._createInputDescriptor(image, options)); + }, this); + } else { + this.options.joinChannelIn.push(this._createInputDescriptor(images, options)); + } + return this; +}; + /* Rotate output image by 0, 90, 180 or 270 degrees Auto-rotation based on the EXIF Orientation tag is represented by an angle of -1 @@ -629,6 +645,18 @@ Sharp.prototype.greyscale = function(greyscale) { }; Sharp.prototype.grayscale = Sharp.prototype.greyscale; +/* + Set output colourspace +*/ +Sharp.prototype.toColourspace = function(colourspace) { + if (!isString(colourspace) ) { + throw new Error('Invalid output colourspace ' + colourspace); + } + this.options.colourspace = colourspace; + return this; +}; +Sharp.prototype.toColorspace = Sharp.prototype.toColourspace; + Sharp.prototype.progressive = function(progressive) { this.options.progressive = isBoolean(progressive) ? 
progressive : true;
   return this;
 };
@@ -817,6 +845,15 @@ module.exports.bool = {
   or: 'or',
   eor: 'eor'
 };
+// Colourspaces
+module.exports.colourspace = {
+  multiband: 'multiband',
+  'b-w': 'b-w',
+  bw: 'b-w',
+  cmyk: 'cmyk',
+  srgb: 'srgb'
+};
+module.exports.colorspace = module.exports.colourspace;
 
 /*
   Resize image to width x height pixels
diff --git a/src/common.cc b/src/common.cc
index 94786dd4..1babef46 100644
--- a/src/common.cc
+++ b/src/common.cc
@@ -430,4 +430,13 @@ namespace sharp {
     );
   }
 
+  /*
+    Get interpretation type from string
+  */
+  VipsInterpretation GetInterpretation(std::string const typeStr) {
+    return static_cast<VipsInterpretation>(
+      vips_enum_from_nick(nullptr, VIPS_TYPE_INTERPRETATION, typeStr.data())
+    );
+  }
+
 }  // namespace sharp
diff --git a/src/common.h b/src/common.h
index cd7598d9..41b51f7f 100644
--- a/src/common.h
+++ b/src/common.h
@@ -196,6 +196,11 @@ namespace sharp {
   */
   VipsOperationBoolean GetBooleanOperation(std::string const opStr);
 
+  /*
+    Get interpretation type from string
+  */
+  VipsInterpretation GetInterpretation(std::string const typeStr);
+
 }  // namespace sharp
 
 #endif  // SRC_COMMON_H_
diff --git a/src/pipeline.cc b/src/pipeline.cc
index afc36961..4db7a400 100644
--- a/src/pipeline.cc
+++ b/src/pipeline.cc
@@ -4,6 +4,7 @@
 #include
 #include
 #include
+#include <map>
 #include
 #include
@@ -39,8 +40,15 @@ class PipelineWorker : public Nan::AsyncWorker {
     // Increment processing task counter
     g_atomic_int_inc(&sharp::counterProcess);
 
+    std::map<VipsInterpretation, std::string> profileMap;
     // Default sRGB ICC profile from https://packages.debian.org/sid/all/icc-profiles-free/filelist
-    std::string srgbProfile = baton->iccProfilePath + "sRGB.icc";
+    profileMap.insert(
+      std::pair<VipsInterpretation, std::string>(VIPS_INTERPRETATION_sRGB,
+        baton->iccProfilePath + "sRGB.icc"));
+    // Convert to sRGB using default CMYK profile from http://www.argyllcms.com/cmyk.icm
+    profileMap.insert(
+      std::pair<VipsInterpretation, std::string>(VIPS_INTERPRETATION_CMYK,
+        baton->iccProfilePath + "cmyk.icm"));
 
     try {
       // Open input
@@ -266,7 +274,8 @@ class PipelineWorker : public Nan::AsyncWorker {
         if (sharp::HasProfile(image)) {
           // Convert to sRGB using embedded profile
           try {
-            image = image.icc_transform(const_cast<char*>(srgbProfile.data()), VImage::option()
+            image = image.icc_transform(
+              const_cast<char*>(profileMap[VIPS_INTERPRETATION_sRGB].data()), VImage::option()
               ->set("embedded", TRUE)
               ->set("intent", VIPS_INTENT_PERCEPTUAL)
             );
@@ -274,10 +283,9 @@ class PipelineWorker : public Nan::AsyncWorker {
             // Ignore failure of embedded profile
           }
         } else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
-          // Convert to sRGB using default CMYK profile from http://www.argyllcms.com/cmyk.icm
-          std::string cmykProfile = baton->iccProfilePath + "cmyk.icm";
-          image = image.icc_transform(const_cast<char*>(srgbProfile.data()), VImage::option()
-            ->set("input_profile", cmykProfile.data())
+          image = image.icc_transform(
+            const_cast<char*>(profileMap[VIPS_INTERPRETATION_sRGB].data()), VImage::option()
+            ->set("input_profile", profileMap[VIPS_INTERPRETATION_CMYK].data())
             ->set("intent", VIPS_INTENT_PERCEPTUAL)
           );
         }
@@ -420,6 +428,19 @@ class PipelineWorker : public Nan::AsyncWorker {
         sharp::RemoveExifOrientation(image);
       }
 
+      // Join additional color channels to the image
+      if(baton->joinChannelIn.size() > 0) {
+        VImage joinImage;
+        ImageType joinImageType = ImageType::UNKNOWN;
+
+        for(unsigned int i = 0; i < baton->joinChannelIn.size(); i++) {
+          std::tie(joinImage, joinImageType) = sharp::OpenInput(baton->joinChannelIn[i], baton->accessMethod);
+
+          image = image.bandjoin(joinImage);
+        }
+        image = image.copy(VImage::option()->set("interpretation", baton->colourspace));
+      }
+
       // Crop/embed
       if (image.width() != baton->width || image.height() != baton->height) {
         if (baton->canvas == Canvas::EMBED) {
@@ -654,12 +675,15 @@ class PipelineWorker : public Nan::AsyncWorker {
       if (sharp::Is16Bit(image.interpretation())) {
         image = image.cast(VIPS_FORMAT_USHORT);
       }
-      if (image.interpretation() != VIPS_INTERPRETATION_sRGB) {
-        image = image.colourspace(VIPS_INTERPRETATION_sRGB);
-        // Transform colours from embedded profile to sRGB profile
-        if (baton->withMetadata && sharp::HasProfile(image)) {
-          image = image.icc_transform(const_cast<char*>(srgbProfile.data()), VImage::option()
-            ->set("embedded", TRUE)
+      if (image.interpretation() != baton->colourspace) {
+        // Need to convert image
+        image = image.colourspace(baton->colourspace);
+        // Transform colours from embedded profile to output profile
+        if (baton->withMetadata &&
+            sharp::HasProfile(image) &&
+            profileMap[baton->colourspace] != std::string()) {
+          image = image.icc_transform(const_cast<char*>(profileMap[baton->colourspace].data()),
+            VImage::option()->set("embedded", TRUE)
           );
         }
       }
@@ -693,7 +717,11 @@ class PipelineWorker : public Nan::AsyncWorker {
         area->free_fn = nullptr;
         vips_area_unref(area);
         baton->formatOut = "jpeg";
-        baton->channels = std::min(baton->channels, 3);
+        if(baton->colourspace == VIPS_INTERPRETATION_CMYK) {
+          baton->channels = std::min(baton->channels, 4);
+        } else {
+          baton->channels = std::min(baton->channels, 3);
+        }
       } else if (baton->formatOut == "png" || (baton->formatOut == "input" && inputImageType == ImageType::PNG)) {
         // Strip profile
         if (!baton->withMetadata) {
@@ -1021,6 +1049,19 @@ NAN_METHOD(pipeline) {
   baton->crop = AttrTo(options, "crop");
   baton->kernel = AttrAsStr(options, "kernel");
   baton->interpolator = AttrAsStr(options, "interpolator");
+  // Join Channel Options
+  if(HasAttr(options, "joinChannelIn")) {
+    v8::Local<v8::Object> joinChannelObject = Nan::Get(options, Nan::New("joinChannelIn").ToLocalChecked())
+      .ToLocalChecked().As<v8::Object>();
+    v8::Local<v8::Array> joinChannelArray = joinChannelObject.As<v8::Array>();
+    int joinChannelArrayLength = AttrTo<int32_t>(joinChannelObject, "length");
+    for(int i = 0; i < joinChannelArrayLength; i++) {
+      baton->joinChannelIn.push_back(
+        CreateInputDescriptor(
+          Nan::Get(joinChannelArray, i).ToLocalChecked().As<v8::Object>(),
+          buffersToPersist));
+    }
+  }
   // Operators
   baton->flatten = AttrTo(options, "flatten");
   baton->negate = AttrTo(options, "negate");
@@ -1077,6 +1118,9 @@ NAN_METHOD(pipeline) {
   baton->optimiseScans = AttrTo(options, "optimiseScans");
   baton->withMetadata = AttrTo(options, "withMetadata");
   baton->withMetadataOrientation = AttrTo(options, "withMetadataOrientation");
+  baton->colourspace = sharp::GetInterpretation(AttrAsStr(options, "colourspace"));
+  if(baton->colourspace == VIPS_INTERPRETATION_ERROR)
+    baton->colourspace = VIPS_INTERPRETATION_sRGB;
   // Output
   baton->formatOut = AttrAsStr(options, "formatOut");
   baton->fileOut = AttrAsStr(options, "fileOut");
diff --git a/src/pipeline.h b/src/pipeline.h
index 2a20dfca..420da2ec 100644
--- a/src/pipeline.h
+++ b/src/pipeline.h
@@ -32,6 +32,7 @@ struct PipelineBaton {
   int overlayYOffset;
   bool overlayTile;
   bool overlayCutout;
+  std::vector<sharp::InputDescriptor *> joinChannelIn;
   int topOffsetPre;
   int leftOffsetPre;
   int widthPre;
@@ -90,6 +91,7 @@ struct PipelineBaton {
   VipsOperationBoolean booleanOp;
   VipsOperationBoolean bandBoolOp;
   int extractChannel;
+  VipsInterpretation colourspace;
   int tileSize;
   int tileOverlap;
   VipsForeignDzContainer tileContainer;
@@ -148,6 +150,7 @@ struct PipelineBaton {
booleanOp(VIPS_OPERATION_BOOLEAN_LAST), bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST), extractChannel(-1), + colourspace(VIPS_INTERPRETATION_LAST), tileSize(256), tileOverlap(0), tileContainer(VIPS_FOREIGN_DZ_CONTAINER_FS), diff --git a/test/fixtures/expected/joinChannel-cmyk.jpg b/test/fixtures/expected/joinChannel-cmyk.jpg new file mode 100644 index 00000000..6c0fe421 Binary files /dev/null and b/test/fixtures/expected/joinChannel-cmyk.jpg differ diff --git a/test/fixtures/expected/joinChannel-rgb.jpg b/test/fixtures/expected/joinChannel-rgb.jpg new file mode 100644 index 00000000..37f8769a Binary files /dev/null and b/test/fixtures/expected/joinChannel-rgb.jpg differ diff --git a/test/fixtures/expected/joinChannel-rgba.png b/test/fixtures/expected/joinChannel-rgba.png new file mode 100644 index 00000000..b26e22ea Binary files /dev/null and b/test/fixtures/expected/joinChannel-rgba.png differ diff --git a/test/fixtures/expected/output.greyscale-single.jpg b/test/fixtures/expected/output.greyscale-single.jpg new file mode 100644 index 00000000..465c9e8b Binary files /dev/null and b/test/fixtures/expected/output.greyscale-single.jpg differ diff --git a/test/fixtures/index.js b/test/fixtures/index.js index 66facc28..5ee799e1 100644 --- a/test/fixtures/index.js +++ b/test/fixtures/index.js @@ -80,6 +80,7 @@ module.exports = { inputPngAlphaPremultiplicationSmall: getPath('alpha-premultiply-1024x768-paper.png'), inputPngAlphaPremultiplicationLarge: getPath('alpha-premultiply-2048x1536-paper.png'), inputPngBooleanNoAlpha: getPath('bandbool.png'), + inputPngTestJoinChannel: getPath('testJoinChannel.png'), inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp inputWebPWithTransparency: getPath('5_webp_a.webp'), // http://www.gstatic.com/webp/gallery3/5_webp_a.webp diff --git a/test/fixtures/stripesH.png b/test/fixtures/stripesH.png index f0cff675..d2227e3c 100644 Binary files a/test/fixtures/stripesH.png and b/test/fixtures/stripesH.png differ diff --git a/test/fixtures/stripesV.png b/test/fixtures/stripesV.png index d4482e95..9c015ee0 100644 Binary files a/test/fixtures/stripesV.png and b/test/fixtures/stripesV.png differ diff --git a/test/fixtures/testJoinChannel.png b/test/fixtures/testJoinChannel.png new file mode 100644 index 00000000..888ce6a9 Binary files /dev/null and b/test/fixtures/testJoinChannel.png differ diff --git a/test/unit/bandbool.js b/test/unit/bandbool.js index 2194dcc4..81e1ec2a 100644 --- a/test/unit/bandbool.js +++ b/test/unit/bandbool.js @@ -15,13 +15,12 @@ describe('Bandbool per-channel boolean operations', function() { it(op + ' operation', function(done) { sharp(fixtures.inputPngBooleanNoAlpha) .bandbool(op) + .toColourspace('b-w') .toBuffer(function(err, data, info) { - // should use .toColourspace('b-w') here to get 1 channel output, when it is merged if (err) throw err; assert.strictEqual(200, info.width); assert.strictEqual(200, info.height); - //assert.strictEqual(1, info.channels); - assert.strictEqual(3, info.channels); + assert.strictEqual(1, info.channels); fixtures.assertSimilar(fixtures.expected('bandbool_' + op + '_result.png'), data, done); }); }); diff --git a/test/unit/colourspace.js b/test/unit/colourspace.js index 2582516e..1eb4ee31 100644 --- a/test/unit/colourspace.js +++ b/test/unit/colourspace.js @@ -29,6 +29,20 @@ describe('Colour space conversion', function() { .toFile(fixtures.path('output.greyscale-not.jpg'), done); }); + it('Greyscale with single channel output', function(done) { + sharp(fixtures.inputJpg) + .resize(320, 240) + 
.greyscale() + .toColourspace('b-w') + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(1, info.channels); + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + fixtures.assertSimilar(fixtures.expected('output.greyscale-single.jpg'), data, done); + }); + }); + if (sharp.format.tiff.input.file && sharp.format.webp.output.buffer) { it('From 1-bit TIFF to sRGB WebP [slow]', function(done) { sharp(fixtures.inputTiff) @@ -79,4 +93,10 @@ describe('Colour space conversion', function() { }); }); + it('Invalid input', function() { + assert.throws(function() { + sharp(fixtures.inputJpg) + .toColourspace(null); + }); + }); }); diff --git a/test/unit/joinChannel.js b/test/unit/joinChannel.js new file mode 100644 index 00000000..36c80e26 --- /dev/null +++ b/test/unit/joinChannel.js @@ -0,0 +1,151 @@ +'use strict'; + +var assert = require('assert'); +var fs = require('fs'); +var sharp = require('../../index'); +var fixtures = require('../fixtures'); +var BluebirdPromise = require('bluebird'); + +describe('Image channel insertion', function() { + + it('Grayscale to RGB, buffer', function(done) { + sharp(fixtures.inputPng) // gray -> red + .resize(320, 240) + .joinChannel(fixtures.inputPngTestJoinChannel) // new green channel + .joinChannel(fixtures.inputPngStripesH) // new blue channel + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(3, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgb.jpg'), data, done); + }); + }); + + it('Grayscale to RGB, file', function(done) { + sharp(fixtures.inputPng) // gray -> red + .resize(320, 240) + .joinChannel(fs.readFileSync(fixtures.inputPngTestJoinChannel)) // new green channel + .joinChannel(fs.readFileSync(fixtures.inputPngStripesH)) // new blue channel + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(3, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgb.jpg'), data, done); + }); + }); + + it('Grayscale to RGBA, buffer', function(done) { + sharp(fixtures.inputPng) // gray -> red + .resize(320, 240) + .joinChannel([fixtures.inputPngTestJoinChannel, + fixtures.inputPngStripesH, + fixtures.inputPngStripesV]) // new green + blue + alpha channel + .toColourspace(sharp.colourspace.srgb) + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(4, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgba.png'), data, done); + }); + }); + + it('Grayscale to RGBA, file', function(done) { + sharp(fixtures.inputPng) // gray -> red + .resize(320, 240) + .joinChannel([fs.readFileSync(fixtures.inputPngTestJoinChannel), // new green channel + fs.readFileSync(fixtures.inputPngStripesH), // new blue channel + fs.readFileSync(fixtures.inputPngStripesV)]) // new alpha channel + .toColourspace('srgb') + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(4, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgba.png'), data, done); + }); + }); + + it('Grayscale to CMYK, buffers', function(done) { + sharp(fixtures.inputPng) // gray -> magenta + .resize(320, 240) + 
.joinChannel([fs.readFileSync(fixtures.inputPngTestJoinChannel), // new cyan channel + fs.readFileSync(fixtures.inputPngStripesH), // new yellow channel + fs.readFileSync(fixtures.inputPngStripesV)]) // new black channel + .toColorspace('cmyk') + .toFormat('jpeg') + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(4, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-cmyk.jpg'), data, done); + }); + }); + + it('Join raw buffers to RGB', function(done) { + BluebirdPromise.all([ + sharp(fixtures.inputPngTestJoinChannel).toColourspace('b-w').raw().toBuffer(), + sharp(fixtures.inputPngStripesH).toColourspace('b-w').raw().toBuffer() + ]) + .then(function(buffers) { + sharp(fixtures.inputPng) + .resize(320, 240) + .joinChannel(buffers, + { raw: { + width: 320, + height: 240, + channels: 1 + }}) + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(3, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgb.jpg'), data, done); + }); + }) + .catch(function(err) { + throw err; + }); + }); + + it('Grayscale to RGBA, files, two arrays', function(done) { + sharp(fixtures.inputPng) // gray -> red + .resize(320, 240) + .joinChannel([fs.readFileSync(fixtures.inputPngTestJoinChannel)]) // new green channel + .joinChannel([fs.readFileSync(fixtures.inputPngStripesH), // new blue channel + fs.readFileSync(fixtures.inputPngStripesV)]) // new alpha channel + .toColourspace('srgb') + .toBuffer(function(err, data, info) { + if (err) throw err; + assert.strictEqual(320, info.width); + assert.strictEqual(240, info.height); + assert.strictEqual(4, info.channels); + fixtures.assertSimilar(fixtures.expected('joinChannel-rgba.png'), data, done); + }); + }); + + it('Invalid raw buffer description', function() { + assert.throws(function() { + sharp().joinChannel(fs.readFileSync(fixtures.inputPng),{raw:{}}); + }); + }); + + it('Invalid input', function() { + assert.throws(function() { + sharp(fixtures.inputJpg) + .joinChannel(1); + }); + }); + + it('No arguments', function() { + assert.throws(function() { + sharp(fixtures.inputJpg) + .joinChannel(); + }); + }); + +});