diff --git a/lib/output.js b/lib/output.js
index d4445e51..e19971e9 100644
--- a/lib/output.js
+++ b/lib/output.js
@@ -61,6 +61,7 @@ const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math
  * `info` contains the output image `format`, `size` (bytes), `width`, `height`,
  * `channels` and `premultiplied` (indicating if premultiplication was used).
  * When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
+ * When using the attention crop strategy also contains `attentionX` and `attentionY`, the centre of the cropped region.
  * May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
  * @returns {Promise<Object>} - when no callback is provided
  * @throws {Error} Invalid parameters
diff --git a/src/pipeline.cc b/src/pipeline.cc
index 01030061..6430a7d3 100644
--- a/src/pipeline.cc
+++ b/src/pipeline.cc
@@ -456,6 +456,7 @@ class PipelineWorker : public Napi::AsyncWorker {
           // Gravity-based crop
           int left;
           int top;
+
           std::tie(left, top) = sharp::CalculateCrop(
             inputWidth, inputHeight, baton->width, baton->height, baton->position);
           int width = std::min(inputWidth, baton->width);
@@ -466,16 +467,25 @@
                 left, top, width, height, nPages, &targetPageHeight)
             : image.extract_area(left, top, width, height);
         } else {
+          int attention_x;
+          int attention_y;
+
           // Attention-based or Entropy-based crop
           MultiPageUnsupported(nPages, "Resize strategy");
           image = image.tilecache(VImage::option()
             ->set("access", VIPS_ACCESS_RANDOM)
             ->set("threaded", TRUE));
+
           image = image.smartcrop(baton->width, baton->height, VImage::option()
-            ->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION));
+            ->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION)
+            ->set("attention_x", &attention_x)
+            ->set("attention_y", &attention_y));
           baton->hasCropOffset = true;
           baton->cropOffsetLeft = static_cast<int>(image.xoffset());
           baton->cropOffsetTop = static_cast<int>(image.yoffset());
+          baton->hasAttentionCenter = true;
+          baton->attentionX = static_cast<int>(attention_x * jpegShrinkOnLoad / scale);
+          baton->attentionY = static_cast<int>(attention_y * jpegShrinkOnLoad / scale);
         }
       }
     }
@@ -1198,6 +1208,10 @@
         info.Set("cropOffsetLeft", static_cast<int32_t>(baton->cropOffsetLeft));
         info.Set("cropOffsetTop", static_cast<int32_t>(baton->cropOffsetTop));
       }
+      if (baton->hasAttentionCenter) {
+        info.Set("attentionX", static_cast<int32_t>(baton->attentionX));
+        info.Set("attentionY", static_cast<int32_t>(baton->attentionY));
+      }
       if (baton->trimThreshold > 0.0) {
         info.Set("trimOffsetLeft", static_cast<int32_t>(baton->trimOffsetLeft));
         info.Set("trimOffsetTop", static_cast<int32_t>(baton->trimOffsetTop));
diff --git a/src/pipeline.h b/src/pipeline.h
index 1e35f033..4a9f4650 100644
--- a/src/pipeline.h
+++ b/src/pipeline.h
@@ -74,6 +74,9 @@ struct PipelineBaton {
   bool hasCropOffset;
   int cropOffsetLeft;
   int cropOffsetTop;
+  bool hasAttentionCenter;
+  int attentionX;
+  int attentionY;
   bool premultiplied;
   bool tileCentre;
   bool fastShrinkOnLoad;
@@ -236,6 +239,9 @@ struct PipelineBaton {
     hasCropOffset(false),
     cropOffsetLeft(0),
     cropOffsetTop(0),
+    hasAttentionCenter(false),
+    attentionX(0),
+    attentionY(0),
     premultiplied(false),
     tintA(128.0),
     tintB(128.0),
diff --git a/test/fixtures/expected/crop-strategy.webp b/test/fixtures/expected/crop-strategy.webp
new file mode 100644
index 00000000..857a471a
Binary files /dev/null and b/test/fixtures/expected/crop-strategy.webp differ
diff --git a/test/unit/resize-cover.js b/test/unit/resize-cover.js
index 4274c9cd..ec7c9466 100644
--- a/test/unit/resize-cover.js
+++ b/test/unit/resize-cover.js
@@ -376,6 +376,8 @@ describe('Resize fit=cover', function () {
         assert.strictEqual(320, info.height);
         assert.strictEqual(-107, info.cropOffsetLeft);
         assert.strictEqual(0, info.cropOffsetTop);
+        assert.strictEqual(588, info.attentionX);
+        assert.strictEqual(640, info.attentionY);
         fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
       });
   });
@@ -394,10 +396,32 @@ describe('Resize fit=cover', function () {
         assert.strictEqual(80, info.height);
         assert.strictEqual(0, info.cropOffsetLeft);
         assert.strictEqual(0, info.cropOffsetTop);
+        assert.strictEqual(0, info.attentionX);
+        assert.strictEqual(0, info.attentionY);
         fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
       });
   });
 
+  it('WebP', function (done) {
+    sharp(fixtures.inputWebP)
+      .resize(320, 80, {
+        fit: 'cover',
+        position: sharp.strategy.attention
+      })
+      .toBuffer(function (err, data, info) {
+        if (err) throw err;
+        assert.strictEqual('webp', info.format);
+        assert.strictEqual(3, info.channels);
+        assert.strictEqual(320, info.width);
+        assert.strictEqual(80, info.height);
+        assert.strictEqual(0, info.cropOffsetLeft);
+        assert.strictEqual(-161, info.cropOffsetTop);
+        assert.strictEqual(288, info.attentionX);
+        assert.strictEqual(745, info.attentionY);
+        fixtures.assertSimilar(fixtures.expected('crop-strategy.webp'), data, done);
+      });
+  });
+
   it('supports the strategy passed as a string', function (done) {
     sharp(fixtures.inputPngWithTransparency)
       .resize(320, 80, {
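
Note for reviewers: a minimal sketch of how the new `info` fields surface to callers, using an illustrative `input.jpg`. Per the `attention_x * jpegShrinkOnLoad / scale` mapping above (and the `attentionY` of 745 expected for an 80px-tall output in the WebP test), the focal point is reported in the coordinate space of the original input image, not the cropped output.

```js
const sharp = require('sharp');

sharp('input.jpg') // illustrative path
  .resize(320, 80, { fit: 'cover', position: sharp.strategy.attention })
  .toBuffer({ resolveWithObject: true })
  .then(({ info }) => {
    // Existing fields: offset of the crop window within the resized image.
    console.log(info.cropOffsetLeft, info.cropOffsetTop);
    // New fields: centre of the region that drew the attention crop,
    // relative to the top-left of the original input image.
    console.log(info.attentionX, info.attentionY);
  })
  .catch(console.error);
```

The entropy strategy (`baton->position == 16`) takes the same code path, so it also reports `attentionX`/`attentionY`; only gravity-based crops leave `hasAttentionCenter` false.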