From 18b9991fe7bf22852a6e3b56f03d026a971bad48 Mon Sep 17 00:00:00 2001
From: Lovell Fuller
Date: Tue, 11 Oct 2016 21:12:25 +0100
Subject: [PATCH] Add experimental 'attention' crop strategy

---
 .gitignore                                        |  4 +
 .jshintignore                                     |  1 +
 docs/api.md                                       |  9 +-
 docs/changelog.md                                 |  3 +
 index.js                                          |  5 +-
 src/operations.cc                                 | 99 +++++++++++------
 src/operations.h                                  | 19 +++-
 src/pipeline.cc                                   | 13 ++-
 src/pipeline.h                                    |  4 +
 .../{crop-entropy.jpg => crop-strategy.jpg}       | Bin
 .../{crop-entropy.png => crop-strategy.png}       | Bin
 test/saliency/README.md                           | 16 +++
 test/saliency/download.sh                         | 25 +++++
 test/saliency/humanae/download.js                 | 39 +++++++
 test/saliency/humanae/package.json                |  9 ++
 test/saliency/humanae/tone.js                     | 34 ++++++
 test/saliency/report.html                         | 25 +++++
 test/saliency/report.js                           | 69 ++++++++++++
 test/saliency/userData.js                         | 69 ++++++++++++
 test/unit/cpplint.js                              |  3 +
 test/unit/crop.js                                 | 43 +++++++-
 21 files changed, 438 insertions(+), 51 deletions(-)
 rename test/fixtures/expected/{crop-entropy.jpg => crop-strategy.jpg} (100%)
 rename test/fixtures/expected/{crop-entropy.png => crop-strategy.png} (100%)
 create mode 100644 test/saliency/README.md
 create mode 100755 test/saliency/download.sh
 create mode 100644 test/saliency/humanae/download.js
 create mode 100644 test/saliency/humanae/package.json
 create mode 100644 test/saliency/humanae/tone.js
 create mode 100644 test/saliency/report.html
 create mode 100644 test/saliency/report.js
 create mode 100644 test/saliency/userData.js

diff --git a/.gitignore b/.gitignore
index 7398e1af..727e9030 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,10 @@ coverage
 test/bench/node_modules
 test/fixtures/output*
 test/leak/libvips.supp
+test/saliency/report.json
+test/saliency/Image*
+test/saliency/[Uu]serData*
+!test/saliency/userData.js
 lib
 include
 packaging/libvips*

diff --git a/.jshintignore b/.jshintignore
index 426fab9f..1eb73c30 100644
--- a/.jshintignore
+++ b/.jshintignore
@@ -1,3 +1,4 @@
 node_modules
 test/bench/node_modules
+test/saliency/humanae/node_modules
 coverage

diff --git a/docs/api.md b/docs/api.md
index 2dc4d89c..dc063b5e 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -175,12 +175,13 @@
 Possible attributes of `sharp.gravity` are `north`, `northeast`, `east`,
 `southeast`, `south`, `southwest`, `west`, `northwest`, `center` and
 `centre`.
 
-Possible attributes of the experimental `sharp.strategy` are:
+The experimental strategy-based approach resizes the image so one dimension
+is at its target size, then repeatedly ranks the regions along the edges of
+the other dimension, removing the lower-scoring edge until that dimension
+also reaches its target size. The score is based on the selected strategy:
 
-* `entropy`: resize so one dimension is at its target size
-then repeatedly remove pixels from the edge with the lowest
-[Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29)
-until it too reaches the target size.
+* `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
+* `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.
 
 The default crop option is a `center`/`centre` gravity.
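A minimal usage sketch of the API change documented above; `input.jpg` and `output.jpg` are hypothetical placeholders, not files from this repository:

```js
const sharp = require('sharp');

// Crop to 200x200, keeping the edge regions that score highest under the
// experimental attention strategy (luminance frequency, colour saturation,
// presence of skin tones).
sharp('input.jpg')
  .resize(200, 200)
  .crop(sharp.strategy.attention)
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    console.log(info.width, info.height); // 200 200
  });
```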
diff --git a/docs/changelog.md b/docs/changelog.md
index c6fa6407..ec14153c 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -8,6 +8,9 @@ Requires libvips v8.3.3
 
 * C++11 ABI version is now auto-detected, remove sharp-cxx11 installation flag.
 
+* Add experimental 'attention' crop strategy.
+  [#295](https://github.com/lovell/sharp/issues/295)
+
 * Include .node extension for Meteor's require() implementation.
   [#537](https://github.com/lovell/sharp/issues/537)
   [@isjackwild](https://github.com/isjackwild)

diff --git a/index.js b/index.js
index ed2e9fd1..56c45ae9 100644
--- a/index.js
+++ b/index.js
@@ -260,7 +260,8 @@ module.exports.gravity = {
 
 // Strategies for automagic behaviour
 module.exports.strategy = {
-  entropy: 16
+  entropy: 16,
+  attention: 17
 };
 
 /*
@@ -277,7 +278,7 @@ Sharp.prototype.crop = function(crop) {
   } else if (isString(crop) && isInteger(module.exports.gravity[crop])) {
     // Gravity (string)
     this.options.crop = module.exports.gravity[crop];
-  } else if (isInteger(crop) && crop === module.exports.strategy.entropy) {
+  } else if (isInteger(crop) && crop >= module.exports.strategy.entropy) {
     // Strategy
     this.options.crop = crop;
   } else {

diff --git a/src/operations.cc b/src/operations.cc
index 1c05d2db..17280ab5 100644
--- a/src/operations.cc
+++ b/src/operations.cc
@@ -1,6 +1,7 @@
 #include <algorithm>
 #include <cmath>
+#include <functional>
 #include <tuple>
 #include <vips/vips8>
 
 #include "common.h"
@@ -289,69 +290,104 @@ namespace sharp {
     }
   }
 
+  /*
+    Calculate the Shannon entropy
+  */
+  double EntropyStrategy::operator()(VImage image) {
+    return image.hist_find().hist_entropy();
+  }
+
+  /*
+    Calculate the intensity of edges, skin tone and saturation
+  */
+  double AttentionStrategy::operator()(VImage image) {
+    // Convert to LAB colourspace
+    VImage lab = image.colourspace(VIPS_INTERPRETATION_LAB);
+    VImage l = lab[0];
+    VImage a = lab[1];
+    VImage b = lab[2];
+    // Edge detect luminosity with the Sobel operator
+    VImage sobel = vips::VImage::new_matrixv(3, 3,
+      -1.0, 0.0, 1.0,
+      -2.0, 0.0, 2.0,
+      -1.0, 0.0, 1.0);
+    VImage edges = l.conv(sobel).abs() + l.conv(sobel.rot90()).abs();
+    // Skin tone chroma thresholds trained with http://humanae.tumblr.com/
+    VImage skin = (a >= 3) & (a <= 22) & (b >= 4) & (b <= 31);
+    // Chroma above ~50% saturation
+    VImage lch = lab.colourspace(VIPS_INTERPRETATION_LCH);
+    VImage c = lch[1];
+    VImage saturation = c > 60;
+    // Find maximum in combined saliency mask
+    VImage mask = edges + skin + saturation;
+    return mask.max();
+  }
+
   /*
-    Calculate crop area based on image entropy
+    Calculate crop area based on the given strategy
   */
-  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight) {
+  std::tuple<int, int> Crop(
+    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
+  ) {
     int left = 0;
     int top = 0;
     int const inWidth = image.width();
     int const inHeight = image.height();
     if (inWidth > outWidth) {
-      // Reduce width by repeated removing slices from edge with lowest entropy
+      // Reduce width by repeatedly removing slices from the edge with the lowest score
       int width = inWidth;
-      double leftEntropy = 0.0;
-      double rightEntropy = 0.0;
+      double leftScore = 0.0;
+      double rightScore = 0.0;
       // Max width of each slice
       int const maxSliceWidth = static_cast<int>(ceil((inWidth - outWidth) / 8.0));
       while (width > outWidth) {
         // Width of current slice
         int const slice = std::min(width - outWidth, maxSliceWidth);
-        if (leftEntropy == 0.0) {
-          // Update entropy of left slice
-          leftEntropy = Entropy(image.extract_area(left, 0, slice, inHeight));
+        if (leftScore == 0.0) {
+          // Update score of left slice
+          leftScore = strategy(image.extract_area(left, 0, slice, inHeight));
         }
-        if (rightEntropy == 0.0) {
-          // Update entropy of right slice
-          rightEntropy = Entropy(image.extract_area(width - slice - 1, 0, slice, inHeight));
+        if (rightScore == 0.0) {
+          // Update score of right slice
+          rightScore = strategy(image.extract_area(width - slice - 1, 0, slice, inHeight));
         }
-        // Keep slice with highest entropy
-        if (leftEntropy >= rightEntropy) {
+        // Keep slice with highest score
+        if (leftScore >= rightScore) {
           // Discard right slice
-          rightEntropy = 0.0;
+          rightScore = 0.0;
         } else {
           // Discard left slice
-          leftEntropy = 0.0;
+          leftScore = 0.0;
           left = left + slice;
         }
         width = width - slice;
       }
     }
     if (inHeight > outHeight) {
-      // Reduce height by repeated removing slices from edge with lowest entropy
+      // Reduce height by repeatedly removing slices from the edge with the lowest score
       int height = inHeight;
-      double topEntropy = 0.0;
-      double bottomEntropy = 0.0;
+      double topScore = 0.0;
+      double bottomScore = 0.0;
       // Max height of each slice
       int const maxSliceHeight = static_cast<int>(ceil((inHeight - outHeight) / 8.0));
       while (height > outHeight) {
         // Height of current slice
         int const slice = std::min(height - outHeight, maxSliceHeight);
-        if (topEntropy == 0.0) {
-          // Update entropy of top slice
-          topEntropy = Entropy(image.extract_area(0, top, inWidth, slice));
+        if (topScore == 0.0) {
+          // Update score of top slice
+          topScore = strategy(image.extract_area(0, top, inWidth, slice));
         }
-        if (bottomEntropy == 0.0) {
-          // Update entropy of bottom slice
-          bottomEntropy = Entropy(image.extract_area(0, height - slice - 1, inWidth, slice));
+        if (bottomScore == 0.0) {
+          // Update score of bottom slice
+          bottomScore = strategy(image.extract_area(0, height - slice - 1, inWidth, slice));
         }
-        // Keep slice with highest entropy
-        if (topEntropy >= bottomEntropy) {
+        // Keep slice with highest score
+        if (topScore >= bottomScore) {
           // Discard bottom slice
-          bottomEntropy = 0.0;
+          bottomScore = 0.0;
         } else {
           // Discard top slice
-          topEntropy = 0.0;
+          topScore = 0.0;
           top = top + slice;
         }
         height = height - slice;
@@ -360,13 +396,6 @@ namespace sharp {
     return std::make_tuple(left, top);
   }
 
-  /*
-    Calculate the Shannon entropy for an image
-  */
-  double Entropy(VImage image) {
-    return image.hist_find().hist_entropy();
-  }
-
   /*
     Insert a tile cache to prevent over-computation of any previous operations in the pipeline
   */
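The refactor above generalises the former `EntropyCrop` search rather than changing it: both strategies plug into the same edge-discard loop. As a rough JavaScript paraphrase of the width pass, with a hypothetical `score(offset, slice)` callback standing in for the strategy functor applied to an extracted slice:

```js
// Sketch of the one-dimensional edge-discard search from Crop() above.
// `score` stands in for EntropyStrategy/AttentionStrategy; it is not
// part of the sharp API.
function cropOffset(inWidth, outWidth, score) {
  let left = 0;
  let width = inWidth;
  // Never remove more than 1/8th of the surplus per iteration
  const maxSlice = Math.ceil((inWidth - outWidth) / 8);
  let leftScore = 0;
  let rightScore = 0;
  while (width > outWidth) {
    const slice = Math.min(width - outWidth, maxSlice);
    if (leftScore === 0) leftScore = score(left, slice);
    if (rightScore === 0) rightScore = score(width - slice - 1, slice);
    if (leftScore >= rightScore) {
      rightScore = 0; // discard the right slice
    } else {
      leftScore = 0; // discard the left slice
      left += slice;
    }
    width -= slice;
  }
  return left;
}
```

Only the score of the discarded edge is reset to zero, so each iteration re-scores at most one freshly exposed slice while the surviving edge keeps its cached score.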
diff --git a/src/operations.h b/src/operations.h
index 8f3cd9f3..e8b4c585 100644
--- a/src/operations.h
+++ b/src/operations.h
@@ -1,8 +1,9 @@
 #ifndef SRC_OPERATIONS_H_
 #define SRC_OPERATIONS_H_
 
+#include <functional>
 #include <tuple>
 #include <vips/vips8>
 
 using vips::VImage;
@@ -63,14 +65,21 @@ namespace sharp {
   VImage Sharpen(VImage image, double const sigma, double const flat, double const jagged);
 
   /*
-    Calculate crop area based on image entropy
+    Crop strategy functors
   */
-  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight);
+  struct EntropyStrategy {
+    double operator()(VImage image);
+  };
+  struct AttentionStrategy {
+    double operator()(VImage image);
+  };
 
   /*
-    Calculate the Shannon entropy for an image
+    Calculate crop area based on given strategy (Entropy, Attention)
   */
-  double Entropy(VImage image);
+  std::tuple<int, int> Crop(
+    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
+  );
 
   /*
     Insert a tile cache to prevent over-computation of any previous operations in the pipeline

diff --git a/src/pipeline.cc b/src/pipeline.cc
index 4f9b6f35..3118ae15 100644
--- a/src/pipeline.cc
+++ b/src/pipeline.cc
@@ -488,13 +488,18 @@ class PipelineWorker : public Nan::AsyncWorker {
           std::tie(left, top) = sharp::CalculateCrop(
             image.width(), image.height(), baton->width, baton->height, baton->crop
           );
-        } else {
+        } else if (baton->crop == 16) {
           // Entropy-based crop
-          std::tie(left, top) = sharp::EntropyCrop(image, baton->width, baton->height);
+          std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::EntropyStrategy());
+        } else {
+          // Attention-based crop
+          std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::AttentionStrategy());
         }
         int width = std::min(image.width(), baton->width);
         int height = std::min(image.height(), baton->height);
         image = image.extract_area(left, top, width, height);
+        baton->cropCalcLeft = left;
+        baton->cropCalcTop = top;
       }
     }
@@ -890,6 +895,10 @@ class PipelineWorker : public Nan::AsyncWorker {
       Set(info, New("width").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(width)));
       Set(info, New("height").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(height)));
       Set(info, New("channels").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->channels)));
+      if (baton->cropCalcLeft != -1 && baton->cropCalcTop != -1) {
+        Set(info, New("cropCalcLeft").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcLeft)));
+        Set(info, New("cropCalcTop").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcTop)));
+      }
 
       if (baton->bufferOutLength > 0) {
         // Pass ownership of output data to Buffer instance

diff --git a/src/pipeline.h b/src/pipeline.h
index 420da2ec..b647e2bf 100644
--- a/src/pipeline.h
+++ b/src/pipeline.h
@@ -46,6 +46,8 @@ struct PipelineBaton {
   int channels;
   Canvas canvas;
   int crop;
+  int cropCalcLeft;
+  int cropCalcTop;
   std::string kernel;
   std::string interpolator;
   double background[4];
@@ -112,6 +114,8 @@ struct PipelineBaton {
     channels(0),
    canvas(Canvas::CROP),
     crop(0),
+    cropCalcLeft(-1),
+    cropCalcTop(-1),
     flatten(false),
     negate(false),
     blurSigma(0.0),
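The new `cropCalcLeft`/`cropCalcTop` baton fields surface the calculated offsets on the `info` object, which the saliency report below relies on. A small sketch of reading them; `portrait.jpg` is a placeholder file name:

```js
const sharp = require('sharp');

sharp('portrait.jpg')
  .resize(80, 320)
  .crop(sharp.strategy.entropy)
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // Offsets of the crop window chosen by the strategy; both fields are
    // initialised to -1 and only reported when a crop was calculated.
    console.log(info.cropCalcLeft, info.cropCalcTop);
  });
```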
diff --git a/test/fixtures/expected/crop-entropy.jpg b/test/fixtures/expected/crop-strategy.jpg
similarity index 100%
rename from test/fixtures/expected/crop-entropy.jpg
rename to test/fixtures/expected/crop-strategy.jpg
diff --git a/test/fixtures/expected/crop-entropy.png b/test/fixtures/expected/crop-strategy.png
similarity index 100%
rename from test/fixtures/expected/crop-entropy.png
rename to test/fixtures/expected/crop-strategy.png
diff --git a/test/saliency/README.md b/test/saliency/README.md
new file mode 100644
index 00000000..580d7f24
--- /dev/null
+++ b/test/saliency/README.md
@@ -0,0 +1,16 @@
+# Crop strategy accuracy
+
+1. Download the [MSRA Salient Object Database](http://research.microsoft.com/en-us/um/people/jiansun/SalientObject/salient_object.htm) (101MB).
+2. Extract each image and its median human-labelled salient region.
+3. Generate a test report of percentage deviance of top and left edges for each crop strategy, plus a naive centre gravity crop as "control".
+
+```sh
+git clone https://github.com/lovell/sharp.git
+cd sharp/test/saliency
+./download.sh
+node report.js
+python -m SimpleHTTPServer
+```
+
+The test report will then be available at
+http://localhost:8000/report.html
diff --git a/test/saliency/download.sh b/test/saliency/download.sh
new file mode 100755
index 00000000..74784472
--- /dev/null
+++ b/test/saliency/download.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+# Fetch and parse the MSRA Salient Object Database 'Image set B'
+# http://research.microsoft.com/en-us/um/people/jiansun/salientobject/salient_object.htm
+
+if [ ! -d Image ]; then
+  if [ ! -f ImageB.zip ]; then
+    echo "Downloading 5000 images (101MB)"
+    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/ImageB.zip
+  fi
+  unzip ImageB.zip
+fi
+
+if [ ! -d UserData ]; then
+  if [ ! -f UserDataB.zip ]; then
+    echo "Downloading human-labelled regions"
+    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/UserDataB.zip
+  fi
+  unzip UserDataB.zip
+fi
+
+if [ ! -f userData.json ]; then
+  echo "Processing human-labelled regions"
+  node userData.js
+fi
diff --git a/test/saliency/humanae/download.js b/test/saliency/humanae/download.js
new file mode 100644
index 00000000..18e88d90
--- /dev/null
+++ b/test/saliency/humanae/download.js
@@ -0,0 +1,39 @@
+'use strict';
+/*jshint esversion: 6 */
+
+const fs = require('fs');
+const request = require('request');
+const tumblr = require('tumblr.js');
+
+const client = tumblr.createClient({
+  consumer_key: '***',
+  consumer_secret: '***'
+});
+
+const fetchImages = function(offset) {
+  console.log(`Fetching offset ${offset}`);
+  client.posts('humanae', {
+    type: 'photo',
+    offset: offset
+  }, function (err, response) {
+    if (err) throw err;
+    if (response.posts.length > 0) {
+      response.posts.forEach((post) => {
+        const url = post.photos[0].alt_sizes
+          .filter((image) => image.width === 100)
+          .map((image) => image.url)
+          [0];
+        const filename = `./images/${post.id}.jpg`;
+        try {
+          fs.statSync(filename);
+        } catch (err) {
+          if (err.code === 'ENOENT') {
+            request(url).pipe(fs.createWriteStream(filename));
+          }
+        }
+      });
+      fetchImages(offset + 20);
+    }
+  });
+};
+fetchImages(0);
diff --git a/test/saliency/humanae/package.json b/test/saliency/humanae/package.json
new file mode 100644
index 00000000..f436f5a2
--- /dev/null
+++ b/test/saliency/humanae/package.json
@@ -0,0 +1,9 @@
+{
+  "name": "sharp-crop-strategy-attention-model-humanae",
+  "version": "0.0.1",
+  "private": true,
+  "dependencies": {
+    "request": "^2.75.0",
+    "tumblr.js": "^1.1.1"
+  }
+}
diff --git a/test/saliency/humanae/tone.js b/test/saliency/humanae/tone.js
new file mode 100644
index 00000000..d89d023d
--- /dev/null
+++ b/test/saliency/humanae/tone.js
@@ -0,0 +1,34 @@
+'use strict';
+/*jshint esversion: 6 */
+
+const fs = require('fs');
+const child_process = require('child_process');
+
+const a = [];
+const b = [];
+
+fs.readdirSync('./images')
+  .filter((file) => file.endsWith('.jpg'))
+  .forEach((file) => {
+    // Extract one pixel, avoiding the first DCT block, and return the value of the A and B channels
+    const command = `convert ./images/${file}[1x1+8+8] -colorspace lab -format "%[fx:u.g] %[fx:u.b]" info:`;
+    const result = child_process.execSync(command, { encoding: 'utf8' });
+    const ab = result.split(' ');
+    a.push(ab[0]);
+    b.push(ab[1]);
+  });
+
+a.sort((v1, v2) => v1 - v2);
+b.sort((v1, v2) => v1 - v2);
+
+// Convert from 0..1 to -128..128
+const convert = function(v) {
+  return Math.round(256 * (v - 0.5));
+};
+
+const threshold = Math.round(a.length / 100);
+console.log(`Trimming lowest/highest ${threshold} for 98th percentile`);
+
+// Ignore ~2% outliers
+console.log(`a ${convert(a[threshold])} - ${convert(a[a.length - threshold])}`);
+console.log(`b ${convert(b[threshold])} - ${convert(b[b.length - threshold])}`);
diff --git a/test/saliency/report.html b/test/saliency/report.html
new file mode 100644
index 00000000..792d2737
--- /dev/null
+++ b/test/saliency/report.html
@@ -0,0 +1,25 @@
+<!-- 25-line HTML page, markup lost in transcription: loads report.json and renders the per-strategy accuracy chart -->
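`tone.js` is where the skin-tone bounds hard-coded into `AttentionStrategy` come from: the trimmed percentiles, still on ImageMagick's 0..1 channel scale, are mapped onto the signed CIELAB a/b axes. An illustrative check of that mapping, where the percentile inputs are invented rather than actual humanae output:

```js
// Mirrors the convert() helper in tone.js: map ImageMagick's 0..1
// channel scale onto the signed CIELAB a/b range.
const convert = (v) => Math.round(256 * (v - 0.5));

// Hypothetical trimmed percentile values for the `a` channel
console.log(convert(0.512)); // 3  -> the lower bound in (a >= 3)
console.log(convert(0.586)); // 22 -> the upper bound in (a <= 22)
```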
diff --git a/test/saliency/report.js b/test/saliency/report.js
new file mode 100644
index 00000000..c46abc76
--- /dev/null
+++ b/test/saliency/report.js
@@ -0,0 +1,69 @@
+'use strict';
+/*jshint esversion: 6 */
+
+const os = require('os');
+const fs = require('fs');
+const path = require('path');
+const async = require('async');
+const sharp = require('../../');
+
+const crops = {
+  centre: sharp.gravity.centre,
+  entropy: sharp.strategy.entropy,
+  attention: sharp.strategy.attention
+};
+const concurrency = os.cpus().length;
+
+const scores = {};
+
+const incrementScore = function(accuracy, crop) {
+  if (typeof scores[accuracy] === 'undefined') {
+    scores[accuracy] = {};
+  }
+  if (typeof scores[accuracy][crop] === 'undefined') {
+    scores[accuracy][crop] = 0;
+  }
+  scores[accuracy][crop]++;
+};
+
+const userData = require('./userData.json');
+const files = Object.keys(userData);
+
+async.eachLimit(files, concurrency, function(file, done) {
+  const filename = path.join(__dirname, 'Image', file);
+  const salientWidth = userData[file].right - userData[file].left;
+  const salientHeight = userData[file].bottom - userData[file].top;
+  sharp(filename).metadata(function(err, metadata) {
+    if (err) console.log(err);
+    async.each(Object.keys(crops), function(crop, done) {
+      async.parallel([
+        // Left edge accuracy
+        function(done) {
+          sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function(err, data, info) {
+            const accuracy = Math.round(Math.abs(userData[file].left - info.cropCalcLeft) / (metadata.width - salientWidth) * 100);
+            incrementScore(accuracy, crop);
+            done();
+          });
+        },
+        // Top edge accuracy
+        function(done) {
+          sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function(err, data, info) {
+            const accuracy = Math.round(Math.abs(userData[file].top - info.cropCalcTop) / (metadata.height - salientHeight) * 100);
+            incrementScore(accuracy, crop);
+            done();
+          });
+        }
+      ], done);
+    }, done);
+  });
+}, function() {
+  const report = [];
+  Object.keys(scores).forEach(function(accuracy) {
+    report.push(
+      Object.assign({
+        accuracy: parseInt(accuracy, 10)
+      }, scores[accuracy])
+    );
+  });
+  fs.writeFileSync('report.json', JSON.stringify(report, null, 2));
+});
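The metric in `report.js` buckets each result by how far the calculated edge lands from the human-labelled one, as a percentage of the available slack in that dimension. A worked example with invented numbers:

```js
// Hypothetical image 400px wide whose labelled salient region
// is 100px wide and starts at x=120
const metadataWidth = 400;
const salient = { left: 120, width: 100 };
const cropCalcLeft = 90; // offset chosen by the strategy under test

// |120 - 90| / (400 - 100) * 100 = 10% deviance from the labelled edge
const accuracy = Math.round(
  Math.abs(salient.left - cropCalcLeft) / (metadataWidth - salient.width) * 100
);
console.log(accuracy); // 10 -> one tally in this strategy's '10' bucket
```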
diff --git a/test/saliency/userData.js b/test/saliency/userData.js
new file mode 100644
index 00000000..ab2961db
--- /dev/null
+++ b/test/saliency/userData.js
@@ -0,0 +1,69 @@
+'use strict';
+/*jshint esversion: 6, loopfunc: true */
+
+const fs = require('fs');
+const path = require('path');
+
+const userDataDir = 'UserData';
+
+const images = {};
+
+const median = function(values) {
+  values.sort(function(a, b) {
+    return a - b;
+  });
+  const half = Math.floor(values.length / 2);
+  if (values.length % 2) {
+    return values[half];
+  } else {
+    return Math.floor((values[half - 1] + values[half]) / 2);
+  }
+};
+
+// List of files
+fs.readdirSync(userDataDir).forEach(function(file) {
+  // Contents of file
+  const lines = fs.readFileSync(path.join(userDataDir, file), {encoding: 'utf-8'}).split(/\r\n/);
+  // First line = number of entries
+  const entries = parseInt(lines[0], 10);
+  // Verify number of entries
+  if (entries !== 500) {
+    throw new Error('Expecting 500 images in ' + file + ', found ' + entries);
+  }
+  // Keep track of which line we're on
+  let linePos = 2;
+  for (let i = 0; i < entries; i++) {
+    // Get data for current image
+    const filename = lines[linePos].replace(/\\/, path.sep);
+    linePos = linePos + 2;
+    const regions = lines[linePos].split('; ');
+    linePos = linePos + 2;
+    // Parse human-labelled regions for min/max coords
+    const lefts = [], tops = [], rights = [], bottoms = [];
+    regions.forEach(function(region) {
+      if (region.indexOf(' ') !== -1) {
+        const coords = region.split(' ');
+        lefts.push(parseInt(coords[0], 10));
+        tops.push(parseInt(coords[1], 10));
+        rights.push(parseInt(coords[2], 10));
+        bottoms.push(parseInt(coords[3], 10));
+      }
+    });
+    // Add image
+    images[filename] = {
+      left: median(lefts),
+      top: median(tops),
+      right: median(rights),
+      bottom: median(bottoms)
+    };
+  }
+});
+
+// Verify number of images found
+const imageCount = Object.keys(images).length;
+if (imageCount === 5000) {
+  // Write output
+  fs.writeFileSync('userData.json', JSON.stringify(images, null, 2));
+} else {
+  throw new Error('Expecting 5000 images, found ' + imageCount);
+}
diff --git a/test/unit/cpplint.js b/test/unit/cpplint.js
index 44dc59b6..ae174dba 100644
--- a/test/unit/cpplint.js
+++ b/test/unit/cpplint.js
@@ -29,6 +29,9 @@ describe('cpplint', function() {
       },
       whitespace: {
         parens: false
+      },
+      runtime: {
+        indentation_namespace: false
       }
     }
   }, function(err, report) {
diff --git a/test/unit/crop.js b/test/unit/crop.js
index 5463ea37..0c7ac156 100644
--- a/test/unit/crop.js
+++ b/test/unit/crop.js
@@ -172,7 +172,9 @@ describe('Crop', function() {
         assert.strictEqual(3, info.channels);
         assert.strictEqual(80, info.width);
         assert.strictEqual(320, info.height);
-        fixtures.assertSimilar(fixtures.expected('crop-entropy.jpg'), data, done);
+        assert.strictEqual(250, info.cropCalcLeft);
+        assert.strictEqual(0, info.cropCalcTop);
+        fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
       });
     });
 
@@ -186,10 +188,47 @@ describe('Crop', function() {
         assert.strictEqual(4, info.channels);
         assert.strictEqual(320, info.width);
         assert.strictEqual(80, info.height);
-        fixtures.assertSimilar(fixtures.expected('crop-entropy.png'), data, done);
+        assert.strictEqual(0, info.cropCalcLeft);
+        assert.strictEqual(80, info.cropCalcTop);
+        fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
       });
     });
 
   });
 
+  describe('Attention strategy', function() {
+
+    it('JPEG', function(done) {
+      sharp(fixtures.inputJpgWithCmykProfile)
+        .resize(80, 320)
+        .crop(sharp.strategy.attention)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('jpeg', info.format);
+          assert.strictEqual(3, info.channels);
+          assert.strictEqual(80, info.width);
+          assert.strictEqual(320, info.height);
+          assert.strictEqual(250, info.cropCalcLeft);
+          assert.strictEqual(0, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
+        });
+    });
+
+    it('PNG', function(done) {
+      sharp(fixtures.inputPngWithTransparency)
+        .resize(320, 80)
+        .crop(sharp.strategy.attention)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('png', info.format);
+          assert.strictEqual(4, info.channels);
+          assert.strictEqual(320, info.width);
+          assert.strictEqual(80, info.height);
+          assert.strictEqual(0, info.cropCalcLeft);
+          assert.strictEqual(80, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
+        });
+    });
+
+  });
 });
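For reference, `userData.js` reduces the multiple human-labelled rectangles per image to their median edges; the generated `userData.json` consumed by `report.js` has roughly this shape, with file names and values invented for illustration:

```js
// Hypothetical excerpt of the generated userData.json:
// median salient rectangle per image, keyed by file name
{
  "0_1_1234.jpg": { "left": 120, "top": 48, "right": 260, "bottom": 190 },
  "0_2_5678.jpg": { "left": 33, "top": 70, "right": 180, "bottom": 215 }
}
```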