Add experimental 'attention' crop strategy

Lovell Fuller 2016-10-11 21:12:25 +01:00
parent 739178dd74
commit 18b9991fe7
21 changed files with 438 additions and 51 deletions

.gitignore

@@ -4,6 +4,10 @@ coverage
 test/bench/node_modules
 test/fixtures/output*
 test/leak/libvips.supp
+test/saliency/report.json
+test/saliency/Image*
+test/saliency/[Uu]serData*
+!test/saliency/userData.js
 lib
 include
 packaging/libvips*


@@ -1,3 +1,4 @@
 node_modules
 test/bench/node_modules
+test/saliency/humanae/node_modules
 coverage


@@ -175,12 +175,11 @@ Possible attributes of `sharp.gravity` are
 `north`, `northeast`, `east`, `southeast`, `south`,
 `southwest`, `west`, `northwest`, `center` and `centre`.

-Possible attributes of the experimental `sharp.strategy` are:
+The experimental strategy-based approach resizes so one dimension is at its target length
+then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.

-* `entropy`: resize so one dimension is at its target size
-then repeatedly remove pixels from the edge with the lowest
-[Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29)
-until it too reaches the target size.
+* `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
+* `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.

 The default crop option is a `center`/`centre` gravity.
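A minimal usage sketch of the strategy-based crop (the `input.jpg` name is an illustrative placeholder; the `cropCalcLeft`/`cropCalcTop` properties on `info` are those added by this commit):

```javascript
const sharp = require('sharp');

// Resize to 200x200, letting the experimental attention strategy
// decide which edge regions to discard.
sharp('input.jpg')
  .resize(200, 200)
  .crop(sharp.strategy.attention)
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // The offsets the strategy chose, relative to the resized image
    console.log(info.cropCalcLeft, info.cropCalcTop);
  });
```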


@@ -8,6 +8,9 @@ Requires libvips v8.3.3
 * C++11 ABI version is now auto-detected, remove sharp-cxx11 installation flag.

+* Add experimental 'attention' crop strategy.
+  [#295](https://github.com/lovell/sharp/issues/295)
+
 * Include .node extension for Meteor's require() implementation.
   [#537](https://github.com/lovell/sharp/issues/537)
   [@isjackwild](https://github.com/isjackwild)


@@ -260,7 +260,8 @@ module.exports.gravity = {
 // Strategies for automagic behaviour
 module.exports.strategy = {
-  entropy: 16
+  entropy: 16,
+  attention: 17
 };

 /*
@@ -277,7 +278,7 @@ Sharp.prototype.crop = function(crop) {
   } else if (isString(crop) && isInteger(module.exports.gravity[crop])) {
     // Gravity (string)
     this.options.crop = module.exports.gravity[crop];
-  } else if (isInteger(crop) && crop === module.exports.strategy.entropy) {
+  } else if (isInteger(crop) && crop >= module.exports.strategy.entropy) {
     // Strategy
     this.options.crop = crop;
   } else {
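For illustration, a sketch of the argument forms `crop()` accepts after this change (`input.jpg` is a placeholder):

```javascript
const sharp = require('sharp');

sharp('input.jpg').crop('northwest');              // gravity by name
sharp('input.jpg').crop(sharp.gravity.east);       // gravity constant
sharp('input.jpg').crop(sharp.strategy.entropy);   // strategy constant (16)
sharp('input.jpg').crop(sharp.strategy.attention); // strategy constant (17)
```

Switching the comparison from `===` to `>=` means any future strategy constant greater than `entropy` will pass validation without further changes to this branch.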


@@ -1,6 +1,7 @@
 #include <algorithm>
-#include <tuple>
+#include <functional>
 #include <memory>
+#include <tuple>
 #include <vips/vips8>

 #include "common.h"
@@ -289,69 +290,104 @@ namespace sharp {
     }
   }

+  /*
+    Calculate the Shannon entropy
+  */
+  double EntropyStrategy::operator()(VImage image) {
+    return image.hist_find().hist_entropy();
+  }
+
+  /*
+    Calculate the intensity of edges, skin tone and saturation
+  */
+  double AttentionStrategy::operator()(VImage image) {
+    // Convert to LAB colourspace
+    VImage lab = image.colourspace(VIPS_INTERPRETATION_LAB);
+    VImage l = lab[0];
+    VImage a = lab[1];
+    VImage b = lab[2];
+    // Edge detect luminosity with the Sobel operator
+    VImage sobel = vips::VImage::new_matrixv(3, 3,
+      -1.0, 0.0, 1.0,
+      -2.0, 0.0, 2.0,
+      -1.0, 0.0, 1.0);
+    VImage edges = l.conv(sobel).abs() + l.conv(sobel.rot90()).abs();
+    // Skin tone chroma thresholds trained with http://humanae.tumblr.com/
+    VImage skin = (a >= 3) & (a <= 22) & (b >= 4) & (b <= 31);
+    // Chroma >~50% saturation
+    VImage lch = lab.colourspace(VIPS_INTERPRETATION_LCH);
+    VImage c = lch[1];
+    VImage saturation = c > 60;
+    // Find maximum in combined saliency mask
+    VImage mask = edges + skin + saturation;
+    return mask.max();
+  }
+
   /*
     Calculate crop area based on image entropy
   */
-  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight) {
+  std::tuple<int, int> Crop(
+    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
+  ) {
     int left = 0;
     int top = 0;
     int const inWidth = image.width();
     int const inHeight = image.height();
     if (inWidth > outWidth) {
-      // Reduce width by repeatedly removing slices from edge with lowest entropy
+      // Reduce width by repeatedly removing slices from edge with lowest score
       int width = inWidth;
-      double leftEntropy = 0.0;
-      double rightEntropy = 0.0;
+      double leftScore = 0.0;
+      double rightScore = 0.0;
       // Max width of each slice
       int const maxSliceWidth = static_cast<int>(ceil((inWidth - outWidth) / 8.0));
       while (width > outWidth) {
         // Width of current slice
         int const slice = std::min(width - outWidth, maxSliceWidth);
-        if (leftEntropy == 0.0) {
-          // Update entropy of left slice
-          leftEntropy = Entropy(image.extract_area(left, 0, slice, inHeight));
+        if (leftScore == 0.0) {
+          // Update score of left slice
+          leftScore = strategy(image.extract_area(left, 0, slice, inHeight));
         }
-        if (rightEntropy == 0.0) {
-          // Update entropy of right slice
-          rightEntropy = Entropy(image.extract_area(width - slice - 1, 0, slice, inHeight));
+        if (rightScore == 0.0) {
+          // Update score of right slice
+          rightScore = strategy(image.extract_area(width - slice - 1, 0, slice, inHeight));
         }
-        // Keep slice with highest entropy
-        if (leftEntropy >= rightEntropy) {
+        // Keep slice with highest score
+        if (leftScore >= rightScore) {
           // Discard right slice
-          rightEntropy = 0.0;
+          rightScore = 0.0;
         } else {
           // Discard left slice
-          leftEntropy = 0.0;
+          leftScore = 0.0;
           left = left + slice;
         }
         width = width - slice;
       }
     }
     if (inHeight > outHeight) {
-      // Reduce height by repeatedly removing slices from edge with lowest entropy
+      // Reduce height by repeatedly removing slices from edge with lowest score
       int height = inHeight;
-      double topEntropy = 0.0;
-      double bottomEntropy = 0.0;
+      double topScore = 0.0;
+      double bottomScore = 0.0;
       // Max height of each slice
       int const maxSliceHeight = static_cast<int>(ceil((inHeight - outHeight) / 8.0));
       while (height > outHeight) {
         // Height of current slice
         int const slice = std::min(height - outHeight, maxSliceHeight);
-        if (topEntropy == 0.0) {
-          // Update entropy of top slice
-          topEntropy = Entropy(image.extract_area(0, top, inWidth, slice));
+        if (topScore == 0.0) {
+          // Update score of top slice
+          topScore = strategy(image.extract_area(0, top, inWidth, slice));
         }
-        if (bottomEntropy == 0.0) {
-          // Update entropy of bottom slice
-          bottomEntropy = Entropy(image.extract_area(0, height - slice - 1, inWidth, slice));
+        if (bottomScore == 0.0) {
+          // Update score of bottom slice
+          bottomScore = strategy(image.extract_area(0, height - slice - 1, inWidth, slice));
         }
-        // Keep slice with highest entropy
-        if (topEntropy >= bottomEntropy) {
+        // Keep slice with highest score
+        if (topScore >= bottomScore) {
           // Discard bottom slice
-          bottomEntropy = 0.0;
+          bottomScore = 0.0;
         } else {
           // Discard top slice
-          topEntropy = 0.0;
+          topScore = 0.0;
           top = top + slice;
         }
         height = height - slice;
@@ -360,13 +396,6 @@ namespace sharp {
     return std::make_tuple(left, top);
   }

-  /*
-    Calculate the Shannon entropy for an image
-  */
-  double Entropy(VImage image) {
-    return image.hist_find().hist_entropy();
-  }
-
   /*
     Insert a tile cache to prevent over-computation of any previous operations in the pipeline
   */
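For clarity, the slice-discarding loop above can be sketched for a single axis in JavaScript; this is an illustrative simplification, not part of the commit, with a hypothetical `score(offset, length)` standing in for the strategy functor:

```javascript
// Repeatedly score the two edge slices of the remaining region and
// discard the lower-scoring one until the target width is reached.
function cropLeft(inWidth, outWidth, score) {
  let left = 0;
  let width = inWidth;
  let leftScore = 0;
  let rightScore = 0;
  // Each discarded slice is at most 1/8 of the total excess
  const maxSlice = Math.ceil((inWidth - outWidth) / 8);
  while (width > outWidth) {
    const slice = Math.min(width - outWidth, maxSlice);
    // Re-score only the edge whose slice was discarded last time
    if (leftScore === 0) leftScore = score(left, slice);
    if (rightScore === 0) rightScore = score(left + width - slice, slice);
    if (leftScore >= rightScore) {
      rightScore = 0; // discard the right slice
    } else {
      leftScore = 0; // discard the left slice
      left += slice;
    }
    width -= slice;
  }
  return left; // left edge of the crop window
}
```

Caching the surviving edge's score means each iteration computes at most one new score.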


@@ -1,8 +1,10 @@
 #ifndef SRC_OPERATIONS_H_
 #define SRC_OPERATIONS_H_

-#include <tuple>
+#include <algorithm>
+#include <functional>
 #include <memory>
+#include <tuple>
 #include <vips/vips8>

 using vips::VImage;
@@ -63,14 +65,21 @@ namespace sharp {
   VImage Sharpen(VImage image, double const sigma, double const flat, double const jagged);

   /*
-    Calculate crop area based on image entropy
+    Crop strategy functors
   */
-  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight);
+  struct EntropyStrategy {
+    double operator()(VImage image);
+  };
+  struct AttentionStrategy {
+    double operator()(VImage image);
+  };

   /*
-    Calculate the Shannon entropy for an image
+    Calculate crop area based on given strategy (Entropy, Attention)
   */
-  double Entropy(VImage image);
+  std::tuple<int, int> Crop(
+    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
+  );

   /*
     Insert a tile cache to prevent over-computation of any previous operations in the pipeline


@@ -488,13 +488,18 @@ class PipelineWorker : public Nan::AsyncWorker {
         std::tie(left, top) = sharp::CalculateCrop(
           image.width(), image.height(), baton->width, baton->height, baton->crop
         );
-      } else {
+      } else if (baton->crop == 16) {
         // Entropy-based crop
-        std::tie(left, top) = sharp::EntropyCrop(image, baton->width, baton->height);
+        std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::EntropyStrategy());
+      } else {
+        // Attention-based crop
+        std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::AttentionStrategy());
       }
       int width = std::min(image.width(), baton->width);
       int height = std::min(image.height(), baton->height);
       image = image.extract_area(left, top, width, height);
+      baton->cropCalcLeft = left;
+      baton->cropCalcTop = top;
     }
   }
@@ -890,6 +895,10 @@ class PipelineWorker : public Nan::AsyncWorker {
     Set(info, New("width").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(width)));
     Set(info, New("height").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(height)));
     Set(info, New("channels").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->channels)));
+    if (baton->cropCalcLeft != -1 && baton->cropCalcTop != -1) {
+      Set(info, New("cropCalcLeft").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcLeft)));
+      Set(info, New("cropCalcTop").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcTop)));
+    }
     if (baton->bufferOutLength > 0) {
       // Pass ownership of output data to Buffer instance


@@ -46,6 +46,8 @@ struct PipelineBaton {
   int channels;
   Canvas canvas;
   int crop;
+  int cropCalcLeft;
+  int cropCalcTop;
   std::string kernel;
   std::string interpolator;
   double background[4];
@@ -112,6 +114,8 @@ struct PipelineBaton {
     channels(0),
     canvas(Canvas::CROP),
     crop(0),
+    cropCalcLeft(-1),
+    cropCalcTop(-1),
     flatten(false),
     negate(false),
     blurSigma(0.0),

(binary image fixture changed: 8.5 KiB before, 8.5 KiB after)

(binary image fixture changed: 6.0 KiB before, 6.0 KiB after)

test/saliency/README.md (new file)

@@ -0,0 +1,16 @@
# Crop strategy accuracy
1. Download the [MSRA Salient Object Database](http://research.microsoft.com/en-us/um/people/jiansun/SalientObject/salient_object.htm) (101MB).
2. Extract each image and its median human-labelled salient region.
3. Generate a test report of percentage deviance of top and left edges for each crop strategy, plus a naive centre gravity crop as "control".
```sh
git clone https://github.com/lovell/sharp.git
cd sharp/test/saliency
./download.sh
node report.js
python -m SimpleHTTPServer
```
The test report will then be available at
http://localhost:8000/report.html
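The percentage deviance can be sketched as the following function (`report.js` below computes this per edge and records it under the key `accuracy`):

```javascript
// Percentage deviance of a calculated crop edge from the median
// human-labelled edge, normalised by how far the crop window could
// move along that axis; 0 means a perfect match.
function deviance(labelledEdge, calculatedEdge, inputLength, salientLength) {
  return Math.round(Math.abs(labelledEdge - calculatedEdge) / (inputLength - salientLength) * 100);
}
```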

test/saliency/download.sh (new executable file)

@@ -0,0 +1,25 @@
#!/bin/sh

# Fetch and parse the MSRA Salient Object Database 'Image set B'
# http://research.microsoft.com/en-us/um/people/jiansun/salientobject/salient_object.htm

if [ ! -d Image ]; then
  if [ ! -f ImageB.zip ]; then
    echo "Downloading 5000 images (101MB)"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/ImageB.zip
  fi
  unzip ImageB.zip
fi

if [ ! -d UserData ]; then
  if [ ! -f UserDataB.zip ]; then
    echo "Downloading human-labelled regions"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/UserDataB.zip
  fi
  unzip UserDataB.zip
fi

if [ ! -f userData.json ]; then
  echo "Processing human-labelled regions"
  node userData.js
fi


@@ -0,0 +1,39 @@
'use strict';
/*jshint esversion: 6 */

const fs = require('fs');
const request = require('request');
const tumblr = require('tumblr.js');

const client = tumblr.createClient({
  consumer_key: '***',
  consumer_secret: '***'
});

const fetchImages = function(offset) {
  console.log(`Fetching offset ${offset}`);
  client.posts('humanae', {
    type: 'photo',
    offset: offset
  }, function (err, response) {
    if (err) throw err;
    if (response.posts.length > 0) {
      response.posts.forEach((post) => {
        const url = post.photos[0].alt_sizes
          .filter((image) => image.width === 100)
          .map((image) => image.url)
          [0];
        const filename = `./images/${post.id}.jpg`;
        try {
          fs.statSync(filename);
        } catch (err) {
          if (err.code === 'ENOENT') {
            request(url).pipe(fs.createWriteStream(filename));
          }
        }
      });
      fetchImages(offset + 20);
    }
  });
};

fetchImages(0);


@@ -0,0 +1,9 @@
{
  "name": "sharp-crop-strategy-attention-model-humanae",
  "version": "0.0.1",
  "private": true,
  "dependencies": {
    "request": "^2.75.0",
    "tumblr.js": "^1.1.1"
  }
}


@@ -0,0 +1,34 @@
'use strict';
/*jshint esversion: 6 */

const fs = require('fs');
const child_process = require('child_process');

const a = [];
const b = [];

fs.readdirSync('./images')
  .filter((file) => file.endsWith('.jpg'))
  .forEach((file) => {
    // Extract one pixel, avoiding first DCT block, and return value of A and B channels
    const command = `convert ./images/${file}[1x1+8+8] -colorspace lab -format "%[fx:u.g] %[fx:u.b]" info:`;
    const result = child_process.execSync(command, { encoding: 'utf8' });
    const ab = result.split(' ');
    a.push(ab[0]);
    b.push(ab[1]);
  });

a.sort((v1, v2) => v1 - v2);
b.sort((v1, v2) => v1 - v2);

// Convert from 0..1 to -128..128
const convert = function(v) {
  return Math.round(256 * (v - 0.5));
};

const threshold = Math.round(a.length / 100);
console.log(`Trimming lowest/highest ${threshold} for 98th percentile`);

// Ignore ~2% outliers
console.log(`a ${convert(a[threshold])} - ${convert(a[a.length - threshold])}`);
console.log(`b ${convert(b[threshold])} - ${convert(b[b.length - threshold])}`);

test/saliency/report.html (new file)

@@ -0,0 +1,25 @@
<html>
<head>
  <link href="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.css" rel="stylesheet" type="text/css">
  <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/4.2.6/d3.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.js"></script>
</head>
<body>
  <div id="accuracy"></div>
  <script>
    d3.json('report.json', function(err, data) {
      MG.data_graphic({
        title: 'Crop accuracy',
        data: data,
        target: '#accuracy',
        width: 960,
        height: 600,
        x_accessor: 'accuracy',
        x_label: '% Accuracy',
        y_accessor: ['centre', 'entropy', 'attention'],
        legend: ['Centre', 'Entropy', 'Attention']
      });
    });
  </script>
</body>
</html>

test/saliency/report.js (new file)

@@ -0,0 +1,69 @@
'use strict';
/*jshint esversion: 6 */

const os = require('os');
const fs = require('fs');
const path = require('path');
const async = require('async');
const sharp = require('../../');

const crops = {
  centre: sharp.gravity.centre,
  entropy: sharp.strategy.entropy,
  attention: sharp.strategy.attention
};
const concurrency = os.cpus().length;

const scores = {};

const incrementScore = function(accuracy, crop) {
  if (typeof scores[accuracy] === 'undefined') {
    scores[accuracy] = {};
  }
  if (typeof scores[accuracy][crop] === 'undefined') {
    scores[accuracy][crop] = 0;
  }
  scores[accuracy][crop]++;
};

const userData = require('./userData.json');
const files = Object.keys(userData);

async.eachLimit(files, concurrency, function(file, done) {
  const filename = path.join(__dirname, 'Image', file);
  const salientWidth = userData[file].right - userData[file].left;
  const salientHeight = userData[file].bottom - userData[file].top;
  sharp(filename).metadata(function(err, metadata) {
    if (err) console.log(err);
    async.each(Object.keys(crops), function(crop, done) {
      async.parallel([
        // Left edge accuracy
        function(done) {
          sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function(err, data, info) {
            const accuracy = Math.round(Math.abs(userData[file].left - info.cropCalcLeft) / (metadata.width - salientWidth) * 100);
            incrementScore(accuracy, crop);
            done();
          });
        },
        // Top edge accuracy
        function(done) {
          sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function(err, data, info) {
            const accuracy = Math.round(Math.abs(userData[file].top - info.cropCalcTop) / (metadata.height - salientHeight) * 100);
            incrementScore(accuracy, crop);
            done();
          });
        }
      ], done);
    }, done);
  });
}, function() {
  const report = [];
  Object.keys(scores).forEach(function(accuracy) {
    report.push(
      Object.assign({
        accuracy: parseInt(accuracy, 10)
      }, scores[accuracy])
    );
  });
  fs.writeFileSync('report.json', JSON.stringify(report, null, 2));
});

test/saliency/userData.js (new file)

@@ -0,0 +1,69 @@
'use strict';
/*jshint esversion: 6, loopfunc: true */

const fs = require('fs');
const path = require('path');

const userDataDir = 'UserData';
const images = {};

const median = function(values) {
  values.sort(function(a, b) {
    return a - b;
  });
  const half = Math.floor(values.length / 2);
  if (values.length % 2) {
    return values[half];
  } else {
    return Math.floor((values[half - 1] + values[half]) / 2);
  }
};

// List of files
fs.readdirSync(userDataDir).forEach(function(file) {
  // Contents of file
  const lines = fs.readFileSync(path.join(userDataDir, file), {encoding: 'utf-8'}).split(/\r\n/);
  // First line = number of entries
  const entries = parseInt(lines[0], 10);
  // Verify number of entries
  if (entries !== 500) {
    throw new Error('Expecting 500 images in ' + file + ', found ' + entries);
  }
  // Keep track of which line we're on
  let linePos = 2;
  for (let i = 0; i < entries; i++) {
    // Get data for current image
    const filename = lines[linePos].replace(/\\/, path.sep);
    linePos = linePos + 2;
    const regions = lines[linePos].split('; ');
    linePos = linePos + 2;
    // Parse human-labelled regions for min/max coords
    const lefts = [], tops = [], rights = [], bottoms = [];
    regions.forEach(function(region) {
      if (region.indexOf(' ') !== -1) {
        const coords = region.split(' ');
        lefts.push(parseInt(coords[0], 10));
        tops.push(parseInt(coords[1], 10));
        rights.push(parseInt(coords[2], 10));
        bottoms.push(parseInt(coords[3], 10));
      }
    });
    // Add image
    images[filename] = {
      left: median(lefts),
      top: median(tops),
      right: median(rights),
      bottom: median(bottoms)
    };
  }
});

// Verify number of images found
const imageCount = Object.keys(images).length;
if (imageCount === 5000) {
  // Write output
  fs.writeFileSync('userData.json', JSON.stringify(images, null, 2));
} else {
  throw new Error('Expecting 5000 images, found ' + imageCount);
}


@@ -29,6 +29,9 @@ describe('cpplint', function() {
       },
       whitespace: {
         parens: false
+      },
+      runtime: {
+        indentation_namespace: false
       }
     }
   }, function(err, report) {


@@ -172,7 +172,9 @@ describe('Crop', function() {
           assert.strictEqual(3, info.channels);
           assert.strictEqual(80, info.width);
           assert.strictEqual(320, info.height);
-          fixtures.assertSimilar(fixtures.expected('crop-entropy.jpg'), data, done);
+          assert.strictEqual(250, info.cropCalcLeft);
+          assert.strictEqual(0, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
         });
     });
@@ -186,10 +188,47 @@ describe('Crop', function() {
           assert.strictEqual(4, info.channels);
           assert.strictEqual(320, info.width);
           assert.strictEqual(80, info.height);
-          fixtures.assertSimilar(fixtures.expected('crop-entropy.png'), data, done);
+          assert.strictEqual(0, info.cropCalcLeft);
+          assert.strictEqual(80, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
         });
     });
   });

+  describe('Attention strategy', function() {
+    it('JPEG', function(done) {
+      sharp(fixtures.inputJpgWithCmykProfile)
+        .resize(80, 320)
+        .crop(sharp.strategy.attention)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('jpeg', info.format);
+          assert.strictEqual(3, info.channels);
+          assert.strictEqual(80, info.width);
+          assert.strictEqual(320, info.height);
+          assert.strictEqual(250, info.cropCalcLeft);
+          assert.strictEqual(0, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
+        });
+    });
+
+    it('PNG', function(done) {
+      sharp(fixtures.inputPngWithTransparency)
+        .resize(320, 80)
+        .crop(sharp.strategy.attention)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('png', info.format);
+          assert.strictEqual(4, info.channels);
+          assert.strictEqual(320, info.width);
+          assert.strictEqual(80, info.height);
+          assert.strictEqual(0, info.cropCalcLeft);
+          assert.strictEqual(80, info.cropCalcTop);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
+        });
+    });
+  });
+
 });