Switch to the libvips crop strategy implementations

Lovell Fuller 2017-05-06 14:46:28 +01:00
parent 2f534dc01c
commit 36078f9903
8 changed files with 15 additions and 156 deletions

View File

@@ -22,6 +22,9 @@ Requires libvips v8.5.2.
   [#607](https://github.com/lovell/sharp/issues/607)
   [@puzrin](https://github.com/puzrin)
+* Switch to the libvips implementation of "attention" and "entropy" crop strategies.
+  [#727](https://github.com/lovell/sharp/issues/727)
 * Improve performance and accuracy of nearest neighbour integral upsampling.
   [#752](https://github.com/lovell/sharp/issues/752)
   [@MrIbby](https://github.com/MrIbby)
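
For context, both strategies now resolve to a single call into libvips' smartcrop operation via the vips8 C++ binding, the same call this commit adds to pipeline.cc below. A minimal standalone sketch of that underlying call (file names are placeholders, not part of this commit):

#include <vips/vips8>

using vips::VImage;

int main(int argc, char **argv) {
  // Initialise libvips (smartcrop requires v8.5+)
  if (VIPS_INIT(argv[0])) vips_error_exit(NULL);
  VImage input = VImage::new_from_file("in.jpg");
  // Ask libvips for the most "interesting" 80x320 region; the "interesting"
  // option selects VIPS_INTERESTING_ENTROPY or VIPS_INTERESTING_ATTENTION.
  VImage cropped = input.smartcrop(80, 320,
    VImage::option()->set("interesting", VIPS_INTERESTING_ATTENTION));
  cropped.write_to_file("out.jpg");
  vips_shutdown();
  return 0;
}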

View File

@@ -267,118 +267,6 @@ namespace sharp {
     }
   }
 
-  /*
-    Calculate the Shannon entropy
-  */
-  double EntropyStrategy::operator()(VImage image) {
-    return image.hist_find().hist_entropy();
-  }
-
-  /*
-    Calculate the intensity of edges, skin tone and saturation
-  */
-  double AttentionStrategy::operator()(VImage image) {
-    // Flatten RGBA onto a mid-grey background
-    if (image.bands() == 4 && HasAlpha(image)) {
-      double const midgrey = sharp::Is16Bit(image.interpretation()) ? 32768.0 : 128.0;
-      std::vector<double> background { midgrey, midgrey, midgrey };
-      image = image.flatten(VImage::option()->set("background", background));
-    }
-    // Convert to LAB colourspace
-    VImage lab = image.colourspace(VIPS_INTERPRETATION_LAB);
-    VImage l = lab[0];
-    VImage a = lab[1];
-    VImage b = lab[2];
-    // Edge detect luminosity with the Sobel operator
-    VImage sobel = vips::VImage::new_matrixv(3, 3,
-      -1.0, 0.0, 1.0,
-      -2.0, 0.0, 2.0,
-      -1.0, 0.0, 1.0);
-    VImage edges = l.conv(sobel).abs() + l.conv(sobel.rot90()).abs();
-    // Skin tone chroma thresholds trained with http://humanae.tumblr.com/
-    VImage skin = (a >= 3) & (a <= 22) & (b >= 4) & (b <= 31);
-    // Chroma >~50% saturation
-    VImage lch = lab.colourspace(VIPS_INTERPRETATION_LCH);
-    VImage c = lch[1];
-    VImage saturation = c > 60;
-    // Find maximum in combined saliency mask
-    VImage mask = edges + skin + saturation;
-    return mask.max();
-  }
-
-  /*
-    Calculate crop area based on the given strategy
-  */
-  std::tuple<int, int> Crop(
-    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
-  ) {
-    int left = 0;
-    int top = 0;
-    int const inWidth = image.width();
-    int const inHeight = image.height();
-    if (inWidth > outWidth) {
-      // Reduce width by repeatedly removing the slice from the edge with the lowest score
-      int width = inWidth;
-      double leftScore = 0.0;
-      double rightScore = 0.0;
-      // Max width of each slice
-      int const maxSliceWidth = static_cast<int>(ceil((inWidth - outWidth) / 8.0));
-      while (width > outWidth) {
-        // Width of current slice
-        int const slice = std::min(width - outWidth, maxSliceWidth);
-        if (leftScore == 0.0) {
-          // Update score of left slice
-          leftScore = strategy(image.extract_area(left, 0, slice, inHeight));
-        }
-        if (rightScore == 0.0) {
-          // Update score of right slice
-          rightScore = strategy(image.extract_area(width - slice - 1, 0, slice, inHeight));
-        }
-        // Keep the slice with the highest score
-        if (leftScore >= rightScore) {
-          // Discard right slice
-          rightScore = 0.0;
-        } else {
-          // Discard left slice
-          leftScore = 0.0;
-          left = left + slice;
-        }
-        width = width - slice;
-      }
-    }
-    if (inHeight > outHeight) {
-      // Reduce height by repeatedly removing the slice from the edge with the lowest score
-      int height = inHeight;
-      double topScore = 0.0;
-      double bottomScore = 0.0;
-      // Max height of each slice
-      int const maxSliceHeight = static_cast<int>(ceil((inHeight - outHeight) / 8.0));
-      while (height > outHeight) {
-        // Height of current slice
-        int const slice = std::min(height - outHeight, maxSliceHeight);
-        if (topScore == 0.0) {
-          // Update score of top slice
-          topScore = strategy(image.extract_area(0, top, inWidth, slice));
-        }
-        if (bottomScore == 0.0) {
-          // Update score of bottom slice
-          bottomScore = strategy(image.extract_area(0, height - slice - 1, inWidth, slice));
-        }
-        // Keep the slice with the highest score
-        if (topScore >= bottomScore) {
-          // Discard bottom slice
-          bottomScore = 0.0;
-        } else {
-          // Discard top slice
-          topScore = 0.0;
-          top = top + slice;
-        }
-        height = height - slice;
-      }
-    }
-    return std::make_tuple(left, top);
-  }
-
   /*
     Insert a tile cache to prevent over-computation of any previous operations in the pipeline
   */
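
For reference, hist_find() builds the image histogram and hist_entropy() reduces it to the Shannon entropy, so with p_i the normalised frequency of histogram bin i the removed EntropyStrategy returned

  H = -\sum_i p_i \log_2 p_i

measured in bits. The removed AttentionStrategy instead took the maximum of a combined saliency mask: Sobel edge strength on the L channel, a skin-tone window on the a/b chroma channels, and a high-chroma (saturation) threshold in LCh space. Per the changelog entry above, equivalent "entropy" and "attention" heuristics are now selected inside libvips via VIPS_INTERESTING_ENTROPY and VIPS_INTERESTING_ATTENTION.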

View File

@ -72,22 +72,6 @@ namespace sharp {
*/ */
VImage Sharpen(VImage image, double const sigma, double const flat, double const jagged); VImage Sharpen(VImage image, double const sigma, double const flat, double const jagged);
/*
Crop strategy functors
*/
struct EntropyStrategy {
double operator()(VImage image);
};
struct AttentionStrategy {
double operator()(VImage image);
};
/*
Calculate crop area based on given strategy (Entropy, Attention)
*/
std::tuple<int, int> Crop(
VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy);
/* /*
Insert a tile cache to prevent over-computation of any previous operations in the pipeline Insert a tile cache to prevent over-computation of any previous operations in the pipeline
*/ */
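
The removed declarations formed a small strategy pattern: Crop() accepted any std::function<double(VImage)> scorer, with the two functors above as the built-in choices. A hypothetical scorer illustrating that contract (MeanLuminance is invented for illustration and was never part of sharp):

#include <vips/vips8>
#include <cstdio>
#include <functional>

using vips::VImage;

// Hypothetical scorer: rate a region by its mean luminance.
// Any callable of this shape could have driven the removed sharp::Crop().
static double MeanLuminance(VImage region) {
  return region.colourspace(VIPS_INTERPRETATION_B_W)[0].avg();
}

int main(int argc, char **argv) {
  if (VIPS_INIT(argv[0])) vips_error_exit(NULL);
  std::function<double(VImage)> strategy = MeanLuminance;
  VImage image = VImage::new_from_file("in.jpg");
  // With the removed API this would have been:
  //   std::tie(left, top) = sharp::Crop(image, 80, 320, strategy);
  std::printf("whole-image score: %f\n", strategy(image));
  vips_shutdown();
  return 0;
}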

View File

@@ -509,24 +509,20 @@ class PipelineWorker : public Nan::AsyncWorker {
            ->set("background", background));
        } else if (baton->canvas != Canvas::IGNORE_ASPECT) {
          // Crop/max/min
-          int left;
-          int top;
          if (baton->crop < 9) {
            // Gravity-based crop
+            int left;
+            int top;
            std::tie(left, top) = sharp::CalculateCrop(
              image.width(), image.height(), baton->width, baton->height, baton->crop);
-          } else if (baton->crop == 16) {
-            // Entropy-based crop
-            std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::EntropyStrategy());
+            int width = std::min(image.width(), baton->width);
+            int height = std::min(image.height(), baton->height);
+            image = image.extract_area(left, top, width, height);
          } else {
-            // Attention-based crop
-            std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::AttentionStrategy());
+            // Attention-based or Entropy-based crop
+            image = image.smartcrop(baton->width, baton->height, VImage::option()
+              ->set("interesting", baton->crop == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION));
          }
-          int width = std::min(image.width(), baton->width);
-          int height = std::min(image.height(), baton->height);
-          image = image.extract_area(left, top, width, height);
-          baton->cropCalcLeft = left;
-          baton->cropCalcTop = top;
        }
      }
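
For comparison with the new smartcrop branch, the retained gravity branch still derives fixed offsets from the requested gravity. A simplified sketch of what CalculateCrop returns for the default centre gravity (an assumption for illustration; the real sharp::CalculateCrop handles all nine gravity positions):

#include <algorithm>
#include <tuple>

// Assumed behaviour for centre gravity: centre the output box inside
// the input, clamping the offsets at the top-left corner.
std::tuple<int, int> CentreCrop(int inWidth, int inHeight, int outWidth, int outHeight) {
  int const left = std::max(0, (inWidth - outWidth) / 2);
  int const top = std::max(0, (inHeight - outHeight) / 2);
  return std::make_tuple(left, top);
}

Note that the smartcrop branch no longer populates baton->cropCalcLeft and baton->cropCalcTop, which is why the cropCalcLeft/cropCalcTop assertions disappear from the tests below.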

Binary file not shown — added (4.4 KiB)

Binary file not shown — added (5.9 KiB)

Binary file not shown — removed (8.5 KiB)

View File

@@ -161,7 +161,7 @@ describe('Crop', function () {
   describe('Entropy-based strategy', function () {
     it('JPEG', function (done) {
-      sharp(fixtures.inputJpgWithCmykProfile)
+      sharp(fixtures.inputJpg)
         .resize(80, 320)
         .crop(sharp.strategy.entropy)
         .toBuffer(function (err, data, info) {
@@ -170,9 +170,7 @@ describe('Crop', function () {
           assert.strictEqual(3, info.channels);
           assert.strictEqual(80, info.width);
           assert.strictEqual(320, info.height);
-          assert.strictEqual(250, info.cropCalcLeft);
-          assert.strictEqual(0, info.cropCalcTop);
-          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy-entropy.jpg'), data, done);
         });
     });
@@ -186,8 +184,6 @@ describe('Crop', function () {
           assert.strictEqual(4, info.channels);
           assert.strictEqual(320, info.width);
           assert.strictEqual(80, info.height);
-          assert.strictEqual(0, info.cropCalcLeft);
-          assert.strictEqual(80, info.cropCalcTop);
           fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
         });
     });
@@ -202,8 +198,6 @@ describe('Crop', function () {
           assert.strictEqual(4, info.channels);
           assert.strictEqual(320, info.width);
           assert.strictEqual(80, info.height);
-          assert.strictEqual(0, info.cropCalcLeft);
-          assert.strictEqual(80, info.cropCalcTop);
           fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
         });
     });
@@ -211,7 +205,7 @@ describe('Crop', function () {
   describe('Attention strategy', function () {
     it('JPEG', function (done) {
-      sharp(fixtures.inputJpgWithCmykProfile)
+      sharp(fixtures.inputJpg)
         .resize(80, 320)
         .crop(sharp.strategy.attention)
         .toBuffer(function (err, data, info) {
@@ -220,9 +214,7 @@ describe('Crop', function () {
           assert.strictEqual(3, info.channels);
           assert.strictEqual(80, info.width);
           assert.strictEqual(320, info.height);
-          assert.strictEqual(250, info.cropCalcLeft);
-          assert.strictEqual(0, info.cropCalcTop);
-          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
+          fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
         });
     });
@@ -236,8 +228,6 @@ describe('Crop', function () {
           assert.strictEqual(4, info.channels);
           assert.strictEqual(320, info.width);
           assert.strictEqual(80, info.height);
-          assert.strictEqual(0, info.cropCalcLeft);
-          assert.strictEqual(80, info.cropCalcTop);
           fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
         });
     });
@@ -252,8 +242,6 @@ describe('Crop', function () {
           assert.strictEqual(4, info.channels);
           assert.strictEqual(320, info.width);
           assert.strictEqual(80, info.height);
-          assert.strictEqual(0, info.cropCalcLeft);
-          assert.strictEqual(80, info.cropCalcTop);
           fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
         });
     });