Mirror of https://github.com/lovell/sharp.git
synced 2026-02-09 08:06:14 +01:00

Compare commits

12 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1051fcd278 | |
| | 1a0030e086 | |
| | 114ce370ed | |
| | 207dcbeaa4 | |
| | d4a1722863 | |
| | 18b9991fe7 | |
| | 739178dd74 | |
| | dcd1392a85 | |
| | 07d66da57b | |
| | 28ce33feb3 | |
| | 86039a3f2b | |
| | af9d09f8ae | |
.gitignore (vendored, 4 changed lines)

@@ -4,6 +4,10 @@ coverage
test/bench/node_modules
test/fixtures/output*
test/leak/libvips.supp
test/saliency/report.json
test/saliency/Image*
test/saliency/[Uu]serData*
!test/saliency/userData.js
lib
include
packaging/libvips*

@@ -1,3 +1,4 @@
node_modules
test/bench/node_modules
test/saliency/humanae/node_modules
coverage
README.md (41 changed lines)

@@ -1,5 +1,9 @@
# sharp

```sh
npm install sharp
```

The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.

@@ -16,6 +20,43 @@ rotation, extraction, compositing and gamma correction are available.
OS X, Windows (x64), Linux (x64, ARM) systems do not require
the installation of any external runtime dependencies.

## Examples

```javascript
import sharp from 'sharp';
```

```javascript
sharp(inputBuffer)
  .resize(320, 240)
  .toFile('output.webp', (err, info) => ... );
```

```javascript
sharp('input.jpg')
  .rotate()
  .resize(200)
  .toBuffer()
  .then( data => ... )
  .catch( err => ... );
```

```javascript
const roundedCorners = new Buffer(
  '<svg><rect x="0" y="0" width="200" height="200" rx="50" ry="50"/></svg>'
);

const roundedCornerResizer =
  sharp()
    .resize(200, 200)
    .overlayWith(roundedCorners, { cutout: true })
    .png();

readableStream
  .pipe(roundedCornerResizer)
  .pipe(writableStream);
```

[](https://coveralls.io/r/lovell/sharp?branch=master)

### Documentation
binding.gyp (17 changed lines)

@@ -51,7 +51,6 @@
      ],
      # Nested variables "pattern" borrowed from http://src.chromium.org/viewvc/chrome/trunk/src/build/common.gypi
      'variables': {
        'sharp_cxx11%': '0',
        'variables': {
          'variables': {
            'conditions': [

@@ -92,10 +91,6 @@
        'src/sharp.cc',
        'src/utilities.cc'
      ],
      'defines': [
        '_GLIBCXX_USE_CXX11_ABI=<(sharp_cxx11)',
        '_ALLOW_KEYWORD_MACROS'
      ],
      'include_dirs': [
        '<!(node -e "require(\'nan\')")'
      ],

@@ -108,6 +103,12 @@
          'libraries': ['<!@(PKG_CONFIG_PATH="<(pkg_config_path)" pkg-config --libs --static vips-cpp)']
        }, {
          'libraries': ['<!@(PKG_CONFIG_PATH="<(pkg_config_path)" pkg-config --libs vips-cpp)']
        }],
        ['OS == "linux"', {
          'defines': [
            # Inspect libvips-cpp.so to determine which C++11 ABI version was used and set _GLIBCXX_USE_CXX11_ABI accordingly. This is quite horrible.
            '_GLIBCXX_USE_CXX11_ABI=<!(if readelf -Ws "$(PKG_CONFIG_PATH="<(pkg_config_path)" pkg-config --libs-only-L vips-cpp | cut -c 3- | sed -e "s/^$/\/usr\/lib/")/libvips-cpp.so" | c++filt | grep -qF __cxx11;then echo "1";else echo "0";fi)'
          ]
        }]
      ]
    }, {

@@ -119,6 +120,9 @@
      ],
      'conditions': [
        ['OS == "win"', {
          'defines': [
            '_ALLOW_KEYWORD_MACROS'
          ],
          'libraries': [
            '../lib/libvips.lib',
            '../lib/libglib-2.0.lib',

@@ -142,6 +146,9 @@
      'variables': {
        'download_vips': '<!(LDD_VERSION="<!(ldd --version 2>&1 || true)" node -e "require(\'./binding\').download_vips()")'
      },
      'defines': [
        '_GLIBCXX_USE_CXX11_ABI=0'
      ],
      'libraries': [
        '../lib/libvips-cpp.so',
        '../lib/libvips.so',
binding.js (18 changed lines)

@@ -17,6 +17,16 @@ var minimumLibvipsVersion = process.env.npm_package_config_libvips || require('.

var vipsHeaderPath = path.join(__dirname, 'include', 'vips', 'vips.h');

var platform = process.env.npm_config_platform || process.platform;

var arch = process.env.npm_config_arch || process.arch;
var arm_version = process.env.npm_config_armv || process.config.variables.arm_version;

if (arch === 'arch64' || arch === 'armhf') {
  arch = 'arm';
  if (arch === 'arch64') arm_version = '8';
}

// -- Helpers

// Does this file exist?

@@ -47,9 +57,9 @@ var unpack = function(tarPath, done) {
};

var platformId = function() {
  var id = [process.platform, process.arch].join('-');
  if (process.arch === 'arm') {
    switch(process.config.variables.arm_version) {
  var id = [platform, arch].join('-');
  if (arch === 'arm') {
    switch(arm_version) {
      case '8':
        id = id + 'v8';
        break;

@@ -79,7 +89,7 @@ module.exports.download_vips = function() {
  // Has vips been installed locally?
  if (!isFile(vipsHeaderPath)) {
    // Ensure Intel 64-bit or ARM
    if (process.arch === 'ia32') {
    if (arch === 'ia32') {
      error('Intel Architecture 32-bit systems require manual installation - please see http://sharp.dimens.io/en/stable/install/');
    }
    // Ensure glibc >= 2.15
@@ -175,12 +175,11 @@ Possible attributes of `sharp.gravity` are
`north`, `northeast`, `east`, `southeast`, `south`,
`southwest`, `west`, `northwest`, `center` and `centre`.

Possible attributes of the experimental `sharp.strategy` are:
The experimental strategy-based approach resizes so one dimension is at its target length
then repeatedly ranks edge regions, discarding the edge with the lowest score based on the selected strategy.

* `entropy`: resize so one dimension is at its target size
then repeatedly remove pixels from the edge with the lowest
[Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29)
until it too reaches the target size.
* `entropy`: focus on the region with the highest [Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29).
* `attention`: focus on the region with the highest luminance frequency, colour saturation and presence of skin tones.

The default crop option is a `center`/`centre` gravity.
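Both strategies are selected through the existing `crop()` method, as the tests and benchmarks in this changeset do; a minimal usage sketch (input and output file names are placeholders):

```javascript
const sharp = require('sharp');

// Entropy strategy: keep the region with the highest Shannon entropy
sharp('input.jpg')
  .resize(200, 200)
  .crop(sharp.strategy.entropy)
  .toFile('entropy.jpg');

// Attention strategy: favour edges, saturated colour and skin tones
sharp('input.jpg')
  .resize(200, 200)
  .crop(sharp.strategy.attention)
  .toFile('attention.jpg');
```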
@@ -4,6 +4,31 @@

Requires libvips v8.3.3

#### v0.16.2 - 22<sup>nd</sup> October 2016

* Restrict readelf usage to Linux only when detecting global libvips version.
  [#602](https://github.com/lovell/sharp/issues/602)
  [@caoko](https://github.com/caoko)

#### v0.16.1 - 13<sup>th</sup> October 2016

* C++11 ABI version is now auto-detected, remove sharp-cxx11 installation flag.

* Add experimental 'attention' crop strategy.
  [#295](https://github.com/lovell/sharp/issues/295)

* Include .node extension for Meteor's require() implementation.
  [#537](https://github.com/lovell/sharp/issues/537)
  [@isjackwild](https://github.com/isjackwild)

* Ensure convolution kernel scale is clamped to a minimum value of 1.
  [#561](https://github.com/lovell/sharp/issues/561)
  [@abagshaw](https://github.com/abagshaw)

* Correct calculation of y-axis placement when overlaying image at a fixed point.
  [#566](https://github.com/lovell/sharp/issues/566)
  [@Nateowami](https://github.com/Nateowami)

#### v0.16.0 - 18<sup>th</sup> August 2016

* Add pre-compiled libvips for OS X, ARMv7 and ARMv8.
@@ -22,24 +22,15 @@ Most recent Linux-based operating systems with glibc running on x64 and ARMv6+ C
* Debian 7, 8
* Ubuntu 12.04, 14.04, 16.04
* Centos 7
* Fedora 22, 23
* Fedora 23, 24
* openSUSE 13.2
* Archlinux
* Raspbian Jessie
* Amazon Linux 2016.03
* Amazon Linux 2016.03, 2016.09

To use your own version of libvips instead of the provided binaries, make sure it is
at least the version listed under `config.libvips` in the `package.json` file and
that it can be located using `pkg-config --modversion vips-cpp`.

There are [changes in the C++11 ABI](https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html)
when using v5.1+ of the `g++` compiler.
If you have installed `libvips-dev` via package manager on an OS such as Debian testing/unstable,
you can pass the required value of the `_GLIBCXX_USE_CXX11_ABI` macro using the `--sharp-cxx11` flag.

```sh
npm install --sharp-cxx11=1
```
To use a globally-installed version of libvips instead of the provided binaries,
make sure it is at least the version listed under `config.libvips` in the `package.json` file
and that it can be located using `pkg-config --modversion vips-cpp`.

If you are using non-standard paths (anything other than `/usr` or `/usr/local`),
you might need to set `PKG_CONFIG_PATH` during `npm install`
index.js (30 changed lines)

@@ -9,7 +9,7 @@ var semver = require('semver');
var color = require('color');
var BluebirdPromise = require('bluebird');

var sharp = require('./build/Release/sharp');
var sharp = require('./build/Release/sharp.node');

// Versioning
var versions = {

@@ -260,7 +260,8 @@ module.exports.gravity = {

// Strategies for automagic behaviour
module.exports.strategy = {
  entropy: 16
  entropy: 16,
  attention: 17
};

/*

@@ -277,7 +278,7 @@ Sharp.prototype.crop = function(crop) {
  } else if (isString(crop) && isInteger(module.exports.gravity[crop])) {
    // Gravity (string)
    this.options.crop = module.exports.gravity[crop];
  } else if (isInteger(crop) && crop === module.exports.strategy.entropy) {
  } else if (isInteger(crop) && crop >= module.exports.strategy.entropy) {
    // Strategy
    this.options.crop = crop;
  } else {

@@ -503,22 +504,25 @@ Sharp.prototype.blur = function(sigma) {
  Convolve the image with a kernel.
*/
Sharp.prototype.convolve = function(kernel) {
  if (!isDefined(kernel) || !isDefined(kernel.kernel) ||
      !isDefined(kernel.width) || !isDefined(kernel.height) ||
      !inRange(kernel.width,3,1001) || !inRange(kernel.height,3,1001) ||
  if (!isObject(kernel) || !Array.isArray(kernel.kernel) ||
      !isInteger(kernel.width) || !isInteger(kernel.height) ||
      !inRange(kernel.width, 3, 1001) || !inRange(kernel.height, 3, 1001) ||
      kernel.height * kernel.width != kernel.kernel.length
  ) {
    // must pass in a kernel
    throw new Error('Invalid convolution kernel');
  }
  if(!isDefined(kernel.scale)) {
    var sum = 0;
    kernel.kernel.forEach(function(e) {
      sum += e;
    });
    kernel.scale = sum;
  // Default scale is sum of kernel values
  if (!isInteger(kernel.scale)) {
    kernel.scale = kernel.kernel.reduce(function(a, b) {
      return a + b;
    }, 0);
  }
  if(!isDefined(kernel.offset)) {
  // Clamp scale to a minimum value of 1
  if (kernel.scale < 1) {
    kernel.scale = 1;
  }
  if (!isInteger(kernel.offset)) {
    kernel.offset = 0;
  }
  this.options.convKernel = kernel;
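As the rewritten `convolve()` above shows, `scale` now defaults to the sum of the kernel values and is clamped to a minimum of 1, while `offset` defaults to 0; a minimal sketch (the 3x3 kernel values and file names are illustrative only):

```javascript
const sharp = require('sharp');

// No scale given: it defaults to the kernel sum (here 1) and is
// clamped to a minimum of 1; offset defaults to 0
sharp('input.jpg')
  .convolve({
    width: 3,
    height: 3,
    kernel: [ -2, -1, 0,
              -1,  1, 1,
               0,  1, 2 ]
  })
  .toFile('output.jpg');
```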
package.json (14 changed lines)

@@ -1,6 +1,6 @@
{
  "name": "sharp",
  "version": "0.16.0",
  "version": "0.16.2",
  "author": "Lovell Fuller <npm@lovell.info>",
  "contributors": [
    "Pierre Inglebert <pierre.inglebert@gmail.com>",

@@ -59,21 +59,21 @@
    "vips"
  ],
  "dependencies": {
    "bluebird": "^3.4.1",
    "bluebird": "^3.4.6",
    "color": "^0.11.3",
    "nan": "^2.4.0",
    "semver": "^5.3.0",
    "request": "^2.74.0",
    "request": "^2.75.0",
    "tar": "^2.2.1"
  },
  "devDependencies": {
    "async": "^2.0.1",
    "async": "^2.1.2",
    "bufferutil": "^1.2.1",
    "coveralls": "^2.11.12",
    "coveralls": "^2.11.14",
    "exif-reader": "^1.0.1",
    "icc": "^0.0.2",
    "istanbul": "^0.4.4",
    "mocha": "^3.0.0",
    "istanbul": "^0.4.5",
    "mocha": "^3.1.2",
    "mocha-jshint": "^2.3.1",
    "node-cpplint": "^0.4.0",
    "rimraf": "^2.5.4",
@@ -24,7 +24,7 @@ VERSION_GSF=1.14.39
VERSION_EXIF=0.6.21
VERSION_LCMS2=2.8
VERSION_JPEG=1.5.0
VERSION_PNG16=1.6.23
VERSION_PNG16=1.6.25
VERSION_WEBP=0.5.1
VERSION_TIFF=4.0.6
VERSION_ORC=0.4.25
@@ -391,7 +391,7 @@ namespace sharp {

  if(y >= 0 && y < (inHeight - outHeight)) {
    top = y;
  } else if(x >= (inHeight - outHeight)) {
  } else if(y >= (inHeight - outHeight)) {
    top = inHeight - outHeight;
  }
@@ -1,6 +1,7 @@
#include <algorithm>
#include <tuple>
#include <functional>
#include <memory>
#include <tuple>
#include <vips/vips8>

#include "common.h"

@@ -289,69 +290,104 @@ namespace sharp {
    }
  }

  /*
    Calculate the Shannon entropy
  */
  double EntropyStrategy::operator()(VImage image) {
    return image.hist_find().hist_entropy();
  }

  /*
    Calculate the intensity of edges, skin tone and saturation
  */
  double AttentionStrategy::operator()(VImage image) {
    // Convert to LAB colourspace
    VImage lab = image.colourspace(VIPS_INTERPRETATION_LAB);
    VImage l = lab[0];
    VImage a = lab[1];
    VImage b = lab[2];
    // Edge detect luminosity with the Sobel operator
    VImage sobel = vips::VImage::new_matrixv(3, 3,
      -1.0, 0.0, 1.0,
      -2.0, 0.0, 2.0,
      -1.0, 0.0, 1.0);
    VImage edges = l.conv(sobel).abs() + l.conv(sobel.rot90()).abs();
    // Skin tone chroma thresholds trained with http://humanae.tumblr.com/
    VImage skin = (a >= 3) & (a <= 22) & (b >= 4) & (b <= 31);
    // Chroma >~50% saturation
    VImage lch = lab.colourspace(VIPS_INTERPRETATION_LCH);
    VImage c = lch[1];
    VImage saturation = c > 60;
    // Find maximum in combined saliency mask
    VImage mask = edges + skin + saturation;
    return mask.max();
  }

  /*
    Calculate crop area based on image entropy
  */
  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight) {
  std::tuple<int, int> Crop(
    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
  ) {
    int left = 0;
    int top = 0;
    int const inWidth = image.width();
    int const inHeight = image.height();
    if (inWidth > outWidth) {
      // Reduce width by repeated removing slices from edge with lowest entropy
      // Reduce width by repeated removing slices from edge with lowest score
      int width = inWidth;
      double leftEntropy = 0.0;
      double rightEntropy = 0.0;
      double leftScore = 0.0;
      double rightScore = 0.0;
      // Max width of each slice
      int const maxSliceWidth = static_cast<int>(ceil((inWidth - outWidth) / 8.0));
      while (width > outWidth) {
        // Width of current slice
        int const slice = std::min(width - outWidth, maxSliceWidth);
        if (leftEntropy == 0.0) {
          // Update entropy of left slice
          leftEntropy = Entropy(image.extract_area(left, 0, slice, inHeight));
        if (leftScore == 0.0) {
          // Update score of left slice
          leftScore = strategy(image.extract_area(left, 0, slice, inHeight));
        }
        if (rightEntropy == 0.0) {
          // Update entropy of right slice
          rightEntropy = Entropy(image.extract_area(width - slice - 1, 0, slice, inHeight));
        if (rightScore == 0.0) {
          // Update score of right slice
          rightScore = strategy(image.extract_area(width - slice - 1, 0, slice, inHeight));
        }
        // Keep slice with highest entropy
        if (leftEntropy >= rightEntropy) {
        // Keep slice with highest score
        if (leftScore >= rightScore) {
          // Discard right slice
          rightEntropy = 0.0;
          rightScore = 0.0;
        } else {
          // Discard left slice
          leftEntropy = 0.0;
          leftScore = 0.0;
          left = left + slice;
        }
        width = width - slice;
      }
    }
    if (inHeight > outHeight) {
      // Reduce height by repeated removing slices from edge with lowest entropy
      // Reduce height by repeated removing slices from edge with lowest score
      int height = inHeight;
      double topEntropy = 0.0;
      double bottomEntropy = 0.0;
      double topScore = 0.0;
      double bottomScore = 0.0;
      // Max height of each slice
      int const maxSliceHeight = static_cast<int>(ceil((inHeight - outHeight) / 8.0));
      while (height > outHeight) {
        // Height of current slice
        int const slice = std::min(height - outHeight, maxSliceHeight);
        if (topEntropy == 0.0) {
          // Update entropy of top slice
          topEntropy = Entropy(image.extract_area(0, top, inWidth, slice));
        if (topScore == 0.0) {
          // Update score of top slice
          topScore = strategy(image.extract_area(0, top, inWidth, slice));
        }
        if (bottomEntropy == 0.0) {
          // Update entropy of bottom slice
          bottomEntropy = Entropy(image.extract_area(0, height - slice - 1, inWidth, slice));
        if (bottomScore == 0.0) {
          // Update score of bottom slice
          bottomScore = strategy(image.extract_area(0, height - slice - 1, inWidth, slice));
        }
        // Keep slice with highest entropy
        if (topEntropy >= bottomEntropy) {
        // Keep slice with highest score
        if (topScore >= bottomScore) {
          // Discard bottom slice
          bottomEntropy = 0.0;
          bottomScore = 0.0;
        } else {
          // Discard top slice
          topEntropy = 0.0;
          topScore = 0.0;
          top = top + slice;
        }
        height = height - slice;

@@ -360,13 +396,6 @@ namespace sharp {
    return std::make_tuple(left, top);
  }

  /*
    Calculate the Shannon entropy for an image
  */
  double Entropy(VImage image) {
    return image.hist_find().hist_entropy();
  }

  /*
    Insert a tile cache to prevent over-computation of any previous operations in the pipeline
  */
@@ -1,8 +1,10 @@
#ifndef SRC_OPERATIONS_H_
#define SRC_OPERATIONS_H_

#include <tuple>
#include <algorithm>
#include <functional>
#include <memory>
#include <tuple>
#include <vips/vips8>

using vips::VImage;

@@ -63,14 +65,21 @@ namespace sharp {
  VImage Sharpen(VImage image, double const sigma, double const flat, double const jagged);

  /*
    Calculate crop area based on image entropy
    Crop strategy functors
  */
  std::tuple<int, int> EntropyCrop(VImage image, int const outWidth, int const outHeight);
  struct EntropyStrategy {
    double operator()(VImage image);
  };
  struct AttentionStrategy {
    double operator()(VImage image);
  };

  /*
    Calculate the Shannon entropy for an image
    Calculate crop area based on given strategy (Entropy, Attention)
  */
  double Entropy(VImage image);
  std::tuple<int, int> Crop(
    VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
  );

  /*
    Insert a tile cache to prevent over-computation of any previous operations in the pipeline
@@ -488,13 +488,18 @@ class PipelineWorker : public Nan::AsyncWorker {
          std::tie(left, top) = sharp::CalculateCrop(
            image.width(), image.height(), baton->width, baton->height, baton->crop
          );
        } else {
        } else if (baton->crop == 16) {
          // Entropy-based crop
          std::tie(left, top) = sharp::EntropyCrop(image, baton->width, baton->height);
          std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::EntropyStrategy());
        } else {
          // Attention-based crop
          std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::AttentionStrategy());
        }
        int width = std::min(image.width(), baton->width);
        int height = std::min(image.height(), baton->height);
        image = image.extract_area(left, top, width, height);
        baton->cropCalcLeft = left;
        baton->cropCalcTop = top;
      }
    }

@@ -890,6 +895,10 @@ class PipelineWorker : public Nan::AsyncWorker {
    Set(info, New("width").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(width)));
    Set(info, New("height").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(height)));
    Set(info, New("channels").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->channels)));
    if (baton->cropCalcLeft != -1 && baton->cropCalcLeft != -1) {
      Set(info, New("cropCalcLeft").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcLeft)));
      Set(info, New("cropCalcTop").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->cropCalcTop)));
    }

    if (baton->bufferOutLength > 0) {
      // Pass ownership of output data to Buffer instance
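The `cropCalcLeft`/`cropCalcTop` values set here are exposed on the JavaScript `info` object whenever an entropy or attention crop is applied, which is what the updated tests and the saliency report rely on; a minimal sketch of reading them (the input file name is a placeholder):

```javascript
const sharp = require('sharp');

sharp('input.jpg')
  .resize(80, 320)
  .crop(sharp.strategy.attention)
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // Top-left corner of the automatically calculated crop region
    console.log(info.cropCalcLeft, info.cropCalcTop);
  });
```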
@@ -46,6 +46,8 @@ struct PipelineBaton {
  int channels;
  Canvas canvas;
  int crop;
  int cropCalcLeft;
  int cropCalcTop;
  std::string kernel;
  std::string interpolator;
  double background[4];

@@ -112,6 +114,8 @@
    channels(0),
    canvas(Canvas::CROP),
    crop(0),
    cropCalcLeft(-1),
    cropCalcTop(-1),
    flatten(false),
    negate(false),
    blurSigma(0.0),
@@ -8,11 +8,11 @@
    "test": "VIPS_WARNING=0 node perf && node random && node parallel"
  },
  "devDependencies": {
    "async": "^2.0.1",
    "async": "^2.1.1",
    "benchmark": "^2.1.1",
    "gm": "^1.23.0",
    "imagemagick": "^0.1.3",
    "imagemagick-native": "^1.9.2",
    "imagemagick-native": "^1.9.3",
    "jimp": "^0.2.27",
    "lwip": "^0.0.9",
    "semver": "^5.3.0"
@@ -519,6 +519,36 @@ async.series({
      }
    });
  }
}).add('sharp-crop-entropy', {
  defer: true,
  fn: function(deferred) {
    sharp(inputJpgBuffer)
      .resize(width, height)
      .crop(sharp.strategy.entropy)
      .toBuffer(function(err, buffer) {
        if (err) {
          throw err;
        } else {
          assert.notStrictEqual(null, buffer);
          deferred.resolve();
        }
      });
  }
}).add('sharp-crop-attention', {
  defer: true,
  fn: function(deferred) {
    sharp(inputJpgBuffer)
      .resize(width, height)
      .crop(sharp.strategy.attention)
      .toBuffer(function(err, buffer) {
        if (err) {
          throw err;
        } else {
          assert.notStrictEqual(null, buffer);
          deferred.resolve();
        }
      });
  }
}).on('cycle', function(event) {
  console.log('operations ' + String(event.target));
}).on('complete', function() {
BIN test/fixtures/expected/conv-sobel-horizontal.jpg (vendored, new file; binary file not shown; after: 19 KiB)
BIN (existing binary fixture modified; before: 8.5 KiB, after: 8.5 KiB)
BIN (existing binary fixture modified; before: 6.0 KiB, after: 6.0 KiB)
BIN test/fixtures/expected/overlay-bottom-edges-meet.jpg (vendored, new file; binary file not shown; after: 3.8 KiB)
test/saliency/README.md (new file, 16 lines)

@@ -0,0 +1,16 @@
# Crop strategy accuracy

1. Download the [MSRA Salient Object Database](http://research.microsoft.com/en-us/um/people/jiansun/SalientObject/salient_object.htm) (101MB).
2. Extract each image and its median human-labelled salient region.
3. Generate a test report of percentage deviance of top and left edges for each crop strategy, plus a naive centre gravity crop as "control".

```sh
git clone https://github.com/lovell/sharp.git
cd sharp/test/saliency
./download.sh
node report.js
python -m SimpleHTTPServer
```

The test report will then be available at
http://localhost:8000/report.html
test/saliency/download.sh (new executable file, 25 lines)

@@ -0,0 +1,25 @@
#!/bin/sh

# Fetch and parse the MSRA Salient Object Database 'Image set B'
# http://research.microsoft.com/en-us/um/people/jiansun/salientobject/salient_object.htm

if [ ! -d Image ]; then
  if [ ! -f ImageB.zip ]; then
    echo "Downloading 5000 images (101MB)"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/ImageB.zip
  fi
  unzip ImageB.zip
fi

if [ ! -d UserData ]; then
  if [ ! -f UserDataB.zip ]; then
    echo "Downloading human-labelled regions"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/UserDataB.zip
  fi
  unzip UserDataB.zip
fi

if [ ! -f userData.json ]; then
  echo "Processing human-labelled regions"
  node userData.js
fi
test/saliency/humanae/download.js (new file, 39 lines)

@@ -0,0 +1,39 @@
'use strict';
/*jshint esversion: 6 */

const fs = require('fs');
const request = require('request');
const tumblr = require('tumblr.js');

const client = tumblr.createClient({
  consumer_key: '***',
  consumer_secret: '***'
});

const fetchImages = function(offset) {
  console.log(`Fetching offset ${offset}`);
  client.posts('humanae', {
    type: 'photo',
    offset: offset
  }, function (err, response) {
    if (err) throw err;
    if (response.posts.length > 0) {
      response.posts.forEach((post) => {
        const url = post.photos[0].alt_sizes
          .filter((image) => image.width === 100)
          .map((image) => image.url)
          [0];
        const filename = `./images/${post.id}.jpg`;
        try {
          fs.statSync(filename);
        } catch (err) {
          if (err.code === 'ENOENT') {
            request(url).pipe(fs.createWriteStream(filename));
          }
        }
      });
      fetchImages(offset + 20);
    }
  });
};
fetchImages(0);
test/saliency/humanae/package.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
  "name": "sharp-crop-strategy-attention-model-humanae",
  "version": "0.0.1",
  "private": true,
  "dependencies": {
    "request": "^2.75.0",
    "tumblr.js": "^1.1.1"
  }
}
test/saliency/humanae/tone.js (new file, 34 lines)

@@ -0,0 +1,34 @@
'use strict';
/*jshint esversion: 6 */

const fs = require('fs');
const child_process = require('child_process');

const a = [];
const b = [];

fs.readdirSync('./images')
  .filter((file) => file.endsWith('.jpg'))
  .forEach((file) => {
    // Extract one pixel, avoiding first DCT block, and return value of A and B channels
    const command = `convert ./images/${file}[1x1+8+8] -colorspace lab -format "%[fx:u.g] %[fx:u.b]" info:`;
    const result = child_process.execSync(command, { encoding: 'utf8' });
    const ab = result.split(' ');
    a.push(ab[0]);
    b.push(ab[1]);
  });

a.sort((v1, v2) => v1 - v2);
b.sort((v1, v2) => v1 - v2);

// Convert from 0..1 to -128..128
const convert = function(v) {
  return Math.round(256 * (v - 0.5));
};

const threshold = Math.round(a.length / 100);
console.log(`Trimming lowest/highest ${threshold} for 98th percentile`);

// Ignore ~2% outliers
console.log(`a ${convert(a[threshold])} - ${convert(a[a.length - threshold])}`);
console.log(`b ${convert(b[threshold])} - ${convert(b[b.length - threshold])}`);
test/saliency/report.html (new file, 25 lines)

@@ -0,0 +1,25 @@
<html>
<head>
  <link href="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.css" rel="stylesheet" type="text/css">
  <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/4.2.6/d3.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.js"></script>
</head>
<body>
  <div id="accuracy"></div>
  <script>
    d3.json('report.json', function(err, data) {
      MG.data_graphic({
        title: 'Crop accuracy',
        data: data,
        target: '#accuracy',
        width: 960,
        height: 600,
        x_accessor: 'accuracy',
        x_label: '% Accuracy',
        y_accessor: ['centre', 'entropy', 'attention'],
        legend: ['Centre', 'Entropy', 'Attention']
      });
    });
  </script>
</body>
</html>
test/saliency/report.js (new file, 69 lines)

@@ -0,0 +1,69 @@
'use strict';
/*jshint esversion: 6 */

const os = require('os');
const fs = require('fs');
const path = require('path');
const async = require('async');
const sharp = require('../../');

const crops = {
  centre: sharp.gravity.centre,
  entropy: sharp.strategy.entropy,
  attention: sharp.strategy.attention
};
const concurrency = os.cpus().length;

const scores = {};

const incrementScore = function(accuracy, crop) {
  if (typeof scores[accuracy] === 'undefined') {
    scores[accuracy] = {};
  }
  if (typeof scores[accuracy][crop] === 'undefined') {
    scores[accuracy][crop] = 0;
  }
  scores[accuracy][crop]++;
};

const userData = require('./userData.json');
const files = Object.keys(userData);

async.eachLimit(files, concurrency, function(file, done) {
  const filename = path.join(__dirname, 'Image', file);
  const salientWidth = userData[file].right - userData[file].left;
  const salientHeight = userData[file].bottom - userData[file].top;
  sharp(filename).metadata(function(err, metadata) {
    if (err) console.log(err);
    async.each(Object.keys(crops), function(crop, done) {
      async.parallel([
        // Left edge accuracy
        function(done) {
          sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function(err, data, info) {
            const accuracy = Math.round(Math.abs(userData[file].left - info.cropCalcLeft) / (metadata.width - salientWidth) * 100);
            incrementScore(accuracy, crop);
            done();
          });
        },
        // Top edge accuracy
        function(done) {
          sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function(err, data, info) {
            const accuracy = Math.round(Math.abs(userData[file].top - info.cropCalcTop) / (metadata.height - salientHeight) * 100);
            incrementScore(accuracy, crop);
            done();
          });
        }
      ], done);
    }, done);
  });
}, function() {
  const report = [];
  Object.keys(scores).forEach(function(accuracy) {
    report.push(
      Object.assign({
        accuracy: parseInt(accuracy, 10)
      }, scores[accuracy])
    );
  });
  fs.writeFileSync('report.json', JSON.stringify(report, null, 2));
});
test/saliency/userData.js (new file, 69 lines)

@@ -0,0 +1,69 @@
'use strict';
/*jshint esversion: 6, loopfunc: true */

const fs = require('fs');
const path = require('path');

const userDataDir = 'UserData';

const images = {};

const median = function(values) {
  values.sort(function(a,b) {
    return a - b;
  });
  const half = Math.floor(values.length / 2);
  if (values.length % 2) {
    return values[half];
  } else {
    return Math.floor((values[half - 1] + values[half]) / 2);
  }
};

// List of files
fs.readdirSync(userDataDir).forEach(function(file) {
  // Contents of file
  const lines = fs.readFileSync(path.join(userDataDir, file), {encoding: 'utf-8'}).split(/\r\n/);
  // First line = number of entries
  const entries = parseInt(lines[0], 10);
  // Verify number of entries
  if (entries !== 500) {
    throw new Error('Expecting 500 images in ' + file + ', found ' + entries);
  }
  // Keep track of which line we're on
  let linePos = 2;
  for (let i = 0; i < entries; i++) {
    // Get data for current image
    const filename = lines[linePos].replace(/\\/, path.sep);
    linePos = linePos + 2;
    const regions = lines[linePos].split('; ');
    linePos = linePos + 2;
    // Parse human-labelled regions for min/max coords
    const lefts = [], tops = [], rights = [], bottoms = [];
    regions.forEach(function(region) {
      if (region.indexOf(' ') !== -1) {
        const coords = region.split(' ');
        lefts.push(parseInt(coords[0], 10));
        tops.push(parseInt(coords[1], 10));
        rights.push(parseInt(coords[2], 10));
        bottoms.push(parseInt(coords[3], 10));
      }
    });
    // Add image
    images[filename] = {
      left: median(lefts),
      top: median(tops),
      right: median(rights),
      bottom: median(bottoms)
    };
  }
});

// Verify number of images found
const imageCount = Object.keys(images).length;
if (imageCount === 5000) {
  // Write output
  fs.writeFileSync('userData.json', JSON.stringify(images, null, 2));
} else {
  throw new Error('Expecting 5000 images, found ' + imageCount);
}
@@ -9,18 +9,17 @@ describe('Convolve', function() {

  it('specific convolution kernel 1', function(done) {
    sharp(fixtures.inputPngStripesV)
      .resize(320, 240)
      .convolve(
        {
          'width': 3,
          'height': 3,
          'scale': 50,
          'offset': 0,
          'kernel': [ 10, 20, 10,
                      0, 0, 0,
                      10, 20, 10 ]
        })
      .convolve({
        width: 3,
        height: 3,
        scale: 50,
        offset: 0,
        kernel: [ 10, 20, 10,
                  0, 0, 0,
                  10, 20, 10 ]
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('png', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);

@@ -30,16 +29,15 @@ describe('Convolve', function() {

  it('specific convolution kernel 2', function(done) {
    sharp(fixtures.inputPngStripesH)
      .resize(320, 240)
      .convolve(
        {
          'width': 3,
          'height': 3,
          'kernel': [ 1, 0, 1,
                      2, 0, 2,
                      1, 0, 1 ]
        })
      .convolve({
        width: 3,
        height: 3,
        kernel: [ 1, 0, 1,
                  2, 0, 2,
                  1, 0, 1 ]
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('png', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);

@@ -47,36 +45,48 @@ describe('Convolve', function() {
      });
  });

  it('invalid kernel specification: no data', function() {
    assert.throws(function() {
      sharp(fixtures.inputJpg).convolve(
        {
          'width': 3,
          'height': 3,
          'kernel': []
        });
    });
  it('horizontal Sobel operator', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .convolve({
        width: 3,
        height: 3,
        kernel: [ -1, 0, 1,
                  -2, 0, 2,
                  -1, 0, 1 ]
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
        fixtures.assertSimilar(fixtures.expected('conv-sobel-horizontal.jpg'), data, done);
      });
  });

  it('invalid kernel specification: bad data format', function() {
    assert.throws(function() {
      sharp(fixtures.inputJpg).convolve(
        {
          'width': 3,
          'height': 3,
          'kernel': [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        });
  describe('invalid kernel specification', function() {
    it('missing', function() {
      assert.throws(function() {
        sharp(fixtures.inputJpg).convolve({});
      });
    });
  });

  it('invalid kernel specification: wrong width', function() {
    assert.throws(function() {
      sharp(fixtures.inputJpg).convolve(
        {
          'width': 3,
          'height': 4,
          'kernel': [1, 2, 3, 4, 5, 6, 7, 8, 9]
    it('incorrect data format', function() {
      assert.throws(function() {
        sharp(fixtures.inputJpg).convolve({
          width: 3,
          height: 3,
          kernel: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        });
      });
    });
    it('incorrect dimensions', function() {
      assert.throws(function() {
        sharp(fixtures.inputJpg).convolve({
          width: 3,
          height: 4,
          kernel: [1, 2, 3, 4, 5, 6, 7, 8, 9]
        });
      });
    });
  });
});
@@ -29,6 +29,9 @@ describe('cpplint', function() {
      },
      whitespace: {
        parens: false
      },
      runtime: {
        indentation_namespace: false
      }
    }
  }, function(err, report) {
@@ -172,7 +172,9 @@ describe('Crop', function() {
        assert.strictEqual(3, info.channels);
        assert.strictEqual(80, info.width);
        assert.strictEqual(320, info.height);
        fixtures.assertSimilar(fixtures.expected('crop-entropy.jpg'), data, done);
        assert.strictEqual(250, info.cropCalcLeft);
        assert.strictEqual(0, info.cropCalcTop);
        fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
      });
  });

@@ -186,10 +188,47 @@ describe('Crop', function() {
        assert.strictEqual(4, info.channels);
        assert.strictEqual(320, info.width);
        assert.strictEqual(80, info.height);
        fixtures.assertSimilar(fixtures.expected('crop-entropy.png'), data, done);
        assert.strictEqual(0, info.cropCalcLeft);
        assert.strictEqual(80, info.cropCalcTop);
        fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
      });
  });

  });

  describe('Attention strategy', function() {

    it('JPEG', function(done) {
      sharp(fixtures.inputJpgWithCmykProfile)
        .resize(80, 320)
        .crop(sharp.strategy.attention)
        .toBuffer(function(err, data, info) {
          if (err) throw err;
          assert.strictEqual('jpeg', info.format);
          assert.strictEqual(3, info.channels);
          assert.strictEqual(80, info.width);
          assert.strictEqual(320, info.height);
          assert.strictEqual(250, info.cropCalcLeft);
          assert.strictEqual(0, info.cropCalcTop);
          fixtures.assertSimilar(fixtures.expected('crop-strategy.jpg'), data, done);
        });
    });

    it('PNG', function(done) {
      sharp(fixtures.inputPngWithTransparency)
        .resize(320, 80)
        .crop(sharp.strategy.attention)
        .toBuffer(function(err, data, info) {
          if (err) throw err;
          assert.strictEqual('png', info.format);
          assert.strictEqual(4, info.channels);
          assert.strictEqual(320, info.width);
          assert.strictEqual(80, info.height);
          assert.strictEqual(0, info.cropCalcLeft);
          assert.strictEqual(80, info.cropCalcTop);
          fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
        });
    });

  });
});
@@ -437,7 +437,27 @@ describe('Overlays', function() {
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(expected, data, done);
      });
  });

  it('Overlay 100x100 with 50x50 so bottom edges meet', function(done) {
    sharp(fixtures.inputJpg)
      .resize(50, 50)
      .toBuffer(function(err, overlay) {
        if (err) throw err;
        sharp(fixtures.inputJpgWithLandscapeExif1)
          .resize(100, 100)
          .overlayWith(overlay, {
            top: 50,
            left: 40
          })
          .toBuffer(function(err, data, info) {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(100, info.width);
            assert.strictEqual(100, info.height);
            fixtures.assertSimilar(fixtures.expected('overlay-bottom-edges-meet.jpg'), data, done);
          });
      });
  });
});