Compare commits

37 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 2d500554c1 | |
| | c42fb97419 | |
| | d1d6155fd1 | |
| | ff8c42e894 | |
| | e10aeb29eb | |
| | fee3d882c7 | |
| | d17e8d3450 | |
| | 99f960bf56 | |
| | 83d8847f57 | |
| | f672f86b53 | |
| | b69627891d | |
| | 673d8278b5 | |
| | 8dd554b935 | |
| | 65b7f7d7d5 | |
| | a982cfdb20 | |
| | 7689fbe54d | |
| | c9d32e22d3 | |
| | 278273b5c3 | |
| | a5d85b8a54 | |
| | 4c172d25f6 | |
| | b70a7d9a3b | |
| | ba5a8b44ed | |
| | 91e1ed1314 | |
| | 85f20c6e1b | |
| | 4b98dbb454 | |
| | c3ad4fbdaa | |
| | 2e9cd83ed2 | |
| | f1ead06645 | |
| | d486eaad03 | |
| | 7d261a147d | |
| | 61038888c4 | |
| | 39040fb9a0 | |
| | 4f3262c328 | |
| | 69126a7c5f | |
| | 62554b766f | |
| | e699e36270 | |
| | 331926dc3c | |
@@ -2,7 +2,7 @@
"strict": true,
"node": true,
"maxparams": 4,
"maxcomplexity": 13,
"maxcomplexity": 14,
"globals": {
"beforeEach": true,
"afterEach": true,

@@ -3,7 +3,6 @@ node_js:
- "0.10"
- "0.12"
- "4"
- "5"
- "6"
os:
- linux

@@ -1,4 +1,4 @@
os: Visual Studio 2015
os: Previous Visual Studio 2015
version: "{build}"
build: off
platform: x64

@@ -7,7 +7,6 @@ environment:
matrix:
- nodejs_version: "0.12"
- nodejs_version: "4"
- nodejs_version: "5"
- nodejs_version: "6"
install:
- ps: Install-Product node $env:nodejs_version x64

@@ -51,6 +51,7 @@
],
# Nested variables "pattern" borrowed from http://src.chromium.org/viewvc/chrome/trunk/src/build/common.gypi
'variables': {
'sharp_cxx11%': '0',
'variables': {
'variables': {
'conditions': [

@@ -92,7 +93,7 @@
'src/utilities.cc'
],
'defines': [
'_GLIBCXX_USE_CXX11_ABI=0',
'_GLIBCXX_USE_CXX11_ABI=<(sharp_cxx11)',
'_ALLOW_KEYWORD_MACROS'
],
'include_dirs': [
100  docs/api.md

@@ -298,6 +298,12 @@ sharp(input)

```javascript
});
```

#### trim([tolerance])

Trim "boring" pixels from all edges that contain values within a percentage similarity of the top-left pixel.

* `tolerance`, if present, is an integral Number between 1 and 99 representing the percentage similarity, defaulting to 10.
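
For illustration, a minimal usage sketch of this operation (the `input` variable and the output path are hypothetical, not part of the original documentation):

```javascript
sharp(input)
  .trim(20)
  .toFile('trimmed.jpg', function(err, info) {
    // edges within 20% similarity of the top-left pixel have been removed;
    // info reflects the trimmed output dimensions
  });
```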

#### background(rgba)

Set the background for the `embed`, `flatten` and `extend` operations.
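
A brief illustrative sketch of combining `background` with `flatten` (the file names are hypothetical and not part of the original documentation):

```javascript
sharp('transparent.png')
  .background('#ffffff')
  .flatten()
  .toFile('opaque.jpg', function(err, info) {
    // the alpha channel has been merged onto a white background
  });
```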
@@ -381,6 +387,30 @@ When a `sigma` is provided, performs a slower, more accurate Gaussian blur. This

* `sigma`, if present, is a Number between 0.3 and 1000 representing the sigma of the Gaussian mask, where `sigma = 1 + radius / 2`.

#### convolve(kernel)

Convolve the image with the specified `kernel`, an Object with the following attributes:

* `width` is an integral Number representing the width of the kernel in pixels.
* `height` is an integral Number representing the height of the kernel in pixels.
* `kernel` is an Array of length `width*height` containing the kernel values.
* `scale`, if present, is a Number representing the scale of the kernel in pixels, defaulting to the sum of the kernel's values.
* `offset`, if present, is a Number representing the offset of the kernel in pixels, defaulting to 0.

```javascript
sharp(input)
  .convolve({
    width: 3,
    height: 3,
    kernel: [-1, 0, 1, -2, 0, 2, -1, 0, 1]
  })
  .raw()
  .toBuffer(function(err, data, info) {
    // data contains the raw pixel data representing the convolution
    // of the input image with the horizontal Sobel operator
  });
```

#### sharpen([sigma], [flat], [jagged])

When used without parameters, performs a fast, mild sharpen of the output image. This typically reduces performance by 10%.

@@ -391,11 +421,13 @@ When a `sigma` is provided, performs a slower, more accurate sharpen of the L ch

* `flat`, if present, is a Number representing the level of sharpening to apply to "flat" areas, defaulting to a value of 1.0.
* `jagged`, if present, is a Number representing the level of sharpening to apply to "jagged" areas, defaulting to a value of 2.0.
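
An illustrative sketch (the parameter values are arbitrary, chosen only to show the signature):

```javascript
sharp(input)
  .resize(400)
  .sharpen(2, 1.0, 0.5)
  .toBuffer(function(err, data, info) {
    // data contains the resized image, sharpened with sigma 2,
    // flat level 1.0 and jagged level 0.5
  });
```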
#### threshold([threshold])
#### threshold([threshold], [options])

Converts all pixels in the image to greyscale white or black. Any pixel greater-than-or-equal-to the threshold (0..255) will be white. All others will be black.
Any pixel value greater than or equal to the threshold value will be set to 255, otherwise it will be set to 0.
By default, the image will be converted to single channel greyscale before thresholding.

* `threshold`, if present, is a Number, representing the level above which pixels will be forced to white.
* `threshold`, if present, is a Number between 0 and 255, representing the level at which the threshold will be applied. The default threshold is 128.
* `options`, if present, is an Object containing a Boolean `greyscale` (or `grayscale`). When `false` each channel will have the threshold applied independently.
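
A short sketch of the `greyscale` option described above (input and output names are hypothetical):

```javascript
sharp(input)
  .threshold(128, { greyscale: false })
  .toFile('thresholded.png', function(err, info) {
    // each of the R, G and B channels has been thresholded independently,
    // rather than converting to single channel greyscale first
  });
```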
#### gamma([gamma])

@@ -431,6 +463,12 @@ Overlay (composite) a image containing an alpha channel over the processed (resi

`options`, if present, is an Object with the following optional attributes:

* `gravity` is a String or an attribute of the `sharp.gravity` Object e.g. `sharp.gravity.north` at which to place the overlay, defaulting to `center`/`centre`.
* `top` is an integral Number representing the pixel offset from the top edge.
* `left` is an integral Number representing the pixel offset from the left edge.
* `tile` is a Boolean, defaulting to `false`. When set to `true` repeats the overlay image across the entire image with the given `gravity`.
* `cutout` is a Boolean, defaulting to `false`. When set to `true` applies only the alpha channel of the overlay image to the image to be overlaid, giving the appearance of one image being cut out of another.

If both `top` and `left` are provided, they take precedence over `gravity`.
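
As an illustration of the offset options introduced here (file names are hypothetical); the original example follows below:

```javascript
sharp('input.png')
  .overlayWith('watermark.png', { top: 10, left: 10 })
  .toFile('marked.png', function(err, info) {
    // watermark.png is composited with its top-left corner
    // placed 10 pixels from the top and left edges
  });
```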
```javascript
sharp('input.png')
@@ -451,13 +489,61 @@ sharp('input.png')
});
```

#### extractChannel(channel)

Extract a single channel from a multi-channel image.

* `channel` is a zero-indexed integral Number representing the band number to extract. `red`, `green` or `blue` are also accepted as an alternative to `0`, `1` or `2` respectively.

```javascript
sharp(input)
  .extractChannel('green')
  .toFile('input_green.jpg', function(err, info) {
    // info.channels === 1
    // input_green.jpg contains the green channel of the input image
  });
```

#### bandbool(operation)

Perform a bitwise boolean operation on all input image channels (bands) to produce a single channel output image.

`operation` is a string containing the name of the bitwise operator to be applied to image channels, which can be one of:

* `and` performs a bitwise and operation, like the c-operator `&`.
* `or` performs a bitwise or operation, like the c-operator `|`.
* `eor` performs a bitwise exclusive or operation, like the c-operator `^`.

```javascript
sharp('input.png')
  .bandbool(sharp.bool.and)
  .toFile('output.png')
```

In the above example if `input.png` is a 3 channel RGB image, `output.png` will be a 1 channel grayscale image where each pixel `P = R & G & B`.
For example, if `I(1,1) = [247, 170, 14] = [0b11110111, 0b10101010, 0b00001110]` then `O(1,1) = 0b11110111 & 0b10101010 & 0b00001110 = 0b00000010 = 2`.

#### boolean(image, operation)

Perform a bitwise boolean operation with `image`, where `image` is one of the following:

* Buffer containing PNG, WebP, GIF or SVG image data, or
* String containing the path to an image file

This operation creates an output image where each pixel is the result of the selected bitwise boolean `operation` between the corresponding pixels of the input images.
The boolean operation can be one of the following:

* `and` performs a bitwise and operation, like the c-operator `&`.
* `or` performs a bitwise or operation, like the c-operator `|`.
* `eor` performs a bitwise exclusive or operation, like the c-operator `^`.
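
A minimal sketch, assuming two hypothetical input files of matching dimensions:

```javascript
sharp('input1.png')
  .boolean('input2.png', sharp.bool.eor)
  .toFile('output.png', function(err, info) {
    // each pixel of output.png is the bitwise exclusive-or of the
    // corresponding pixels of input1.png and input2.png
  });
```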

### Output

#### toFile(path, [callback])

`path` is a String containing the path to write the image data to.

If an explicit output format is not selected, it will be inferred from the extension, with JPEG, PNG, WebP, TIFF and DZI supported.
If an explicit output format is not selected, it will be inferred from the extension, with JPEG, PNG, WebP, TIFF, DZI, and VIPS V format supported. Note that RAW format is only supported for buffer output.

`callback`, if present, is called with two arguments `(err, info)` where:

@@ -468,7 +554,7 @@ A Promises/A+ promise is returned when `callback` is not provided.
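
A short sketch of format inference from the file extension, including the newly supported V format (paths are hypothetical):

```javascript
sharp(input)
  .resize(800, 600)
  .toFile('output.v', function(err, info) {
    // output.v is written in libvips' native V format,
    // inferred from the '.v' extension
  });
```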

#### toBuffer([callback])

Write image data to a Buffer, the format of which will match the input image by default. JPEG, PNG and WebP are supported.
Write image data to a Buffer, the format of which will match the input image by default. JPEG, PNG, WebP, and RAW are supported.

`callback`, if present, gets three arguments `(err, buffer, info)` where:

@@ -541,7 +627,7 @@ The size, overlap, container and directory layout to use when generating square

* `container` is a String, with value `fs` or `zip`. The default value is `fs`.
* `layout` is a String, with value `dz`, `zoomify` or `google`. The default value is `dz`.

You can also use the file extension .zip or .szi to write to a ZIP container instead of the filesystem.
You can also use the file extension `.zip` or `.szi` to write to a compressed archive file format.

```javascript
sharp('input.tiff')
```
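
The example above is truncated; a hedged sketch of writing a Deep Zoom pyramid to a ZIP container follows (the options object and values are illustrative, not taken from the original example):

```javascript
sharp('input.tiff')
  .tile({ size: 512 })
  .toFile('output.zip', function(err, info) {
    // a Deep Zoom pyramid of 512x512 tiles, written to a ZIP
    // container because of the '.zip' extension
  });
```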

@@ -658,6 +744,8 @@ If `options` is provided, sets the limits of _libvips'_ operation cache.

`options` can also be a boolean, where `true` enables the default cache settings and `false` disables all caching.

Existing entries in the cache will be trimmed after any change in limits.

This method always returns cache statistics, useful for determining how much working memory is required for a particular task.
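
The code example that followed here was lost during extraction; a minimal sketch of the documented behaviour (the limit values are illustrative):

```javascript
// Disable all caching, e.g. on musl-based systems such as Alpine
sharp.cache(false);

// Set limits and inspect the returned cache statistics
var stats = sharp.cache({ memory: 50, files: 20, items: 200 });
console.log(stats);
```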

@@ -4,6 +4,72 @@

Requires libvips v8.3.1

#### v0.15.1 - 12<sup>th</sup> July 2016

* Concat Stream-based input in single operation for ~+3% perf and less GC.
[#429](https://github.com/lovell/sharp/issues/429)
[@papandreou](https://github.com/papandreou)

* Add alpha channel, if required, before extend operation.
[#439](https://github.com/lovell/sharp/pull/439)
[@frulo](https://github.com/frulo)

* Allow overlay image to be repeated across entire image via tile option.
[#443](https://github.com/lovell/sharp/pull/443)
[@lemnisk8](https://github.com/lemnisk8)

* Add cutout option to overlayWith feature, applies only the alpha channel of the overlay image.
[#448](https://github.com/lovell/sharp/pull/448)
[@kleisauke](https://github.com/kleisauke)

* Ensure scaling factors are calculated independently to prevent rounding errors.
[#452](https://github.com/lovell/sharp/issues/452)
[@puzrin](https://github.com/puzrin)

* Add --sharp-cxx11 flag to compile with gcc's new C++11 ABI.
[#456](https://github.com/lovell/sharp/pull/456)
[@kapouer](https://github.com/kapouer)

* Add top/left offset support to overlayWith operation.
[#473](https://github.com/lovell/sharp/pull/473)
[@rnanwani](https://github.com/rnanwani)

* Add convolve operation for kernel-based convolution.
[#479](https://github.com/lovell/sharp/pull/479)
[@mhirsch](https://github.com/mhirsch)

* Add greyscale option to threshold operation for colourspace conversion control.
[#480](https://github.com/lovell/sharp/pull/480)
[@mhirsch](https://github.com/mhirsch)

* Ensure ICC profiles are licenced for distribution.
[#486](https://github.com/lovell/sharp/issues/486)
[@kapouer](https://github.com/kapouer)

* Allow images with an alpha channel to work with LAB-colourspace based sharpen.
[#490](https://github.com/lovell/sharp/issues/490)
[@jwagner](https://github.com/jwagner)

* Add trim operation to remove "boring" edges.
[#492](https://github.com/lovell/sharp/pull/492)
[@kleisauke](https://github.com/kleisauke)

* Add bandbool feature for channel-wise boolean operations.
[#496](https://github.com/lovell/sharp/pull/496)
[@mhirsch](https://github.com/mhirsch)

* Add extractChannel operation to extract a channel from an image.
[#497](https://github.com/lovell/sharp/pull/497)
[@mhirsch](https://github.com/mhirsch)

* Add ability to read and write native libvips .v files.
[#500](https://github.com/lovell/sharp/pull/500)
[@mhirsch](https://github.com/mhirsch)

* Add boolean feature for bitwise image operations.
[#501](https://github.com/lovell/sharp/pull/501)
[@mhirsch](https://github.com/mhirsch)

#### v0.15.0 - 21<sup>st</sup> May 2016

* Use libvips' new Lanczos 3 kernel as default for image reduction.
@@ -84,6 +150,9 @@ Requires libvips v8.2.3
[#387](https://github.com/lovell/sharp/issues/387)
[@kleisauke](https://github.com/kleisauke)

* Remove deprecated style of calling extract API. Breaks calls using positional arguments.
[#276](https://github.com/lovell/sharp/issues/276)

### v0.13 - "*mind*"

Requires libvips v8.2.2

@@ -92,6 +92,11 @@ the help and code contributions of the following people:
* [Kenton Gray](https://github.com/kentongray)
* [Felix Bünemann](https://github.com/felixbuenemann)
* [Samy Al Zahrani](https://github.com/salzhrani)
* [Chintan Thakkar](https://github.com/lemnisk8)
* [F. Orlando Galashan](https://github.com/frulo)
* [Kleis Auke Wolthuizen](https://github.com/kleisauke)
* [Matt Hirsch](https://github.com/mhirsch)
* [Rahul Nanwani](https://github.com/rnanwani)

Thank you!

@@ -30,8 +30,16 @@ Most recent Linux-based operating systems with glibc running on x64 and ARMv6+ C

To use your own version of libvips instead of the provided binaries, make sure it is
at least the version listed under `config.libvips` in the `package.json` file,
that it can be located using `pkg-config --modversion vips-cpp`
and that it has been compiled with `_GLIBCXX_USE_CXX11_ABI=0`.
that it can be located using `pkg-config --modversion vips-cpp`.

There are [changes in the C++11 ABI](https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html)
when using v5.1+ of the `g++` compiler.
If you have installed `libvips-dev` via package manager on an OS such as Debian testing/unstable,
you can pass the required value of the `_GLIBCXX_USE_CXX11_ABI` macro using the `--sharp-cxx11` flag.

```sh
npm install --sharp-cxx11=1
```

If you are using non-standard paths (anything other than `/usr` or `/usr/local`),
you might need to set `PKG_CONFIG_PATH` during `npm install`

@@ -54,12 +62,6 @@ For Linux-based operating systems such as Alpine that use musl libc,
the smaller stack size means libvips' cache should be disabled
via `sharp.cache(false)` to avoid a stack overflow.

Beware of Linux OS upgrades that introduce v5.1+ of the `g++` compiler due to
[changes](https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html)
in the C++11 ABI.
This module assumes the previous behaviour, which can be enforced by setting the
`_GLIBCXX_USE_CXX11_ABI=0` environment variable at libvips' compile time.

### Mac OS
[](https://travis-ci.org/lovell/sharp)
|
||||
@@ -193,3 +195,47 @@ configuration file to prevent the use of coders known to be vulnerable.
|
||||
|
||||
Set the `MAGICK_CONFIGURE_PATH` environment variable
|
||||
to the directory containing the `policy.xml` file.
|
||||
|
||||
### Licences
|
||||
|
||||
If a global installation of libvips that meets the
|
||||
minimum version requirement cannot be found,
|
||||
this module will download a pre-compiled bundle of libvips
|
||||
and its dependencies on Linux and Windows machines.
|
||||
|
||||
Should you need to manually download and inspect these files,
|
||||
you can do so via https://dl.bintray.com/lovell/sharp/
|
||||
|
||||
This module is licensed under the terms of the
|
||||
[Apache 2.0 Licence](https://github.com/lovell/sharp/blob/master/LICENSE).
|
||||
|
||||
The libraries downloaded and used by this module
|
||||
are done so under the terms of the following licences,
|
||||
all of which are compatible with the Apache 2.0 Licence.
|
||||
|
||||
Use of libraries under the terms of the LGPLv3 is via the
|
||||
"any later version" clause of the LGPLv2 or LGPLv2.1.
|
||||
|
||||
| Library | Used under the terms of |
|
||||
|---------------|----------------------------------------------------------------------------------------------------------|
|
||||
| cairo | Mozilla Public License 2.0 |
|
||||
| fontconfig | [fontconfig Licence](https://cgit.freedesktop.org/fontconfig/tree/COPYING) (BSD-like) |
|
||||
| freetype | [freetype Licence](http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT) (BSD-like) |
|
||||
| giflib | MIT Licence |
|
||||
| glib | LGPLv3 |
|
||||
| harfbuzz | MIT Licence |
|
||||
| lcms | MIT Licence |
|
||||
| libcroco | LGPLv3 |
|
||||
| libexif | LGPLv3 |
|
||||
| libffi | MIT Licence |
|
||||
| libgsf | LGPLv3 |
|
||||
| libjpeg-turbo | [zlib License, IJG License](https://github.com/libjpeg-turbo/libjpeg-turbo/blob/master/LICENSE.md) |
|
||||
| libpng | [libpng License](http://www.libpng.org/pub/png/src/libpng-LICENSE.txt) |
|
||||
| librsvg | LGPLv3 |
|
||||
| libtiff | [libtiff License](http://www.libtiff.org/misc.html) (BSD-like) |
|
||||
| libvips | LGPLv3 |
|
||||
| libwebp | New BSD License |
|
||||
| libxml2 | MIT Licence |
|
||||
| pango | LGPLv3 |
|
||||
| pixman | MIT Licence |
|
||||
| zlib | [zlib Licence](https://github.com/madler/zlib/blob/master/zlib.h) |
|
||||
|
||||

BIN  icc/cmyk.icm  Normal file
BIN  icc/sRGB.icc  Normal file

256  index.js

@@ -42,7 +42,7 @@ var Sharp = function(input, options) {
stream.Duplex.call(this);
this.options = {
// input options
bufferIn: null,
bufferIn: [],
streamIn: false,
sequentialRead: false,
limitInputPixels: maximum.pixels,

@@ -85,13 +85,23 @@ var Sharp = function(input, options) {
sharpenFlat: 1,
sharpenJagged: 2,
threshold: 0,
thresholdGrayscale: true,
trimTolerance: 0,
gamma: 0,
greyscale: false,
normalize: 0,
bandBoolOp: null,
booleanOp: null,
booleanBufferIn: null,
booleanFileIn: '',
// overlay
overlayFileIn: '',
overlayBufferIn: null,
overlayGravity: 0,
overlayXOffset : -1,
overlayYOffset : -1,
overlayTile: false,
overlayCutout: false,
// output options
formatOut: 'input',
fileOut: '',

@@ -108,6 +118,7 @@ var Sharp = function(input, options) {
withMetadataOrientation: -1,
tileSize: 256,
tileOverlap: 0,
extractChannel: -1,
// Function to notify of queue length changes
queueListener: function(queueLength) {
module.exports.queue.emit('change', queueLength);

@@ -217,18 +228,8 @@ Sharp.prototype._inputOptions = function(options) {
Sharp.prototype._write = function(chunk, encoding, callback) {
/*jslint unused: false */
if (this.options.streamIn) {
if (typeof chunk === 'object' && chunk instanceof Buffer) {
if (this.options.bufferIn instanceof Buffer) {
// Append to existing Buffer
this.options.bufferIn = Buffer.concat(
[this.options.bufferIn, chunk],
this.options.bufferIn.length + chunk.length
);
} else {
// Create new Buffer
this.options.bufferIn = new Buffer(chunk.length);
chunk.copy(this.options.bufferIn);
}
if (isBuffer(chunk)) {
this.options.bufferIn.push(chunk);
callback();
} else {
callback(new Error('Non-Buffer data on Writable Stream'));

@@ -238,6 +239,15 @@ Sharp.prototype._write = function(chunk, encoding, callback) {
}
};

/*
Flattens the array of chunks in bufferIn
*/
Sharp.prototype._flattenBufferIn = function() {
if (Array.isArray(this.options.bufferIn)) {
this.options.bufferIn = Buffer.concat(this.options.bufferIn);
}
};

// Weighting to apply to image crop
module.exports.gravity = {
center: 0,

@@ -297,6 +307,21 @@ Sharp.prototype.extract = function(options) {
return this;
};

Sharp.prototype.extractChannel = function(channel) {
if (channel === 'red')
channel = 0;
else if (channel === 'green')
channel = 1;
else if (channel === 'blue')
channel = 2;
if(isInteger(channel) && inRange(channel,0,4)) {
this.options.extractChannel = channel;
} else {
throw new Error('Cannot extract invalid channel ' + channel);
}
return this;
};

/*
Set the background colour for embed and flatten operations.
Delegates to the 'Color' module, which can throw an Error

@@ -334,12 +359,31 @@ Sharp.prototype.ignoreAspectRatio = function() {
};

Sharp.prototype.flatten = function(flatten) {
this.options.flatten = (typeof flatten === 'boolean') ? flatten : true;
this.options.flatten = isBoolean(flatten) ? flatten : true;
return this;
};

Sharp.prototype.negate = function(negate) {
this.options.negate = (typeof negate === 'boolean') ? negate : true;
this.options.negate = isBoolean(negate) ? negate : true;
return this;
};

/*
Bitwise boolean operations between images
*/
Sharp.prototype.boolean = function(operand, operator) {
if (isString(operand)) {
this.options.booleanFileIn = operand;
} else if (isBuffer(operand)) {
this.options.booleanBufferIn = operand;
} else {
throw new Error('Unsupported boolean operand ' + typeof operand);
}
if (isString(operator) && contains(operator, ['and', 'or', 'eor'])) {
this.options.booleanOp = operator;
} else {
throw new Error('Invalid boolean operation ' + operator);
}
return this;
};

@@ -355,12 +399,39 @@ Sharp.prototype.overlayWith = function(overlay, options) {
throw new Error('Unsupported overlay ' + typeof overlay);
}
if (isObject(options)) {
if (isInteger(options.gravity) && inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (isString(options.gravity) && isInteger(module.exports.gravity[options.gravity])) {
this.options.overlayGravity = module.exports.gravity[options.gravity];
} else if (isDefined(options.gravity)) {
throw new Error('Unsupported overlay gravity ' + options.gravity);
if (isDefined(options.tile)) {
if (isBoolean(options.tile)) {
this.options.overlayTile = options.tile;
} else {
throw new Error('Invalid overlay tile ' + options.tile);
}
}
if (isDefined(options.cutout)) {
if (isBoolean(options.cutout)) {
this.options.overlayCutout = options.cutout;
} else {
throw new Error('Invalid overlay cutout ' + options.cutout);
}
}
if (isDefined(options.left) || isDefined(options.top)) {
if (
isInteger(options.left) && inRange(options.left, 0, maximum.width) &&
isInteger(options.top) && inRange(options.top, 0, maximum.height)
) {
this.options.overlayXOffset = options.left;
this.options.overlayYOffset = options.top;
} else {
throw new Error('Invalid overlay left ' + options.left + ' and/or top ' + options.top);
}
}
if (isDefined(options.gravity)) {
if(isInteger(options.gravity) && inRange(options.gravity, 0, 8)) {
this.options.overlayGravity = options.gravity;
} else if (isString(options.gravity) && isInteger(module.exports.gravity[options.gravity])) {
this.options.overlayGravity = module.exports.gravity[options.gravity];
} else {
throw new Error('Unsupported overlay gravity ' + options.gravity);
}
}
}
return this;

@@ -371,9 +442,9 @@ Sharp.prototype.overlayWith = function(overlay, options) {
Auto-rotation based on the EXIF Orientation tag is represented by an angle of -1
*/
Sharp.prototype.rotate = function(angle) {
if (typeof angle === 'undefined') {
if (!isDefined(angle)) {
this.options.angle = -1;
} else if (!Number.isNaN(angle) && [0, 90, 180, 270].indexOf(angle) !== -1) {
} else if (isInteger(angle) && contains(angle, [0, 90, 180, 270])) {
this.options.angle = angle;
} else {
throw new Error('Unsupported angle (0, 90, 180, 270) ' + angle);

@@ -385,7 +456,7 @@ Sharp.prototype.rotate = function(angle) {
Flip the image vertically, about the Y axis
*/
Sharp.prototype.flip = function(flip) {
this.options.flip = (typeof flip === 'boolean') ? flip : true;
this.options.flip = isBoolean(flip) ? flip : true;
return this;
};

@@ -393,7 +464,7 @@ Sharp.prototype.flip = function(flip) {
Flop the image horizontally, about the X axis
*/
Sharp.prototype.flop = function(flop) {
this.options.flop = (typeof flop === 'boolean') ? flop : true;
this.options.flop = isBoolean(flop) ? flop : true;
return this;
};

@@ -403,7 +474,7 @@ Sharp.prototype.flop = function(flop) {
"change the dimensions of the image only if its width or height exceeds the geometry specification"
*/
Sharp.prototype.withoutEnlargement = function(withoutEnlargement) {
this.options.withoutEnlargement = (typeof withoutEnlargement === 'boolean') ? withoutEnlargement : true;
this.options.withoutEnlargement = isBoolean(withoutEnlargement) ? withoutEnlargement : true;
return this;
};

@@ -428,6 +499,32 @@ Sharp.prototype.blur = function(sigma) {
return this;
};

/*
Convolve the image with a kernel.
*/
Sharp.prototype.convolve = function(kernel) {
if (!isDefined(kernel) || !isDefined(kernel.kernel) ||
!isDefined(kernel.width) || !isDefined(kernel.height) ||
!inRange(kernel.width,3,1001) || !inRange(kernel.height,3,1001) ||
kernel.height * kernel.width != kernel.kernel.length
) {
// must pass in a kernel
throw new Error('Invalid convolution kernel');
}
if(!isDefined(kernel.scale)) {
var sum = 0;
kernel.kernel.forEach(function(e) {
sum += e;
});
kernel.scale = sum;
}
if(!isDefined(kernel.offset)) {
kernel.offset = 0;
}
this.options.convKernel = kernel;
return this;
};

/*
Sharpen the output image.
Call without a radius to use a fast, mild sharpen.

@@ -468,16 +565,37 @@ Sharp.prototype.sharpen = function(sigma, flat, jagged) {
return this;
};

Sharp.prototype.threshold = function(threshold) {
if (typeof threshold === 'undefined') {
Sharp.prototype.threshold = function(threshold, options) {
if (!isDefined(threshold)) {
this.options.threshold = 128;
} else if (typeof threshold === 'boolean') {
} else if (isBoolean(threshold)) {
this.options.threshold = threshold ? 128 : 0;
} else if (typeof threshold === 'number' && !Number.isNaN(threshold) && (threshold % 1 === 0) && threshold >= 0 && threshold <= 255) {
} else if (isInteger(threshold) && inRange(threshold, 0, 255)) {
this.options.threshold = threshold;
} else {
throw new Error('Invalid threshold (0 to 255) ' + threshold);
}
if (!isObject(options) || options.greyscale === true || options.grayscale === true) {
this.options.thresholdGrayscale = true;
} else {
this.options.thresholdGrayscale = false;
}
return this;
};

/*
Automatically remove "boring" image edges.
tolerance - if present, is a percentage tolerance level between 0 and 100 to trim away similar color values
Defaults to 10 when no tolerance is given.
*/
Sharp.prototype.trim = function(tolerance) {
if (!isDefined(tolerance)) {
this.options.trimTolerance = 10;
} else if (isInteger(tolerance) && inRange(tolerance, 1, 99)) {
this.options.trimTolerance = tolerance;
} else {
throw new Error('Invalid trim tolerance (1 to 99) ' + tolerance);
}
return this;
};

@@ -486,10 +604,10 @@ Sharp.prototype.threshold = function(threshold) {
Improves brightness of resized image in non-linear colour spaces.
*/
Sharp.prototype.gamma = function(gamma) {
if (typeof gamma === 'undefined') {
if (!isDefined(gamma)) {
// Default gamma correction of 2.2 (sRGB)
this.options.gamma = 2.2;
} else if (!Number.isNaN(gamma) && gamma >= 1 && gamma <= 3) {
} else if (isNumber(gamma) && inRange(gamma, 1, 3)) {
this.options.gamma = gamma;
} else {
throw new Error('Invalid gamma correction (1.0 to 3.0) ' + gamma);

@@ -501,32 +619,44 @@ Sharp.prototype.gamma = function(gamma) {
Enhance output image contrast by stretching its luminance to cover the full dynamic range
*/
Sharp.prototype.normalize = function(normalize) {
this.options.normalize = (typeof normalize === 'boolean') ? normalize : true;
this.options.normalize = isBoolean(normalize) ? normalize : true;
return this;
};
Sharp.prototype.normalise = Sharp.prototype.normalize;

/*
Perform boolean/bitwise operation on image color channels - results in one channel image
*/
Sharp.prototype.bandbool = function(boolOp) {
if (isString(boolOp) && contains(boolOp, ['and', 'or', 'eor'])) {
this.options.bandBoolOp = boolOp;
} else {
throw new Error('Invalid bandbool operation ' + boolOp);
}
return this;
};

/*
Convert to greyscale
*/
Sharp.prototype.greyscale = function(greyscale) {
this.options.greyscale = (typeof greyscale === 'boolean') ? greyscale : true;
this.options.greyscale = isBoolean(greyscale) ? greyscale : true;
return this;
};
Sharp.prototype.grayscale = Sharp.prototype.greyscale;

Sharp.prototype.progressive = function(progressive) {
this.options.progressive = (typeof progressive === 'boolean') ? progressive : true;
this.options.progressive = isBoolean(progressive) ? progressive : true;
return this;
};

Sharp.prototype.sequentialRead = function(sequentialRead) {
this.options.sequentialRead = (typeof sequentialRead === 'boolean') ? sequentialRead : true;
this.options.sequentialRead = isBoolean(sequentialRead) ? sequentialRead : true;
return this;
};

Sharp.prototype.quality = function(quality) {
if (!Number.isNaN(quality) && quality >= 1 && quality <= 100 && quality % 1 === 0) {
if (isInteger(quality) && inRange(quality, 1, 100)) {
this.options.quality = quality;
} else {
throw new Error('Invalid quality (1 to 100) ' + quality);

@@ -538,7 +668,7 @@ Sharp.prototype.quality = function(quality) {
zlib compression level for PNG output
*/
Sharp.prototype.compressionLevel = function(compressionLevel) {
if (!Number.isNaN(compressionLevel) && compressionLevel >= 0 && compressionLevel <= 9) {
if (isInteger(compressionLevel) && inRange(compressionLevel, 0, 9)) {
this.options.compressionLevel = compressionLevel;
} else {
throw new Error('Invalid compressionLevel (0 to 9) ' + compressionLevel);

@@ -550,7 +680,7 @@ Sharp.prototype.compressionLevel = function(compressionLevel) {
Disable the use of adaptive row filtering for PNG output
*/
Sharp.prototype.withoutAdaptiveFiltering = function(withoutAdaptiveFiltering) {
this.options.withoutAdaptiveFiltering = (typeof withoutAdaptiveFiltering === 'boolean') ? withoutAdaptiveFiltering : true;
this.options.withoutAdaptiveFiltering = isBoolean(withoutAdaptiveFiltering) ? withoutAdaptiveFiltering : true;
return this;
};

@@ -558,7 +688,7 @@ Sharp.prototype.withoutAdaptiveFiltering = function(withoutAdaptiveFiltering) {
Disable the use of chroma subsampling for JPEG output
*/
Sharp.prototype.withoutChromaSubsampling = function(withoutChromaSubsampling) {
this.options.withoutChromaSubsampling = (typeof withoutChromaSubsampling === 'boolean') ? withoutChromaSubsampling : true;
this.options.withoutChromaSubsampling = isBoolean(withoutChromaSubsampling) ? withoutChromaSubsampling : true;
return this;
};

@@ -566,7 +696,7 @@ Sharp.prototype.withoutChromaSubsampling = function(withoutChromaSubsampling) {
Apply trellis quantisation to JPEG output - requires mozjpeg 3.0+
*/
Sharp.prototype.trellisQuantisation = function(trellisQuantisation) {
this.options.trellisQuantisation = (typeof trellisQuantisation === 'boolean') ? trellisQuantisation : true;
this.options.trellisQuantisation = isBoolean(trellisQuantisation) ? trellisQuantisation : true;
return this;
};
Sharp.prototype.trellisQuantization = Sharp.prototype.trellisQuantisation;

@@ -575,7 +705,7 @@ Sharp.prototype.trellisQuantization = Sharp.prototype.trellisQuantisation;
Apply overshoot deringing to JPEG output - requires mozjpeg 3.0+
*/
Sharp.prototype.overshootDeringing = function(overshootDeringing) {
this.options.overshootDeringing = (typeof overshootDeringing === 'boolean') ? overshootDeringing : true;
this.options.overshootDeringing = isBoolean(overshootDeringing) ? overshootDeringing : true;
return this;
};

@@ -583,7 +713,7 @@ Sharp.prototype.overshootDeringing = function(overshootDeringing) {
Optimise scans in progressive JPEG output - requires mozjpeg 3.0+
*/
Sharp.prototype.optimiseScans = function(optimiseScans) {
this.options.optimiseScans = (typeof optimiseScans === 'boolean') ? optimiseScans : true;
this.options.optimiseScans = isBoolean(optimiseScans) ? optimiseScans : true;
if (this.options.optimiseScans) {
this.progressive();
}

@@ -597,16 +727,10 @@ Sharp.prototype.optimizeScans = Sharp.prototype.optimiseScans;
orientation: numeric value for EXIF Orientation tag
*/
Sharp.prototype.withMetadata = function(withMetadata) {
this.options.withMetadata = (typeof withMetadata === 'boolean') ? withMetadata : true;
if (typeof withMetadata === 'object') {
if ('orientation' in withMetadata) {
if (
typeof withMetadata.orientation === 'number' &&
!Number.isNaN(withMetadata.orientation) &&
withMetadata.orientation % 1 === 0 &&
withMetadata.orientation >= 1 &&
withMetadata.orientation <= 8
) {
this.options.withMetadata = isBoolean(withMetadata) ? withMetadata : true;
if (isObject(withMetadata)) {
if (isDefined(withMetadata.orientation)) {
if (isInteger(withMetadata.orientation) && inRange(withMetadata.orientation, 1, 8)) {
this.options.withMetadataOrientation = withMetadata.orientation;
} else {
throw new Error('Invalid orientation (1 to 8) ' + withMetadata.orientation);

@@ -703,6 +827,12 @@ module.exports.interpolator = {
vsqbs: 'vsqbs',
vertexSplitQuadraticBasisSpline: 'vsqbs'
};
// Boolean operations for bandbool
module.exports.bool = {
and: 'and',
or: 'or',
eor: 'eor'
};

/*
Resize image to width x height pixels

@@ -847,12 +977,11 @@ Sharp.prototype.raw = function() {
@param format is either the id as a String or an Object with an 'id' attribute
*/
Sharp.prototype.toFormat = function(formatOut) {
if (isObject(formatOut) && isDefined(formatOut.id)) {
formatOut = formatOut.id;
}
if (
isDefined(formatOut) &&
['jpeg', 'png', 'webp', 'raw', 'tiff', 'dz', 'input'].indexOf(formatOut) !== -1
if (isObject(formatOut) && isString(formatOut.id)) {
this.options.formatOut = formatOut.id;
} else if (
isString(formatOut) &&
contains(formatOut, ['jpeg', 'png', 'webp', 'raw', 'tiff', 'dz', 'input'])
) {
this.options.formatOut = formatOut;
} else {

@@ -882,6 +1011,7 @@ Sharp.prototype._pipeline = function(callback) {
if (this.options.streamIn) {
// output=file/buffer, input=stream
this.on('finish', function() {
that._flattenBufferIn();
sharp.pipeline(that.options, callback);
});
} else {

@@ -894,6 +1024,7 @@ Sharp.prototype._pipeline = function(callback) {
if (this.options.streamIn) {
// output=stream, input=stream
this.on('finish', function() {
that._flattenBufferIn();
sharp.pipeline(that.options, function(err, data, info) {
if (err) {
that.emit('error', err);

@@ -923,6 +1054,7 @@ Sharp.prototype._pipeline = function(callback) {
// output=promise, input=stream
return new BluebirdPromise(function(resolve, reject) {
that.on('finish', function() {
that._flattenBufferIn();
sharp.pipeline(that.options, function(err, data) {
if (err) {
reject(err);

@@ -956,6 +1088,7 @@ Sharp.prototype.metadata = function(callback) {
if (typeof callback === 'function') {
if (this.options.streamIn) {
this.on('finish', function() {
that._flattenBufferIn();
sharp.metadata(that.options, callback);
});
} else {

@@ -966,6 +1099,7 @@ Sharp.prototype.metadata = function(callback) {
if (this.options.streamIn) {
return new BluebirdPromise(function(resolve, reject) {
that.on('finish', function() {
that._flattenBufferIn();
sharp.metadata(that.options, function(err, metadata) {
if (err) {
reject(err);

@@ -994,13 +1128,15 @@ Sharp.prototype.metadata = function(callback) {
Cloned instances share the same input.
*/
Sharp.prototype.clone = function() {
var that = this;
// Clone existing options
var clone = new Sharp();
util._extend(clone.options, this.options);
// Pass 'finish' event to clone for Stream-based input
this.on('finish', function() {
// Clone inherits input data
clone.options.bufferIn = this.options.bufferIn;
that._flattenBufferIn();
clone.options.bufferIn = that.options.bufferIn;
clone.emit('finish');
});
return clone;

@@ -1,7 +1,7 @@
site_name: sharp
site_url: http://sharp.dimens.io/
repo_url: https://github.com/lovell/sharp
site_description: The fastest Node.js module for resizing JPEG, PNG, WebP and TIFF images. Uses the libvips library.
site_description: High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images
copyright: <a href="https://dimens.io/">dimens.io</a>
google_analytics: ['UA-13034748-12', 'sharp.dimens.io']
theme: readthedocs

24  package.json

@@ -1,6 +1,6 @@
{
"name": "sharp",
"version": "0.15.0",
"version": "0.15.1",
"author": "Lovell Fuller <npm@lovell.info>",
"contributors": [
"Pierre Inglebert <pierre.inglebert@gmail.com>",

@@ -22,7 +22,11 @@
"John Tobin <john@limelightmobileinc.com>",
"Kenton Gray <kentongray@gmail.com>",
"Felix Bünemann <Felix.Buenemann@gmail.com>",
"Samy Al Zahrani <samyalzahrany@gmail.com>"
"Samy Al Zahrani <samyalzahrany@gmail.com>",
"Chintan Thakkar <lemnisk8@gmail.com>",
"F. Orlando Galashan <frulo@gmx.de>",
"Kleis Auke Wolthuizen <info@kleisauke.nl>",
"Matt Hirsch <mhirsch@media.mit.edu>"
],
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"scripts": {

@@ -54,11 +58,11 @@
"vips"
],
"dependencies": {
"bluebird": "^3.3.5",
"color": "^0.11.1",
"nan": "^2.2.1",
"semver": "^5.1.0",
"request": "^2.71.0",
"bluebird": "^3.4.1",
"color": "^0.11.3",
"nan": "^2.4.0",
"semver": "^5.2.0",
"request": "^2.73.0",
"tar": "^2.2.1"
},
"devDependencies": {

@@ -67,11 +71,11 @@
"coveralls": "^2.11.9",
"exif-reader": "^1.0.0",
"icc": "^0.0.2",
"istanbul": "^0.4.3",
"mocha": "^2.4.5",
"istanbul": "^0.4.4",
"mocha": "^2.5.3",
"mocha-jshint": "^2.3.1",
"node-cpplint": "^0.4.0",
"rimraf": "^2.5.2",
"rimraf": "^2.5.3",
"unzip": "^0.1.11"
},
"license": "Apache-2.0",

@@ -55,6 +55,9 @@ namespace sharp {
bool IsDzZip(std::string const &str) {
return EndsWith(str, ".zip") || EndsWith(str, ".ZIP") || EndsWith(str, ".szi") || EndsWith(str, ".SZI");
}
bool IsV(std::string const &str) {
return EndsWith(str, ".v") || EndsWith(str, ".V") || EndsWith(str, ".vips") || EndsWith(str, ".VIPS");
}

/*
Provide a string identifier for the given image type.

@@ -73,6 +76,7 @@ namespace sharp {
case ImageType::OPENSLIDE: id = "openslide"; break;
case ImageType::PPM: id = "ppm"; break;
case ImageType::FITS: id = "fits"; break;
case ImageType::VIPS: id = "v"; break;
case ImageType::RAW: id = "raw"; break;
case ImageType::UNKNOWN: id = "unknown"; break;
}

@@ -136,6 +140,8 @@ namespace sharp {
imageType = ImageType::PPM;
} else if (EndsWith(loader, "Fits")) {
imageType = ImageType::FITS;
} else if (EndsWith(loader, "Vips")) {
imageType = ImageType::VIPS;
} else if (EndsWith(loader, "Magick") || EndsWith(loader, "MagickFile")) {
imageType = ImageType::MAGICK;
}

@@ -277,4 +283,63 @@ namespace sharp {
return std::make_tuple(left, top);
}

/*
Calculate the (left, top) coordinates of the output image
within the input image, applying the given x and y offsets.
*/
std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
int const outWidth, int const outHeight, int const x, int const y) {

// default values
int left = 0;
int top = 0;

// assign only if valid
if(x >= 0 && x < (inWidth - outWidth)) {
left = x;
} else if(x >= (inWidth - outWidth)) {
left = inWidth - outWidth;
}

if(y >= 0 && y < (inHeight - outHeight)) {
top = y;
} else if(y >= (inHeight - outHeight)) {
top = inHeight - outHeight;
}

// the resulting left and top could have been outside the image after calculation from bottom/right edges
if(left < 0) {
left = 0;
}
if(top < 0) {
top = 0;
}

return std::make_tuple(left, top);
}

/*
Are pixel values in this image 16-bit integer?
*/
bool Is16Bit(VipsInterpretation const interpretation) {
return interpretation == VIPS_INTERPRETATION_RGB16 || interpretation == VIPS_INTERPRETATION_GREY16;
}

/*
Return the image alpha maximum. Useful for combining alpha bands. scRGB
images are 0 - 1 for image data, but the alpha is 0 - 255.
*/
double MaximumImageAlpha(VipsInterpretation const interpretation) {
return Is16Bit(interpretation) ? 65535.0 : 255.0;
}

/*
Get boolean operation type from string
*/
VipsOperationBoolean GetBooleanOperation(std::string const opStr) {
return static_cast<VipsOperationBoolean>(
vips_enum_from_nick(nullptr, VIPS_TYPE_OPERATION_BOOLEAN, opStr.data())
);
}

} // namespace sharp

25  src/common.h

@@ -22,6 +22,7 @@ namespace sharp {
OPENSLIDE,
PPM,
FITS,
VIPS,
RAW,
UNKNOWN
};

@@ -39,6 +40,7 @@ namespace sharp {
bool IsTiff(std::string const &str);
bool IsDz(std::string const &str);
bool IsDzZip(std::string const &str);
bool IsV(std::string const &str);

/*
Provide a string identifier for the given image type.

@@ -108,6 +110,29 @@ namespace sharp {
std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
int const outWidth, int const outHeight, int const gravity);

/*
Calculate the (left, top) coordinates of the output image
within the input image, applying the given x and y offsets of the output image.
*/
std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight,
int const outWidth, int const outHeight, int const x, int const y);

/*
Are pixel values in this image 16-bit integer?
*/
bool Is16Bit(VipsInterpretation const interpretation);

/*
Return the image alpha maximum. Useful for combining alpha bands. scRGB
images are 0 - 1 for image data, but the alpha is 0 - 255.
*/
double MaximumImageAlpha(VipsInterpretation const interpretation);

/*
Get boolean operation type from string
*/
VipsOperationBoolean GetBooleanOperation(std::string const opStr);

} // namespace sharp

#endif // SRC_COMMON_H_

@@ -1,5 +1,6 @@
#include <algorithm>
#include <tuple>
#include <memory>
#include <vips/vips8>

#include "common.h"

@@ -15,6 +16,49 @@ namespace sharp {
Assumes alpha channels are already premultiplied and will be unpremultiplied after.
*/
VImage Composite(VImage src, VImage dst, const int gravity) {
if(IsInputValidForComposition(src, dst)) {
// Enlarge overlay src, if required
if (src.width() < dst.width() || src.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), src.width(), src.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
}
return CompositeImage(src, dst);
}
// If the input was not valid for composition then return the input image itself
return dst;
}

VImage Composite(VImage src, VImage dst, const int x, const int y) {
if(IsInputValidForComposition(src, dst)) {
// Enlarge overlay src, if required
if (src.width() < dst.width() || src.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given x and y offsets.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), src.width(), src.height(), x, y);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
}
return CompositeImage(src, dst);
}
// If the input was not valid for composition then return the input image itself
return dst;
}

bool IsInputValidForComposition(VImage src, VImage dst) {
using sharp::CalculateCrop;
using sharp::HasAlpha;

@@ -28,20 +72,10 @@ namespace sharp {
throw VError("Overlay image must have same dimensions or smaller");
}

// Enlarge overlay src, if required
if (src.width() < dst.width() || src.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), src.width(), src.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
}
return true;
}

VImage CompositeImage(VImage src, VImage dst) {
// Split src into non-alpha and alpha channels
VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1));
VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0);

@@ -81,6 +115,65 @@ namespace sharp {
return outRGBPremultiplied.bandjoin(outAlphaNormalized * 255.0);
}

/*
Cutout src over dst with given gravity.
*/
VImage Cutout(VImage mask, VImage dst, const int gravity) {
using sharp::CalculateCrop;
using sharp::HasAlpha;
using sharp::MaximumImageAlpha;

bool maskHasAlpha = HasAlpha(mask);

if (!maskHasAlpha && mask.bands() > 1) {
throw VError("Overlay image must have an alpha channel or one band");
}
if (!HasAlpha(dst)) {
throw VError("Image to be overlaid must have an alpha channel");
}
if (mask.width() > dst.width() || mask.height() > dst.height()) {
throw VError("Overlay image must have same dimensions or smaller");
}

// Enlarge overlay mask, if required
if (mask.width() < dst.width() || mask.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
int left;
int top;
std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), mask.width(), mask.height(), gravity);
// Embed onto transparent background
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
}

// we use the mask alpha if it has alpha
if(maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));
}

// Split dst into an optional alpha
VImage dstAlpha = dst.extract_band(dst.bands() - 1, VImage::option()->set("n", 1));

// we use the dst non-alpha
dst = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1));

// the range of the mask and the image need to match .. one could be
// 16-bit, one 8-bit
double const dstMax = MaximumImageAlpha(dst.interpretation());
double const maskMax = MaximumImageAlpha(mask.interpretation());

// combine the new mask and the existing alpha ... there are
// many ways of doing this, mult is the simplest
mask = dstMax * ((mask / maskMax) * (dstAlpha / dstMax));

// append the mask to the image data ... the mask might be float now,
// we must cast the format down to match the image data
return dst.bandjoin(mask.cast(dst.format()));
}

/*
* Stretch luminance to cover full dynamic range.
*/

@@ -152,6 +245,26 @@ namespace sharp {
}
}

/*
* Convolution with a kernel.
*/
VImage Convolve(VImage image, int const width, int const height,
double const scale, double const offset,
std::unique_ptr<double[]> const &kernel_v
) {
VImage kernel = VImage::new_from_memory(
kernel_v.get(),
width * height * sizeof(double),
width,
height,
1,
VIPS_FORMAT_DOUBLE);
kernel.set("scale", scale);
kernel.set("offset", offset);

return image.conv(kernel);
}

/*
* Sharpen flat and jagged areas. Use sigma of -1.0 for fast sharpen.
*/

@@ -166,9 +279,13 @@ namespace sharp {
return image.conv(sharpen);
} else {
// Slow, accurate sharpen in LAB colour space, with control over flat vs jagged areas
VipsInterpretation colourspaceBeforeSharpen = image.interpretation();
if (colourspaceBeforeSharpen == VIPS_INTERPRETATION_RGB) {
colourspaceBeforeSharpen = VIPS_INTERPRETATION_sRGB;
}
return image.sharpen(
VImage::option()->set("sigma", sigma)->set("m1", flat)->set("m2", jagged)
);
).colourspace(colourspaceBeforeSharpen);
}
}
@@ -268,4 +385,77 @@ namespace sharp {
|
||||
);
|
||||
}
|
||||
|
||||
VImage Threshold(VImage image, double const threshold, bool const thresholdGrayscale) {
|
||||
if(!thresholdGrayscale) {
|
||||
return image >= threshold;
|
||||
}
|
||||
return image.colourspace(VIPS_INTERPRETATION_B_W) >= threshold;
|
||||
}
|
||||
|
||||
/*
|
||||
Perform boolean/bitwise operation on image color channels - results in one channel image
|
||||
*/
|
||||
VImage Bandbool(VImage image, VipsOperationBoolean const boolean) {
|
||||
return image.bandbool(boolean);
|
||||
}
|
||||
|
||||
/*
|
||||
Perform bitwise boolean operation between images
|
||||
*/
|
||||
VImage Boolean(VImage image, VImage imageR, VipsOperationBoolean const boolean) {
|
||||
return image.boolean(imageR, boolean);
|
||||
}
|
||||
|
||||
  VImage Trim(VImage image, int const tolerance) {
    using sharp::MaximumImageAlpha;
    // An equivalent of ImageMagick's -trim in C++ ... automatically remove
    // "boring" image edges.

    // We use .project to sum the rows and columns of a 0/255 mask image, the first
    // non-zero row or column is the object edge. We make the mask image with an
    // amount-different-from-background image plus a threshold.

    // find the value of the pixel at (0, 0) ... we will search for all pixels
    // significantly different from this
    std::vector<double> background = image(0, 0);

    double const max = MaximumImageAlpha(image.interpretation());

    // we need to smooth the image, subtract the background from every pixel, take
    // the absolute value of the difference, then threshold
    VImage mask = (image.median(3) - background).abs() > (max * tolerance / 100);

    // sum mask rows and columns, then search for the first non-zero sum in each
    // direction
    VImage rows;
    VImage columns = mask.project(&rows);

    VImage profileLeftV;
    VImage profileLeftH = columns.profile(&profileLeftV);

    VImage profileRightV;
    VImage profileRightH = columns.fliphor().profile(&profileRightV);

    VImage profileTopV;
    VImage profileTopH = rows.profile(&profileTopV);

    VImage profileBottomV;
    VImage profileBottomH = rows.flipver().profile(&profileBottomV);

    int left = static_cast<int>(floor(profileLeftV.min()));
    int right = columns.width() - static_cast<int>(floor(profileRightV.min()));
    int top = static_cast<int>(floor(profileTopH.min()));
    int bottom = rows.height() - static_cast<int>(floor(profileBottomH.min()));

    int width = right - left;
    int height = bottom - top;

    if (width <= 0 || height <= 0) {
      throw VError("Unexpected error while trimming. Try to lower the tolerance");
    }

    // and now crop the original image
    return image.extract_area(left, top, width, height);
  }

}  // namespace sharp
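A sketch of the corresponding JavaScript call via the new `trim([tolerance])` API; the path is a placeholder.

```javascript
// Remove "boring" edges whose values are within 20% of the top-left pixel;
// calling .trim() with no argument uses the documented default tolerance of 10.
var sharp = require('sharp');

sharp('framed.png')
  .trim(20)
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // info.width and info.height reflect the cropped region
  });
```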
@@ -2,6 +2,7 @@
 #define SRC_OPERATIONS_H_

 #include <tuple>
 #include <memory>
 #include <vips/vips8>

 using vips::VImage;
@@ -14,6 +15,27 @@ namespace sharp {
   */
  VImage Composite(VImage src, VImage dst, const int gravity);

  /*
    Alpha composite src over dst with given x and y offsets.
    Assumes alpha channels are already premultiplied and will be unpremultiplied after.
  */
  VImage Composite(VImage src, VImage dst, const int x, const int y);

  /*
    Check if the src and dst Images for composition operation are valid
  */
  bool IsInputValidForComposition(VImage src, VImage dst);

  /*
    Given a valid src and dst, returns the composite of the two images
  */
  VImage CompositeImage(VImage src, VImage dst);

  /*
    Cutout src over dst with given gravity.
  */
  VImage Cutout(VImage src, VImage dst, const int gravity);

  /*
   * Stretch luminance to cover full dynamic range.
   */
@@ -29,6 +51,12 @@ namespace sharp {
   */
  VImage Blur(VImage image, double const sigma);

  /*
   * Convolution with a kernel.
   */
  VImage Convolve(VImage image, int const width, int const height,
    double const scale, double const offset, std::unique_ptr<double[]> const &kernel_v);

  /*
   * Sharpen flat and jagged areas. Use sigma of -1.0 for fast sharpen.
   */
@@ -49,6 +77,26 @@ namespace sharp {
   */
  VImage TileCache(VImage image, double const factor);

  /*
    Threshold an image
  */
  VImage Threshold(VImage image, double const threshold, bool const thresholdColor);

  /*
    Perform boolean/bitwise operation on image color channels - results in one channel image
  */
  VImage Bandbool(VImage image, VipsOperationBoolean const boolean);

  /*
    Perform bitwise boolean operation between images
  */
  VImage Boolean(VImage image, VImage imageR, VipsOperationBoolean const boolean);

  /*
    Trim an image
  */
  VImage Trim(VImage image, int const tolerance);

}  // namespace sharp

#endif  // SRC_OPERATIONS_H_
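The second `Composite` overload declared above backs the new top/left offset form of `overlayWith()`; a sketch matching the new overlay tests, with placeholder paths:

```javascript
// Place the overlay at a fixed offset from the top-left corner rather than by
// gravity; the new tests below also combine offsets with the tile option.
var sharp = require('sharp');

sharp('background.jpg')
  .resize(400)
  .overlayWith('watermark.png', { top: 10, left: 10 })
  .toFile('composited.jpg', function(err) {
    if (err) throw err;
  });
```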
src/pipeline.cc (329 lines changed)
@@ -2,6 +2,8 @@
|
||||
#include <cmath>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
#include <memory>
|
||||
#include <numeric>
|
||||
|
||||
#include <vips/vips8>
|
||||
|
||||
@@ -45,12 +47,18 @@ using vips::VOption;
|
||||
using vips::VError;
|
||||
|
||||
using sharp::Composite;
|
||||
using sharp::Cutout;
|
||||
using sharp::Normalize;
|
||||
using sharp::Gamma;
|
||||
using sharp::Blur;
|
||||
using sharp::Convolve;
|
||||
using sharp::Sharpen;
|
||||
using sharp::EntropyCrop;
|
||||
using sharp::TileCache;
|
||||
using sharp::Threshold;
|
||||
using sharp::Bandbool;
|
||||
using sharp::Boolean;
|
||||
using sharp::Trim;
|
||||
|
||||
using sharp::ImageType;
|
||||
using sharp::ImageTypeId;
|
||||
@@ -67,23 +75,30 @@ using sharp::IsWebp;
|
||||
using sharp::IsTiff;
|
||||
using sharp::IsDz;
|
||||
using sharp::IsDzZip;
|
||||
using sharp::IsV;
|
||||
using sharp::FreeCallback;
|
||||
using sharp::CalculateCrop;
|
||||
using sharp::Is16Bit;
|
||||
using sharp::MaximumImageAlpha;
|
||||
using sharp::GetBooleanOperation;
|
||||
|
||||
using sharp::counterProcess;
|
||||
using sharp::counterQueue;
|
||||
|
||||
class PipelineWorker : public AsyncWorker {
|
||||
public:
|
||||
PipelineWorker(Callback *callback, PipelineBaton *baton, Callback *queueListener,
|
||||
const Local<Object> &bufferIn, const Local<Object> &overlayBufferIn) :
|
||||
AsyncWorker(callback), baton(baton), queueListener(queueListener) {
|
||||
if (baton->bufferInLength > 0) {
|
||||
SaveToPersistent("bufferIn", bufferIn);
|
||||
PipelineWorker(
|
||||
Callback *callback, PipelineBaton *baton, Callback *queueListener,
|
||||
std::vector<Local<Object>> const buffersToPersist
|
||||
) : AsyncWorker(callback), baton(baton), queueListener(queueListener), buffersToPersist(buffersToPersist) {
|
||||
// Protect Buffer objects from GC, keyed on index
|
||||
std::accumulate(buffersToPersist.begin(), buffersToPersist.end(), 0,
|
||||
[this](uint32_t index, Local<Object> const buffer) -> uint32_t {
|
||||
SaveToPersistent(index, buffer);
|
||||
return index + 1;
|
||||
}
|
||||
if (baton->overlayBufferInLength > 0) {
|
||||
SaveToPersistent("overlayBufferIn", overlayBufferIn);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
~PipelineWorker() {}
|
||||
|
||||
/*
|
||||
@@ -95,8 +110,8 @@ class PipelineWorker : public AsyncWorker {
|
||||
// Increment processing task counter
|
||||
g_atomic_int_inc(&counterProcess);
|
||||
|
||||
// Latest v2 sRGB ICC profile
|
||||
std::string srgbProfile = baton->iccProfilePath + "sRGB_IEC61966-2-1_black_scaled.icc";
|
||||
// Default sRGB ICC profile from https://packages.debian.org/sid/all/icc-profiles-free/filelist
|
||||
std::string srgbProfile = baton->iccProfilePath + "sRGB.icc";
|
||||
|
||||
// Input
|
||||
ImageType inputImageType = ImageType::UNKNOWN;
|
||||
@@ -202,6 +217,11 @@ class PipelineWorker : public AsyncWorker {
|
||||
RemoveExifOrientation(image);
|
||||
}
|
||||
|
||||
// Trim
|
||||
if(baton->trimTolerance != 0) {
|
||||
image = Trim(image, baton->trimTolerance);
|
||||
}
|
||||
|
||||
// Pre extraction
|
||||
if (baton->topOffsetPre != -1) {
|
||||
image = image.extract_area(baton->leftOffsetPre, baton->topOffsetPre, baton->widthPre, baton->heightPre);
|
||||
@@ -219,34 +239,46 @@ class PipelineWorker : public AsyncWorker {
|
||||
// Scaling calculations
|
||||
double xfactor = 1.0;
|
||||
double yfactor = 1.0;
|
||||
int targetResizeWidth = baton->width;
|
||||
int targetResizeHeight = baton->height;
|
||||
if (baton->width > 0 && baton->height > 0) {
|
||||
// Fixed width and height
|
||||
xfactor = static_cast<double>(inputWidth) / (static_cast<double>(baton->width) + 0.1);
|
||||
yfactor = static_cast<double>(inputHeight) / (static_cast<double>(baton->height) + 0.1);
|
||||
xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
|
||||
yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
|
||||
switch (baton->canvas) {
|
||||
case Canvas::CROP:
|
||||
xfactor = std::min(xfactor, yfactor);
|
||||
yfactor = xfactor;
|
||||
if (xfactor < yfactor) {
|
||||
targetResizeHeight = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
yfactor = xfactor;
|
||||
} else {
|
||||
targetResizeWidth = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
xfactor = yfactor;
|
||||
}
|
||||
break;
|
||||
case Canvas::EMBED:
|
||||
xfactor = std::max(xfactor, yfactor);
|
||||
yfactor = xfactor;
|
||||
if (xfactor > yfactor) {
|
||||
targetResizeHeight = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
yfactor = xfactor;
|
||||
} else {
|
||||
targetResizeWidth = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
xfactor = yfactor;
|
||||
}
|
||||
break;
|
||||
case Canvas::MAX:
|
||||
if (xfactor > yfactor) {
|
||||
baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
yfactor = xfactor;
|
||||
} else {
|
||||
baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
xfactor = yfactor;
|
||||
}
|
||||
break;
|
||||
case Canvas::MIN:
|
||||
if (xfactor < yfactor) {
|
||||
baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / xfactor));
|
||||
yfactor = xfactor;
|
||||
} else {
|
||||
baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / yfactor));
|
||||
xfactor = yfactor;
|
||||
}
|
||||
break;
|
||||
@@ -259,23 +291,23 @@ class PipelineWorker : public AsyncWorker {
|
||||
}
|
||||
} else if (baton->width > 0) {
|
||||
// Fixed width
|
||||
xfactor = static_cast<double>(inputWidth) / (static_cast<double>(baton->width) + 0.1);
|
||||
xfactor = static_cast<double>(inputWidth) / static_cast<double>(baton->width);
|
||||
if (baton->canvas == Canvas::IGNORE_ASPECT) {
|
||||
baton->height = inputHeight;
|
||||
targetResizeHeight = baton->height = inputHeight;
|
||||
} else {
|
||||
// Auto height
|
||||
yfactor = xfactor;
|
||||
baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / yfactor));
|
||||
targetResizeHeight = baton->height = static_cast<int>(round(static_cast<double>(inputHeight) / yfactor));
|
||||
}
|
||||
} else if (baton->height > 0) {
|
||||
// Fixed height
|
||||
yfactor = static_cast<double>(inputHeight) / (static_cast<double>(baton->height) + 0.1);
|
||||
yfactor = static_cast<double>(inputHeight) / static_cast<double>(baton->height);
|
||||
if (baton->canvas == Canvas::IGNORE_ASPECT) {
|
||||
baton->width = inputWidth;
|
||||
targetResizeWidth = baton->width = inputWidth;
|
||||
} else {
|
||||
// Auto width
|
||||
xfactor = yfactor;
|
||||
baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / xfactor));
|
||||
targetResizeWidth = baton->width = static_cast<int>(round(static_cast<double>(inputWidth) / xfactor));
|
||||
}
|
||||
} else {
|
||||
// Identity transform
|
||||
@@ -371,8 +403,8 @@ class PipelineWorker : public AsyncWorker {
|
||||
// Ignore failure of embedded profile
|
||||
}
|
||||
} else if (image.interpretation() == VIPS_INTERPRETATION_CMYK) {
|
||||
// Convert to sRGB using default "USWebCoatedSWOP" CMYK profile
|
||||
std::string cmykProfile = baton->iccProfilePath + "USWebCoatedSWOP.icc";
|
||||
// Convert to sRGB using default CMYK profile from http://www.argyllcms.com/cmyk.icm
|
||||
std::string cmykProfile = baton->iccProfilePath + "cmyk.icm";
|
||||
image = image.icc_transform(const_cast<char*>(srgbProfile.data()), VImage::option()
|
||||
->set("input_profile", cmykProfile.data())
|
||||
->set("intent", VIPS_INTENT_PERCEPTUAL)
|
||||
@@ -380,13 +412,12 @@ class PipelineWorker : public AsyncWorker {
|
||||
}
|
||||
|
||||
// Calculate maximum alpha value based on input image pixel depth
|
||||
bool is16Bit = (image.format() == VIPS_FORMAT_USHORT);
|
||||
double maxAlpha = is16Bit ? 65535.0 : 255.0;
|
||||
double const maxAlpha = MaximumImageAlpha(image.interpretation());
|
||||
|
||||
// Flatten image to remove alpha channel
|
||||
if (baton->flatten && HasAlpha(image)) {
|
||||
// Scale up 8-bit values to match 16-bit input image
|
||||
double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
|
||||
double const multiplier = Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
// Background colour
|
||||
std::vector<double> background {
|
||||
baton->background[0] * multiplier,
|
||||
@@ -429,26 +460,20 @@ class PipelineWorker : public AsyncWorker {
|
||||
// Swap input output width and height when rotating by 90 or 270 degrees
|
||||
std::swap(shrunkWidth, shrunkHeight);
|
||||
}
|
||||
xresidual = static_cast<double>(baton->width) / static_cast<double>(shrunkWidth);
|
||||
yresidual = static_cast<double>(baton->height) / static_cast<double>(shrunkHeight);
|
||||
if (baton->canvas == Canvas::EMBED) {
|
||||
xresidual = std::min(xresidual, yresidual);
|
||||
yresidual = xresidual;
|
||||
} else if (baton->canvas == Canvas::IGNORE_ASPECT) {
|
||||
if (!baton->rotateBeforePreExtract &&
|
||||
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270)) {
|
||||
std::swap(xresidual, yresidual);
|
||||
}
|
||||
} else {
|
||||
xresidual = std::max(xresidual, yresidual);
|
||||
yresidual = xresidual;
|
||||
xresidual = static_cast<double>(targetResizeWidth) / static_cast<double>(shrunkWidth);
|
||||
yresidual = static_cast<double>(targetResizeHeight) / static_cast<double>(shrunkHeight);
|
||||
if (
|
||||
!baton->rotateBeforePreExtract &&
|
||||
(rotation == VIPS_ANGLE_D90 || rotation == VIPS_ANGLE_D270)
|
||||
) {
|
||||
std::swap(xresidual, yresidual);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure image has an alpha channel when there is an overlay
|
||||
bool hasOverlay = baton->overlayBufferInLength > 0 || !baton->overlayFileIn.empty();
|
||||
if (hasOverlay && !HasAlpha(image)) {
|
||||
double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
|
||||
double const multiplier = Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
image = image.bandjoin(
|
||||
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
|
||||
);
|
||||
@@ -456,10 +481,11 @@ class PipelineWorker : public AsyncWorker {
|
||||
|
||||
bool shouldAffineTransform = xresidual != 1.0 || yresidual != 1.0;
|
||||
bool shouldBlur = baton->blurSigma != 0.0;
|
||||
bool shouldConv = baton->convKernelWidth * baton->convKernelHeight > 0;
|
||||
bool shouldSharpen = baton->sharpenSigma != 0.0;
|
||||
bool shouldThreshold = baton->threshold != 0;
|
||||
bool shouldCutout = baton->overlayCutout;
|
||||
bool shouldPremultiplyAlpha = HasAlpha(image) &&
|
||||
(shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay);
|
||||
(shouldAffineTransform || shouldBlur || shouldConv || shouldSharpen || (hasOverlay && !shouldCutout));
|
||||
|
||||
// Premultiply image alpha channel before all transformations to avoid
|
||||
// dark fringing around bright pixels
|
||||
@@ -527,7 +553,7 @@ class PipelineWorker : public AsyncWorker {
|
||||
if (image.width() != baton->width || image.height() != baton->height) {
|
||||
if (baton->canvas == Canvas::EMBED) {
|
||||
// Scale up 8-bit values to match 16-bit input image
|
||||
double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
|
||||
double const multiplier = Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
// Create background colour
|
||||
std::vector<double> background;
|
||||
if (image.bands() > 2) {
|
||||
@@ -590,7 +616,7 @@ class PipelineWorker : public AsyncWorker {
|
||||
// Extend edges
|
||||
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
|
||||
// Scale up 8-bit values to match 16-bit input image
|
||||
const double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0;
|
||||
double const multiplier = Is16Bit(image.interpretation()) ? 256.0 : 1.0;
|
||||
// Create background colour
|
||||
std::vector<double> background {
|
||||
baton->background[0] * multiplier,
|
||||
@@ -598,19 +624,26 @@ class PipelineWorker : public AsyncWorker {
|
||||
baton->background[2] * multiplier
|
||||
};
|
||||
// Add alpha channel to background colour
|
||||
if (HasAlpha(image)) {
|
||||
if (baton->background[3] < 255.0 || HasAlpha(image)) {
|
||||
background.push_back(baton->background[3] * multiplier);
|
||||
}
|
||||
// Add non-transparent alpha channel, if required
|
||||
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
|
||||
image = image.bandjoin(
|
||||
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
|
||||
);
|
||||
}
|
||||
// Embed
|
||||
baton->width = image.width() + baton->extendLeft + baton->extendRight;
|
||||
baton->height = image.height() + baton->extendTop + baton->extendBottom;
|
||||
|
||||
image = image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height,
|
||||
VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background));
|
||||
}
|
||||
|
||||
// Threshold - must happen before blurring, due to the utility of blurring after thresholding
|
||||
if (shouldThreshold) {
|
||||
image = image.colourspace(VIPS_INTERPRETATION_B_W) >= baton->threshold;
|
||||
if (baton->threshold != 0) {
|
||||
image = Threshold(image, baton->threshold, baton->thresholdGrayscale);
|
||||
}
|
||||
|
||||
// Blur
|
||||
@@ -618,6 +651,15 @@ class PipelineWorker : public AsyncWorker {
|
||||
image = Blur(image, baton->blurSigma);
|
||||
}
|
||||
|
||||
// Convolve
|
||||
if (shouldConv) {
|
||||
image = Convolve(image,
|
||||
baton->convKernelWidth, baton->convKernelHeight,
|
||||
baton->convKernelScale, baton->convKernelOffset,
|
||||
baton->convKernel
|
||||
);
|
||||
}
|
||||
|
||||
// Sharpen
|
||||
if (shouldSharpen) {
|
||||
image = Sharpen(image, baton->sharpenSigma, baton->sharpenFlat, baton->sharpenJagged);
|
||||
@@ -657,17 +699,65 @@ class PipelineWorker : public AsyncWorker {
|
||||
if (overlayImageType == ImageType::UNKNOWN) {
|
||||
return Error();
|
||||
}
|
||||
// Ensure overlay is premultiplied sRGB
|
||||
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();
|
||||
// Composite images with given gravity
|
||||
image = Composite(overlayImage, image, baton->overlayGravity);
|
||||
// Check if overlay is tiled
|
||||
if (baton->overlayTile) {
|
||||
int overlayImageWidth = overlayImage.width();
|
||||
int overlayImageHeight = overlayImage.height();
|
||||
int across = 0;
|
||||
int down = 0;
|
||||
|
||||
// use gravity in overlay
|
||||
if(overlayImageWidth <= baton->width) {
|
||||
across = static_cast<int>(ceil(static_cast<double>(image.width()) / overlayImageWidth));
|
||||
}
|
||||
if(overlayImageHeight <= baton->height) {
|
||||
down = static_cast<int>(ceil(static_cast<double>(image.height()) / overlayImageHeight));
|
||||
}
|
||||
if(across != 0 || down != 0) {
|
||||
int left;
|
||||
int top;
|
||||
overlayImage = overlayImage.replicate(across, down);
|
||||
|
||||
if(baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
|
||||
// the overlayX/YOffsets will now be used to CalculateCrop for extract_area
|
||||
std::tie(left, top) = CalculateCrop(
|
||||
overlayImage.width(), overlayImage.height(), image.width(), image.height(),
|
||||
baton->overlayXOffset, baton->overlayYOffset
|
||||
);
|
||||
} else {
|
||||
// the overlayGravity will now be used to CalculateCrop for extract_area
|
||||
std::tie(left, top) = CalculateCrop(
|
||||
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity
|
||||
);
|
||||
}
|
||||
overlayImage = overlayImage.extract_area(
|
||||
left, top, image.width(), image.height()
|
||||
);
|
||||
}
|
||||
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0
|
||||
baton->overlayGravity = 0;
|
||||
}
|
||||
if(shouldCutout) {
|
||||
// 'cut out' the image, premultiplication is not required
|
||||
image = Cutout(overlayImage, image, baton->overlayGravity);
|
||||
} else {
|
||||
// Ensure overlay is premultiplied sRGB
|
||||
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();
|
||||
if(baton->overlayXOffset >= 0 && baton->overlayYOffset >= 0) {
|
||||
// Composite images with given offsets
|
||||
image = Composite(overlayImage, image, baton->overlayXOffset, baton->overlayYOffset);
|
||||
} else {
|
||||
// Composite images with given gravity
|
||||
image = Composite(overlayImage, image, baton->overlayGravity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reverse premultiplication after all transformations:
|
||||
if (shouldPremultiplyAlpha) {
|
||||
image = image.unpremultiply(VImage::option()->set("max_alpha", maxAlpha));
|
||||
// Cast pixel values to integer
|
||||
if (is16Bit) {
|
||||
if (Is16Bit(image.interpretation())) {
|
||||
image = image.cast(VIPS_FORMAT_USHORT);
|
||||
} else {
|
||||
image = image.cast(VIPS_FORMAT_UCHAR);
|
||||
@@ -685,7 +775,7 @@ class PipelineWorker : public AsyncWorker {
|
||||
}
|
||||
|
||||
// Convert image to sRGB, if not already
|
||||
if (image.interpretation() == VIPS_INTERPRETATION_RGB16) {
|
||||
if (Is16Bit(image.interpretation())) {
|
||||
image = image.cast(VIPS_FORMAT_USHORT);
|
||||
}
|
||||
if (image.interpretation() != VIPS_INTERPRETATION_sRGB) {
|
||||
@@ -698,6 +788,58 @@ class PipelineWorker : public AsyncWorker {
|
||||
}
|
||||
}
|
||||
|
||||
// Apply bitwise boolean operation between images
|
||||
if (baton->booleanOp != VIPS_OPERATION_BOOLEAN_LAST &&
|
||||
(baton->booleanBufferInLength > 0 || !baton->booleanFileIn.empty())) {
|
||||
VImage booleanImage;
|
||||
ImageType booleanImageType = ImageType::UNKNOWN;
|
||||
if (baton->booleanBufferInLength > 0) {
|
||||
// Buffer input for boolean operation
|
||||
booleanImageType = DetermineImageType(baton->booleanBufferIn, baton->booleanBufferInLength);
|
||||
if (booleanImageType != ImageType::UNKNOWN) {
|
||||
try {
|
||||
booleanImage = VImage::new_from_buffer(baton->booleanBufferIn, baton->booleanBufferInLength,
|
||||
nullptr, VImage::option()->set("access", baton->accessMethod));
|
||||
} catch (...) {
|
||||
(baton->err).append("Boolean operation buffer has corrupt header");
|
||||
booleanImageType = ImageType::UNKNOWN;
|
||||
}
|
||||
} else {
|
||||
(baton->err).append("Boolean operation buffer contains unsupported image format");
|
||||
}
|
||||
} else if (!baton->booleanFileIn.empty()) {
|
||||
// File input for boolean operation
|
||||
booleanImageType = DetermineImageType(baton->booleanFileIn.data());
|
||||
if (booleanImageType != ImageType::UNKNOWN) {
|
||||
try {
|
||||
booleanImage = VImage::new_from_file(baton->booleanFileIn.data(),
|
||||
VImage::option()->set("access", baton->accessMethod));
|
||||
} catch (...) {
|
||||
(baton->err).append("Boolean operation file has corrupt header");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (booleanImageType == ImageType::UNKNOWN) {
|
||||
return Error();
|
||||
}
|
||||
// Apply the boolean operation
|
||||
image = Boolean(image, booleanImage, baton->booleanOp);
|
||||
}
|
||||
|
||||
// Apply per-channel Bandbool bitwise operations after all other operations
|
||||
if (baton->bandBoolOp >= VIPS_OPERATION_BOOLEAN_AND && baton->bandBoolOp < VIPS_OPERATION_BOOLEAN_LAST) {
|
||||
image = Bandbool(image, baton->bandBoolOp);
|
||||
}
|
||||
|
||||
// Extract an image channel (aka vips band)
|
||||
if(baton->extractChannel > -1) {
|
||||
if(baton->extractChannel >= image.bands()) {
|
||||
(baton->err).append("Cannot extract channel from image. Too few channels in image.");
|
||||
return Error();
|
||||
}
|
||||
image = image.extract_band(baton->extractChannel);
|
||||
}
|
||||
|
||||
// Override EXIF Orientation tag
|
||||
if (baton->withMetadata && baton->withMetadataOrientation != -1) {
|
||||
SetExifOrientation(image, baton->withMetadataOrientation);
|
||||
@@ -788,7 +930,9 @@ class PipelineWorker : public AsyncWorker {
|
||||
bool isTiff = IsTiff(baton->fileOut);
|
||||
bool isDz = IsDz(baton->fileOut);
|
||||
bool isDzZip = IsDzZip(baton->fileOut);
|
||||
bool matchInput = baton->formatOut == "input" && !(isJpeg || isPng || isWebp || isTiff || isDz || isDzZip);
|
||||
bool isV = IsV(baton->fileOut);
|
||||
bool matchInput = baton->formatOut == "input" &&
|
||||
!(isJpeg || isPng || isWebp || isTiff || isDz || isDzZip || isV);
|
||||
if (baton->formatOut == "jpeg" || isJpeg || (matchInput && inputImageType == ImageType::JPEG)) {
|
||||
// Write JPEG to file
|
||||
image.jpegsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
|
||||
@@ -842,6 +986,12 @@ class PipelineWorker : public AsyncWorker {
|
||||
->set("layout", baton->tileLayout)
|
||||
);
|
||||
baton->formatOut = "dz";
|
||||
} else if (baton->formatOut == "v" || isV || (matchInput && inputImageType == ImageType::VIPS)) {
|
||||
// Write V to file
|
||||
image.vipssave(const_cast<char*>(baton->fileOut.data()), VImage::option()
|
||||
->set("strip", !baton->withMetadata)
|
||||
);
|
||||
baton->formatOut = "v";
|
||||
} else {
|
||||
// Unsupported output format
|
||||
(baton->err).append("Unsupported output format " + baton->fileOut);
|
||||
@@ -899,12 +1049,12 @@ class PipelineWorker : public AsyncWorker {
|
||||
}
|
||||
|
||||
// Dispose of Persistent wrapper around input Buffers so they can be garbage collected
|
||||
if (baton->bufferInLength > 0) {
|
||||
GetFromPersistent("bufferIn");
|
||||
}
|
||||
if (baton->overlayBufferInLength > 0) {
|
||||
GetFromPersistent("overlayBufferIn");
|
||||
}
|
||||
std::accumulate(buffersToPersist.begin(), buffersToPersist.end(), 0,
|
||||
[this](uint32_t index, Local<Object> const buffer) -> uint32_t {
|
||||
GetFromPersistent(index);
|
||||
return index + 1;
|
||||
}
|
||||
);
|
||||
delete baton;
|
||||
|
||||
// Decrement processing task counter
|
||||
@@ -920,6 +1070,7 @@ class PipelineWorker : public AsyncWorker {
|
||||
private:
|
||||
PipelineBaton *baton;
|
||||
Callback *queueListener;
|
||||
std::vector<Local<Object>> buffersToPersist;
|
||||
|
||||
/*
|
||||
Calculate the angle of rotation and need-to-flip for the output image.
|
||||
@@ -983,6 +1134,9 @@ NAN_METHOD(pipeline) {
|
||||
PipelineBaton *baton = new PipelineBaton;
|
||||
Local<Object> options = info[0].As<Object>();
|
||||
|
||||
// Input Buffers must not undergo GC compaction during processing
|
||||
std::vector<Local<Object>> buffersToPersist;
|
||||
|
||||
// Input filename
|
||||
baton->fileIn = attrAsStr(options, "fileIn");
|
||||
baton->accessMethod = attrAs<bool>(options, "sequentialRead") ?
|
||||
@@ -993,6 +1147,7 @@ NAN_METHOD(pipeline) {
|
||||
bufferIn = Get(options, New("bufferIn").ToLocalChecked()).ToLocalChecked().As<Object>();
|
||||
baton->bufferInLength = node::Buffer::Length(bufferIn);
|
||||
baton->bufferIn = node::Buffer::Data(bufferIn);
|
||||
buffersToPersist.push_back(bufferIn);
|
||||
}
|
||||
// ICC profile to use when input CMYK image has no embedded profile
|
||||
baton->iccProfilePath = attrAsStr(options, "iccProfilePath");
|
||||
@@ -1041,8 +1196,22 @@ NAN_METHOD(pipeline) {
|
||||
overlayBufferIn = Get(options, New("overlayBufferIn").ToLocalChecked()).ToLocalChecked().As<Object>();
|
||||
baton->overlayBufferInLength = node::Buffer::Length(overlayBufferIn);
|
||||
baton->overlayBufferIn = node::Buffer::Data(overlayBufferIn);
|
||||
buffersToPersist.push_back(overlayBufferIn);
|
||||
}
|
||||
baton->overlayGravity = attrAs<int32_t>(options, "overlayGravity");
|
||||
baton->overlayXOffset = attrAs<int32_t>(options, "overlayXOffset");
|
||||
baton->overlayYOffset = attrAs<int32_t>(options, "overlayYOffset");
|
||||
baton->overlayTile = attrAs<bool>(options, "overlayTile");
|
||||
baton->overlayCutout = attrAs<bool>(options, "overlayCutout");
|
||||
// Boolean options
|
||||
baton->booleanFileIn = attrAsStr(options, "booleanFileIn");
|
||||
Local<Object> booleanBufferIn;
|
||||
if (node::Buffer::HasInstance(Get(options, New("booleanBufferIn").ToLocalChecked()).ToLocalChecked())) {
|
||||
booleanBufferIn = Get(options, New("booleanBufferIn").ToLocalChecked()).ToLocalChecked().As<Object>();
|
||||
baton->booleanBufferInLength = node::Buffer::Length(booleanBufferIn);
|
||||
baton->booleanBufferIn = node::Buffer::Data(booleanBufferIn);
|
||||
buffersToPersist.push_back(booleanBufferIn);
|
||||
}
|
||||
// Resize options
|
||||
baton->withoutEnlargement = attrAs<bool>(options, "withoutEnlargement");
|
||||
baton->crop = attrAs<int32_t>(options, "crop");
|
||||
@@ -1056,6 +1225,11 @@ NAN_METHOD(pipeline) {
|
||||
baton->sharpenFlat = attrAs<double>(options, "sharpenFlat");
|
||||
baton->sharpenJagged = attrAs<double>(options, "sharpenJagged");
|
||||
baton->threshold = attrAs<int32_t>(options, "threshold");
|
||||
baton->thresholdGrayscale = attrAs<bool>(options, "thresholdGrayscale");
|
||||
baton->trimTolerance = attrAs<int32_t>(options, "trimTolerance");
|
||||
if(baton->accessMethod == VIPS_ACCESS_SEQUENTIAL && baton->trimTolerance != 0) {
|
||||
baton->accessMethod = VIPS_ACCESS_RANDOM;
|
||||
}
|
||||
baton->gamma = attrAs<double>(options, "gamma");
|
||||
baton->greyscale = attrAs<bool>(options, "greyscale");
|
||||
baton->normalize = attrAs<bool>(options, "normalize");
|
||||
@@ -1067,6 +1241,7 @@ NAN_METHOD(pipeline) {
|
||||
baton->extendBottom = attrAs<int32_t>(options, "extendBottom");
|
||||
baton->extendLeft = attrAs<int32_t>(options, "extendLeft");
|
||||
baton->extendRight = attrAs<int32_t>(options, "extendRight");
|
||||
baton->extractChannel = attrAs<int32_t>(options, "extractChannel");
|
||||
// Output options
|
||||
baton->progressive = attrAs<bool>(options, "progressive");
|
||||
baton->quality = attrAs<int32_t>(options, "quality");
|
||||
@@ -1098,6 +1273,26 @@ NAN_METHOD(pipeline) {
|
||||
} else {
|
||||
baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_DZ;
|
||||
}
|
||||
// Convolution Kernel
|
||||
if(Has(options, New("convKernel").ToLocalChecked()).FromJust()) {
|
||||
Local<Object> kernel = Get(options, New("convKernel").ToLocalChecked()).ToLocalChecked().As<Object>();
|
||||
baton->convKernelWidth = attrAs<uint32_t>(kernel, "width");
|
||||
baton->convKernelHeight = attrAs<uint32_t>(kernel, "height");
|
||||
baton->convKernelScale = attrAs<double>(kernel, "scale");
|
||||
baton->convKernelOffset = attrAs<double>(kernel, "offset");
|
||||
|
||||
size_t const kernelSize = static_cast<size_t>(baton->convKernelWidth * baton->convKernelHeight);
|
||||
baton->convKernel = std::unique_ptr<double[]>(new double[kernelSize]);
|
||||
Local<Array> kdata = Get(kernel, New("kernel").ToLocalChecked()).ToLocalChecked().As<Array>();
|
||||
for(unsigned int i = 0; i < kernelSize; i++) {
|
||||
baton->convKernel[i] = To<double>(Get(kdata, i).ToLocalChecked()).FromJust();
|
||||
}
|
||||
}
|
||||
// Bandbool operation
|
||||
baton->bandBoolOp = GetBooleanOperation(attrAsStr(options, "bandBoolOp"));
|
||||
|
||||
// Boolean operation
|
||||
baton->booleanOp = GetBooleanOperation(attrAsStr(options, "booleanOp"));
|
||||
|
||||
// Function to notify of queue length changes
|
||||
Callback *queueListener = new Callback(
|
||||
@@ -1106,7 +1301,7 @@ NAN_METHOD(pipeline) {
|
||||
|
||||
// Join queue for worker thread
|
||||
Callback *callback = new Callback(info[1].As<Function>());
|
||||
AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, bufferIn, overlayBufferIn));
|
||||
AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, buffersToPersist));
|
||||
|
||||
// Increment queued task counter
|
||||
g_atomic_int_inc(&counterQueue);
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
#ifndef SRC_PIPELINE_H_
|
||||
#define SRC_PIPELINE_H_
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include <vips/vips8>
|
||||
|
||||
#include "nan.h"
|
||||
@@ -33,6 +35,13 @@ struct PipelineBaton {
|
||||
char *overlayBufferIn;
|
||||
size_t overlayBufferInLength;
|
||||
int overlayGravity;
|
||||
int overlayXOffset;
|
||||
int overlayYOffset;
|
||||
bool overlayTile;
|
||||
bool overlayCutout;
|
||||
std::string booleanFileIn;
|
||||
char *booleanBufferIn;
|
||||
size_t booleanBufferInLength;
|
||||
int topOffsetPre;
|
||||
int leftOffsetPre;
|
||||
int widthPre;
|
||||
@@ -56,6 +65,8 @@ struct PipelineBaton {
|
||||
double sharpenFlat;
|
||||
double sharpenJagged;
|
||||
int threshold;
|
||||
bool thresholdGrayscale;
|
||||
int trimTolerance;
|
||||
double gamma;
|
||||
bool greyscale;
|
||||
bool normalize;
|
||||
@@ -80,6 +91,14 @@ struct PipelineBaton {
|
||||
std::string err;
|
||||
bool withMetadata;
|
||||
int withMetadataOrientation;
|
||||
std::unique_ptr<double[]> convKernel;
|
||||
int convKernelWidth;
|
||||
int convKernelHeight;
|
||||
double convKernelScale;
|
||||
double convKernelOffset;
|
||||
VipsOperationBoolean bandBoolOp;
|
||||
VipsOperationBoolean booleanOp;
|
||||
int extractChannel;
|
||||
int tileSize;
|
||||
int tileOverlap;
|
||||
VipsForeignDzContainer tileContainer;
|
||||
@@ -97,6 +116,11 @@ struct PipelineBaton {
|
||||
bufferOutLength(0),
|
||||
overlayBufferInLength(0),
|
||||
overlayGravity(0),
|
||||
overlayXOffset(-1),
|
||||
overlayYOffset(-1),
|
||||
overlayTile(false),
|
||||
overlayCutout(false),
|
||||
booleanBufferInLength(0),
|
||||
topOffsetPre(-1),
|
||||
topOffsetPost(-1),
|
||||
channels(0),
|
||||
@@ -109,6 +133,8 @@ struct PipelineBaton {
|
||||
sharpenFlat(1.0),
|
||||
sharpenJagged(2.0),
|
||||
threshold(0),
|
||||
thresholdGrayscale(true),
|
||||
trimTolerance(0),
|
||||
gamma(0.0),
|
||||
greyscale(false),
|
||||
normalize(false),
|
||||
@@ -130,6 +156,13 @@ struct PipelineBaton {
|
||||
optimiseScans(false),
|
||||
withMetadata(false),
|
||||
withMetadataOrientation(-1),
|
||||
convKernelWidth(0),
|
||||
convKernelHeight(0),
|
||||
convKernelScale(0.0),
|
||||
convKernelOffset(0.0),
|
||||
bandBoolOp(VIPS_OPERATION_BOOLEAN_LAST),
|
||||
booleanOp(VIPS_OPERATION_BOOLEAN_LAST),
|
||||
extractChannel(-1),
|
||||
tileSize(256),
|
||||
tileOverlap(0),
|
||||
tileContainer(VIPS_FOREIGN_DZ_CONTAINER_FS),
|
||||
|
||||
@@ -139,7 +139,7 @@ NAN_METHOD(format) {
  // Which load/save operations are available for each compressed format?
  Local<Object> format = New<Object>();
  for (std::string f : {
    "jpeg", "png", "webp", "tiff", "magick", "openslide", "dz", "ppm", "fits", "gif", "svg", "pdf"
    "jpeg", "png", "webp", "tiff", "magick", "openslide", "dz", "ppm", "fits", "gif", "svg", "pdf", "v"
  }) {
    // Input
    Local<Boolean> hasInputFile =
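With "v" added to this list, `sharp.format.v` reports support for the libvips-native file format; a sketch based on the new input/output tests, with placeholder paths:

```javascript
var sharp = require('sharp');

// Only attempt .v output where the installed libvips supports it.
if (sharp.format.v.output.file) {
  sharp('input.jpg')
    .extract({ left: 910, top: 1105, width: 70, height: 60 })
    .toFile('output.v', function(err, info) {
      if (err) throw err;
      // info.format === 'v'
    });
}
```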
Binary test fixtures (all under test/fixtures/, new unless marked "updated"):
bandbool.png, booleanTest.jpg,
expected/addAlphaChanelBeforeExtend.png, expected/alpha-layer-1-fill-trim-resize.png,
expected/bandbool_{and,eor,or}_result.png, expected/boolean_{and,eor,or}_result.jpg,
expected/conv-1.png, expected/conv-2.png,
expected/crop-entropy.jpg (updated), expected/embed-16bit.png (updated),
expected/extract-red.jpg, expected/extract-green.jpg, expected/extract-blue.jpg,
expected/extract.jpg (updated), expected/gamma-0.0.jpg (updated),
expected/overlay-cutout-gravity-{center,centre,east,north,northeast,northwest,south,southeast,southwest,west}.jpg,
expected/overlay-cutout-rotated90.jpg, expected/overlay-cutout-rotated90-gravity-northwest.jpg,
expected/overlay-offset-0.jpg, expected/overlay-offset-with-gravity.jpg,
expected/overlay-offset-with-gravity-tile.jpg, expected/overlay-offset-with-tile.jpg,
expected/overlay-tile-gravity-{center,centre,east,north,northeast,northwest,south,southeast,southwest,west}.jpg,
expected/overlay-tile-rotated90.jpg, expected/overlay-tile-rotated90-gravity-northwest.jpg,
expected/overlay-valid-offsets-10-10.jpg, expected/overlay-valid-offsets-100-300.jpg,
expected/overlay-very-large-offset.jpg, expected/sharpen-rgba.png,
expected/threshold-color-128.jpg, expected/trim-16bit-rgba.png, expected/vfile.jpg
test/fixtures/index.js (vendored, 9 lines changed)
@@ -79,6 +79,7 @@ module.exports = {
  inputPngOverlayLayer2LowAlpha: getPath('alpha-layer-2-ink-low-alpha.png'),
  inputPngAlphaPremultiplicationSmall: getPath('alpha-premultiply-1024x768-paper.png'),
  inputPngAlphaPremultiplicationLarge: getPath('alpha-premultiply-2048x1536-paper.png'),
  inputPngBooleanNoAlpha: getPath('bandbool.png'),

  inputWebP: getPath('4.webp'), // http://www.gstatic.com/webp/gallery/4.webp
  inputWebPWithTransparency: getPath('5_webp_a.webp'), // http://www.gstatic.com/webp/gallery3/5_webp_a.webp
@@ -91,9 +92,17 @@ module.exports = {

  inputJPGBig: getPath('flowers.jpeg'),

  inputPngStripesV: getPath('stripesV.png'),
  inputPngStripesH: getPath('stripesH.png'),

  inputJpgBooleanTest: getPath('booleanTest.jpg'),

  inputV: getPath('vfile.v'),

  outputJpg: getPath('output.jpg'),
  outputPng: getPath('output.png'),
  outputWebP: getPath('output.webp'),
  outputV: getPath('output.v'),
  outputZoinks: getPath('output.zoinks'), // an 'unknown' file extension

  // Path for tests requiring human inspection
New binary fixtures: test/fixtures/stripesH.png, test/fixtures/stripesV.png, test/fixtures/vfile.v

test/unit/bandbool.js (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
'use strict';
|
||||
|
||||
var assert = require('assert');
|
||||
var fixtures = require('../fixtures');
|
||||
var sharp = require('../../index');
|
||||
|
||||
describe('Bandbool per-channel boolean operations', function() {
|
||||
|
||||
[
|
||||
sharp.bool.and,
|
||||
sharp.bool.or,
|
||||
sharp.bool.eor
|
||||
]
|
||||
.forEach(function(op) {
|
||||
it(op + ' operation', function(done) {
|
||||
sharp(fixtures.inputPngBooleanNoAlpha)
|
||||
.bandbool(op)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(200, info.width);
|
||||
assert.strictEqual(200, info.height);
|
||||
assert.strictEqual(1, info.channels);
|
||||
fixtures.assertSimilar(fixtures.expected('bandbool_' + op + '_result.png'), data, done);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Invalid operation', function() {
|
||||
assert.throws(function() {
|
||||
sharp().bandbool('fail');
|
||||
});
|
||||
});
|
||||
|
||||
it('Missing operation', function() {
|
||||
assert.throws(function() {
|
||||
sharp().bandbool();
|
||||
});
|
||||
});
|
||||
});
|
||||
test/unit/boolean.js (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
'use strict';
|
||||
|
||||
var fs = require('fs');
|
||||
var assert = require('assert');
|
||||
var fixtures = require('../fixtures');
|
||||
var sharp = require('../../index');
|
||||
|
||||
describe('Boolean operation between two images', function() {
|
||||
|
||||
var inputJpgBooleanTestBuffer = fs.readFileSync(fixtures.inputJpgBooleanTest);
|
||||
|
||||
[
|
||||
sharp.bool.and,
|
||||
sharp.bool.or,
|
||||
sharp.bool.eor
|
||||
]
|
||||
.forEach(function(op) {
|
||||
|
||||
it(op + ' operation, file', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(320, 240)
|
||||
.boolean(fixtures.inputJpgBooleanTest, op)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('boolean_' + op + '_result.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it(op + ' operation, buffer', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(320, 240)
|
||||
.boolean(inputJpgBooleanTestBuffer, op)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('boolean_' + op + '_result.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Invalid operation', function() {
|
||||
assert.throws(function() {
|
||||
sharp().boolean(fixtures.inputJpgBooleanTest, 'fail');
|
||||
});
|
||||
});
|
||||
|
||||
it('Invalid operation, non-string', function() {
|
||||
assert.throws(function() {
|
||||
sharp().boolean(fixtures.inputJpgBooleanTest, null);
|
||||
});
|
||||
});
|
||||
|
||||
it('Missing input', function() {
|
||||
assert.throws(function() {
|
||||
sharp().boolean();
|
||||
});
|
||||
});
|
||||
});
|
||||
test/unit/convolve.js (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
'use strict';
|
||||
|
||||
var assert = require('assert');
|
||||
|
||||
var sharp = require('../../index');
|
||||
var fixtures = require('../fixtures');
|
||||
|
||||
describe('Convolve', function() {
|
||||
|
||||
it('specific convolution kernel 1', function(done) {
|
||||
sharp(fixtures.inputPngStripesV)
|
||||
.resize(320, 240)
|
||||
.convolve(
|
||||
{
|
||||
'width': 3,
|
||||
'height': 3,
|
||||
'scale': 50,
|
||||
'offset': 0,
|
||||
'kernel': [ 10, 20, 10,
|
||||
0, 0, 0,
|
||||
10, 20, 10 ]
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
assert.strictEqual('png', info.format);
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('conv-1.png'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('specific convolution kernel 2', function(done) {
|
||||
sharp(fixtures.inputPngStripesH)
|
||||
.resize(320, 240)
|
||||
.convolve(
|
||||
{
|
||||
'width': 3,
|
||||
'height': 3,
|
||||
'kernel': [ 1, 0, 1,
|
||||
2, 0, 2,
|
||||
1, 0, 1 ]
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
assert.strictEqual('png', info.format);
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('conv-2.png'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('invalid kernel specification: no data', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg).convolve(
|
||||
{
|
||||
'width': 3,
|
||||
'height': 3,
|
||||
'kernel': []
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('invalid kernel specification: bad data format', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg).convolve(
|
||||
{
|
||||
'width': 3,
|
||||
'height': 3,
|
||||
'kernel': [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('invalid kernel specification: wrong width', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg).convolve(
|
||||
{
|
||||
'width': 3,
|
||||
'height': 4,
|
||||
'kernel': [1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -49,4 +49,16 @@ describe('Extend', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('should add alpha channel before extending with a transparent Background', function( done ){
|
||||
sharp(fixtures.inputJpgWithLandscapeExif1)
|
||||
.background({r: 0, g: 0, b: 0, a: 0})
|
||||
.toFormat( sharp.format.png )
|
||||
.extend({top: 0, bottom: 10, left: 0, right: 10})
|
||||
.toBuffer( function(err, data, info){
|
||||
assert.strictEqual(610, info.width);
|
||||
assert.strictEqual(460, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('addAlphaChanelBeforeExtend.png'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@@ -14,7 +14,7 @@ describe('Partial image extraction', function() {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(20, info.width);
|
||||
assert.strictEqual(20, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('extract.jpg'), data, done);
|
||||
fixtures.assertSimilar(fixtures.expected('extract.jpg'), data, { threshold: 8 }, done);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -175,5 +175,14 @@ describe('Partial image extraction', function() {
|
||||
sharp(fixtures.inputJpg).extract({ left: 10, top: 10, width: 10, height: null });
|
||||
});
|
||||
});
|
||||
|
||||
it('Bad image area', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extract({ left: 3000, top: 10, width: 10, height: 10 })
|
||||
.toBuffer(function(err) {
|
||||
assert(err instanceof Error);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test/unit/extractChannel.js (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
'use strict';
|
||||
|
||||
var assert = require('assert');
|
||||
|
||||
var sharp = require('../../index');
|
||||
var fixtures = require('../fixtures');
|
||||
|
||||
describe('Image channel extraction', function() {
|
||||
|
||||
it('Red channel', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel('red')
|
||||
.resize(320,240)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('extract-red.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('Green channel', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel('green')
|
||||
.resize(320,240)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('extract-green.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('Blue channel', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel('blue')
|
||||
.resize(320,240)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('extract-blue.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('Blue channel by number', function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel(2)
|
||||
.resize(320,240)
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(320, info.width);
|
||||
assert.strictEqual(240, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('extract-blue.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('Invalid channel number', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel(-1);
|
||||
});
|
||||
});
|
||||
|
||||
it('No arguments', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extractChannel();
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -781,6 +781,37 @@ describe('Input/output', function() {
|
||||
});
|
||||
}
|
||||
|
||||
if (sharp.format.v.input.file) {
|
||||
it("Load Vips V file", function(done) {
|
||||
sharp(fixtures.inputV)
|
||||
.jpeg()
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual(true, data.length > 0);
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(70, info.width);
|
||||
assert.strictEqual(60, info.height);
|
||||
fixtures.assertSimilar(fixtures.expected('vfile.jpg'), data, done);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (sharp.format.v.output.file) {
|
||||
it("Save Vips V file", function(done) {
|
||||
sharp(fixtures.inputJpg)
|
||||
.extract({left: 910, top: 1105, width: 70, height: 60})
|
||||
.toFile(fixtures.outputV, function(err, info) {
|
||||
if(err) throw err;
|
||||
assert.strictEqual(true, info.size > 0);
|
||||
assert.strictEqual('v', info.format);
|
||||
assert.strictEqual(70, info.width);
|
||||
assert.strictEqual(60, info.height);
|
||||
fs.unlinkSync(fixtures.outputV);
|
||||
done();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (sharp.format.raw.output.buffer) {
|
||||
describe('Output raw, uncompressed image data', function() {
|
||||
it('1 channel greyscale image', function(done) {
|
||||
|
||||
@@ -303,7 +303,9 @@ describe('Image metadata', function() {
|
||||
assert.strictEqual(true, metadata.icc instanceof Buffer);
|
||||
var profile = icc.parse(metadata.icc);
|
||||
assert.strictEqual('object', typeof profile);
|
||||
assert.strictEqual('sRGB IEC61966-2-1 black scaled', profile.description);
|
||||
assert.strictEqual('RGB', profile.colorSpace);
|
||||
assert.strictEqual('Perceptual', profile.intent);
|
||||
assert.strictEqual('Monitor', profile.deviceClass);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -242,4 +242,290 @@ describe('Overlays', function() {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Overlay with tile enabled and gravity', function() {
|
||||
Object.keys(sharp.gravity).forEach(function(gravity) {
|
||||
it(gravity, function(done) {
|
||||
var expected = fixtures.expected('overlay-tile-gravity-' + gravity + '.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(80)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
tile: true,
|
||||
gravity: gravity
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(80, info.width);
|
||||
assert.strictEqual(65, info.height);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Overlay with top-left offsets", function() {
|
||||
it('Overlay with 10px top & 10px left offsets', function(done) {
|
||||
var expected = fixtures.expected('overlay-valid-offsets-10-10.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
top: 10,
|
||||
left: 10
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Overlay with 100px top & 300px left offsets', function(done) {
|
||||
var expected = fixtures.expected('overlay-valid-offsets-100-300.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
top: 100,
|
||||
left: 300
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Overlay with only top offset', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
top: 1000
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with only left offset', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
left: 1000
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with negative offsets', function() {
|
||||
assert.throws(function() {
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
top: -1000,
|
||||
left: -1000
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with 0 offset', function(done) {
|
||||
var expected = fixtures.expected('overlay-offset-0.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
top: 0,
|
||||
left: 0
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Overlay with offset and gravity', function(done) {
|
||||
var expected = fixtures.expected('overlay-offset-with-gravity.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
left: 10,
|
||||
top: 10,
|
||||
gravity : 4
|
||||
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Overlay with offset and gravity and tile', function(done) {
|
||||
var expected = fixtures.expected('overlay-offset-with-gravity-tile.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
left: 10,
|
||||
top: 10,
|
||||
gravity : 4,
|
||||
tile: true
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('Overlay with offset and tile', function(done) {
|
||||
var expected = fixtures.expected('overlay-offset-with-tile.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
left: 10,
|
||||
top: 10,
|
||||
tile: true
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with invalid cutout option', function() {
|
||||
assert.throws(function() {
|
||||
sharp().overlayWith('ignore', { cutout: 1 });
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with invalid tile option', function() {
|
||||
assert.throws(function() {
|
||||
sharp().overlayWith('ignore', { tile: 1 });
|
||||
});
|
||||
});
|
||||
|
||||
it('Overlay with very large offset', function(done) {
|
||||
var expected = fixtures.expected('overlay-very-large-offset.jpg');
|
||||
sharp(fixtures.inputJpg)
|
||||
.resize(400)
|
||||
.overlayWith(fixtures.inputPngWithTransparency16bit, {
|
||||
left: 10000,
|
||||
top: 10000
|
||||
})
|
||||
.toBuffer(function(err, data, info) {
|
||||
if (err) throw err;
|
||||
assert.strictEqual('jpeg', info.format);
|
||||
assert.strictEqual(3, info.channels);
|
||||
fixtures.assertSimilar(expected, data, done);
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
  it('With tile enabled and image rotated 90 degrees', function(done) {
    var expected = fixtures.expected('overlay-tile-rotated90.jpg');
    sharp(fixtures.inputJpg)
      .rotate(90)
      .resize(80)
      .overlayWith(fixtures.inputPngWithTransparency16bit, {
        tile: true
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(80, info.width);
        assert.strictEqual(98, info.height);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(expected, data, done);
      });
  });

  it('With tile enabled and image rotated 90 degrees and gravity northwest', function(done) {
    var expected = fixtures.expected('overlay-tile-rotated90-gravity-northwest.jpg');
    sharp(fixtures.inputJpg)
      .rotate(90)
      .resize(80)
      .overlayWith(fixtures.inputPngWithTransparency16bit, {
        tile: true,
        gravity: 'northwest'
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(80, info.width);
        assert.strictEqual(98, info.height);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(expected, data, done);
      });
  });

  describe('Overlay with cutout enabled and gravity', function() {
    Object.keys(sharp.gravity).forEach(function(gravity) {
      it(gravity, function(done) {
        var expected = fixtures.expected('overlay-cutout-gravity-' + gravity + '.jpg');
        sharp(fixtures.inputJpg)
          .resize(80)
          .overlayWith(fixtures.inputPngWithTransparency16bit, {
            cutout: true,
            gravity: gravity
          })
          .toBuffer(function(err, data, info) {
            if (err) throw err;
            assert.strictEqual('jpeg', info.format);
            assert.strictEqual(80, info.width);
            assert.strictEqual(65, info.height);
            assert.strictEqual(3, info.channels);
            fixtures.assertSimilar(expected, data, done);
          });
      });
    });
  });

  it('With cutout enabled and image rotated 90 degrees', function(done) {
    var expected = fixtures.expected('overlay-cutout-rotated90.jpg');
    sharp(fixtures.inputJpg)
      .rotate(90)
      .resize(80)
      .overlayWith(fixtures.inputPngWithTransparency16bit, {
        cutout: true
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(80, info.width);
        assert.strictEqual(98, info.height);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(expected, data, done);
      });
  });

  it('With cutout enabled and image rotated 90 degrees and gravity northwest', function(done) {
    var expected = fixtures.expected('overlay-cutout-rotated90-gravity-northwest.jpg');
    sharp(fixtures.inputJpg)
      .rotate(90)
      .resize(80)
      .overlayWith(fixtures.inputPngWithTransparency16bit, {
        cutout: true,
        gravity: 'northwest'
      })
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(80, info.width);
        assert.strictEqual(98, info.height);
        assert.strictEqual(3, info.channels);
        fixtures.assertSimilar(expected, data, done);
      });
  });

});

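The overlay tests above exercise `overlayWith` with the new `top`/`left` offset options alongside the existing `tile`, `cutout` and `gravity` options. A minimal usage sketch of the offset form follows; the file names are placeholders for illustration, not fixtures from this repository.

```javascript
var sharp = require('sharp');

// Composite a watermark 10px from the top-left corner of the resized image.
// 'background.jpg', 'watermark.png' and 'output.jpg' are placeholder paths.
sharp('background.jpg')
  .resize(400)
  .overlayWith('watermark.png', { top: 10, left: 10 })
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    console.log('Composited %dx%d %s', info.width, info.height, info.format);
  });
```

As the invalid-offset tests assert, `top` and `left` must be provided together and must be non-negative, while an offset larger than the base image still produces output (see the 'very large offset' test) rather than throwing.
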
@@ -12,6 +12,7 @@ describe('Sharpen', function() {
      .resize(320, 240)
      .sharpen(6)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
@@ -24,6 +25,7 @@ describe('Sharpen', function() {
      .resize(320, 240)
      .sharpen(1.5, 0.5, 2.5)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
@@ -36,6 +38,7 @@ describe('Sharpen', function() {
      .resize(320, 240)
      .sharpen(3.5, 2, 4)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
@@ -43,11 +46,26 @@ describe('Sharpen', function() {
      });
  });

  it('specific radius/levels with alpha channel', function(done) {
    sharp(fixtures.inputPngWithTransparency)
      .resize(320, 240)
      .sharpen(5, 4, 8)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('png', info.format);
        assert.strictEqual(4, info.channels);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
        fixtures.assertSimilar(fixtures.expected('sharpen-rgba.png'), data, done);
      });
  });

  it('mild sharpen', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .sharpen()
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
@@ -78,6 +96,7 @@ describe('Sharpen', function() {
      .resize(320, 240)
      .sharpen(false)
      .toBuffer(function(err, notSharpened, info) {
        if (err) throw err;
        assert.strictEqual(true, notSharpened.length > 0);
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
@@ -86,6 +105,7 @@ describe('Sharpen', function() {
      .resize(320, 240)
      .sharpen(true)
      .toBuffer(function(err, sharpened, info) {
        if (err) throw err;
        assert.strictEqual(true, sharpened.length > 0);
        assert.strictEqual(true, sharpened.length > notSharpened.length);
        assert.strictEqual('jpeg', info.format);

@@ -42,7 +42,7 @@ describe('Threshold', function() {
      });
  });

  it('threshold true (=128)', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .threshold(true)
@@ -54,6 +54,26 @@ describe('Threshold', function() {
      });
  });

  it('threshold false (=0)', function(done) {
    sharp(fixtures.inputJpg)
      .threshold(false)
      .toBuffer(function(err, data, info) {
        fixtures.assertSimilar(fixtures.inputJpg, data, done);
      });
  });

  it('threshold grayscale: true (=128)', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .threshold(128, { grayscale: true } )
      .toBuffer(function(err, data, info) {
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
        fixtures.assertSimilar(fixtures.expected('threshold-128.jpg'), data, done);
      });
  });

  it('threshold default jpeg', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
@@ -101,15 +121,27 @@ describe('Threshold', function() {
      });
  }

  it('color threshold', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .threshold(128,{'grayscale':false})
      .toBuffer(function(err, data, info) {
        assert.strictEqual('jpeg', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
        fixtures.assertSimilar(fixtures.expected('threshold-color-128.jpg'), data, done);
      });
  });

  it('invalid threshold -1', function() {
    assert.throws(function() {
      sharp(fixtures.inputJpg).threshold(-1);
      sharp().threshold(-1);
    });
  });

  it('invalid threshold 256', function() {
    assert.throws(function() {
      sharp(fixtures.inputJpg).threshold(256);
      sharp().threshold(256);
    });
  });
});

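These threshold tests cover the boolean shorthand (`threshold(true)` behaves like `threshold(128)`, `threshold(false)` like `threshold(0)`, i.e. no thresholding) and the `grayscale` option. A rough sketch of the colour-thresholding form, using a placeholder input path rather than a test fixture:

```javascript
var sharp = require('sharp');

// Threshold at 128 with the default greyscale conversion disabled.
// 'input.jpg' and 'threshold-colour.jpg' are placeholder paths.
sharp('input.jpg')
  .threshold(128, { grayscale: false })
  .toFile('threshold-colour.jpg', function(err) {
    if (err) throw err;
  });
```

Per the 'invalid threshold' tests, values such as -1 and 256 are rejected with an exception.
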
48
test/unit/trim.js
Normal file
@@ -0,0 +1,48 @@
'use strict';

var assert = require('assert');

var sharp = require('../../index');
var fixtures = require('../fixtures');

describe('Trim borders', function() {

  it('Threshold default', function(done) {
    var expected = fixtures.expected('alpha-layer-1-fill-trim-resize.png');
    sharp(fixtures.inputPngOverlayLayer1)
      .resize(450, 322)
      .trim()
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual('png', info.format);
        assert.strictEqual(450, info.width);
        assert.strictEqual(322, info.height);
        fixtures.assertSimilar(expected, data, done);
      });
  });

  it('16-bit PNG with alpha channel', function(done) {
    sharp(fixtures.inputPngWithTransparency16bit)
      .resize(32, 32)
      .trim(20)
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual(true, data.length > 0);
        assert.strictEqual('png', info.format);
        assert.strictEqual(32, info.width);
        assert.strictEqual(32, info.height);
        assert.strictEqual(4, info.channels);
        fixtures.assertSimilar(fixtures.expected('trim-16bit-rgba.png'), data, done);
      });
  });

  describe('Invalid thresholds', function() {
    [-1, 100, 'fail', {}].forEach(function(threshold) {
      it(threshold, function() {
        assert.throws(function() {
          sharp().trim(threshold);
        });
      });
    });
  });
});
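
The new `trim()` operation introduced by this test file can be called with no argument (default tolerance) or with an integer tolerance such as `trim(20)` above; values like -1, 100, 'fail' or an object throw, per the 'Invalid thresholds' tests. A minimal usage sketch with placeholder file names:

```javascript
var sharp = require('sharp');

// Trim near-uniform borders, then resize; 20 is the tolerance value
// exercised by the 16-bit PNG test above.
// 'scan.png' and 'scan-trimmed.png' are placeholder paths for illustration.
sharp('scan.png')
  .trim(20)
  .resize(450, 322)
  .toFile('scan-trimmed.png', function(err, info) {
    if (err) throw err;
    console.log('Wrote %dx%d image', info.width, info.height);
  });
```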