Expose libvips affine operation (#2336)

Authored by Guillermo Varela on 2020-11-16 12:27:38 +00:00; committed by GitHub
parent 2872602c9e
commit 0f473fe3b1
25 changed files with 463 additions and 25 deletions


@ -65,6 +65,61 @@ The use of `flop` implies the removal of the EXIF `Orientation` tag, if any.
Returns **Sharp**
## affine
Perform an affine transform on an image. This operation will always occur after resizing, extraction and rotation, if any.
You must provide an array of length 4 or a 2x2 array as the affine transformation matrix.
By default, new pixels are filled with a black background. You can provide a background color with the `background` option.
A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolators` Object, e.g. `sharp.interpolators.nohalo`.
In the case of a 2x2 matrix, the transform is:
- X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
- Y = `matrix[1, 0]` \* (x + `idx`) + `matrix[1, 1]` \* (y + `idy`) + `ody`
where:
- x and y are the coordinates in the input image.
- X and Y are the coordinates in the output image.
- (0,0) is the upper left corner.
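As an illustration of the matrix form, the sketch below builds a 2x2 rotation matrix and applies it together with an output offset; the `rotationMatrix` helper and the file names are illustrative, not part of sharp:
```javascript
// Illustrative helper (not part of sharp): 2x2 rotation matrix for an angle in degrees
const rotationMatrix = (degrees) => {
  const theta = (degrees * Math.PI) / 180;
  return [
    [Math.cos(theta), -Math.sin(theta)],
    [Math.sin(theta), Math.cos(theta)]
  ];
};

// Rotate by 30 degrees, shift the result 20 pixels via odx,
// and fill any newly exposed pixels with white
sharp('input.jpg')
  .affine(rotationMatrix(30), { background: 'white', odx: 20 })
  .toFile('output.jpg');
```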
### Parameters
- `matrix` **([Array][7]<[Array][7]<[number][1]>> | [Array][7]<[number][1]>)** affine transformation matrix
- `options` **[Object][2]?** if present, is an Object with optional attributes.
- `options.background` **([String][3] \| [Object][2])** parsed by the [color][4] module to extract values for red, green, blue and alpha. (optional, default `"#000000"`)
- `options.idx` **[Number][1]** input horizontal offset (optional, default `0`)
- `options.idy` **[Number][1]** input vertical offset (optional, default `0`)
- `options.odx` **[Number][1]** output horizontal offset (optional, default `0`)
- `options.ody` **[Number][1]** output vertical offset (optional, default `0`)
- `options.interpolator` **[String][3]** interpolator (optional, default `sharp.interpolators.bicubic`)
### Examples
```javascript
const pipeline = sharp()
.affine([[1, 0.3], [0.1, 0.7]], {
background: 'white',
interpolator: sharp.interpolators.nohalo
})
.toBuffer((err, outputBuffer, info) => {
// outputBuffer contains the transformed image
// info.width and info.height contain the new dimensions
});
inputStream
.pipe(pipeline);
```
- Throws **[Error][5]** Invalid parameters
Returns **Sharp**
**Meta**
- **since**: 0.27.0
## sharpen
Sharpen the image.


@ -12,6 +12,36 @@ console.log(sharp.format);
Returns **[Object][1]**
## interpolators
An Object containing the available interpolators and their libvips names (see the usage sketch after the entries below)
Type: [string][2]
### nearest
[Nearest neighbour interpolation][3]. Suitable for image enlargement only.
### bilinear
[Bilinear interpolation][4]. Faster than bicubic but with less smooth results.
### bicubic
[Bicubic interpolation][5] (the default).
### locallyBoundedBicubic
[LBB interpolation][6]. Prevents some "[acutance][7]" but typically reduces performance by a factor of 2.
### nohalo
[Nohalo interpolation][8]. Prevents acutance but typically reduces performance by a factor of 3.
### vertexSplitQuadraticBasisSpline
[VSQBS interpolation][9]. Prevents "staircasing" when enlarging.
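These values are intended to be passed as the `interpolator` option of `affine`, for example (a minimal sketch; the input file name is illustrative):
```javascript
// 2x enlargement using the nohalo interpolator instead of the default bicubic
sharp('input.jpg')
  .affine([[2, 0], [0, 2]], { interpolator: sharp.interpolators.nohalo })
  .toBuffer()
  .then(data => {
    // data contains the enlarged image
  });
```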
## versions
An Object containing the version numbers of libvips and its dependencies.
@ -31,10 +61,10 @@ useful for determining how much working memory is required for a particular task
### Parameters
- `options` **([Object][1] \| [boolean][2])** Object with the following attributes, or boolean where true uses default cache settings and false removes all caching (optional, default `true`)
- `options.memory` **[number][3]** is the maximum memory in MB to use for this cache (optional, default `50`)
- `options.files` **[number][3]** is the maximum number of files to hold open (optional, default `20`)
- `options.items` **[number][3]** is the maximum number of operations to cache (optional, default `100`)
- `options` **([Object][1] \| [boolean][10])** Object with the following attributes, or boolean where true uses default cache settings and false removes all caching (optional, default `true`)
- `options.memory` **[number][11]** is the maximum memory in MB to use for this cache (optional, default `50`)
- `options.files` **[number][11]** is the maximum number of files to hold open (optional, default `20`)
- `options.items` **[number][11]** is the maximum number of operations to cache (optional, default `100`)
### Examples
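A minimal usage sketch based on the parameters documented above (the values here are arbitrary):
```javascript
// Raise the memory limit, keep the other defaults
sharp.cache({ memory: 200, files: 20, items: 100 });

// Disable all caching
sharp.cache(false);
```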
@ -64,7 +94,7 @@ This method always returns the current concurrency.
### Parameters
- `concurrency` **[number][3]?**
- `concurrency` **[number][11]?**
### Examples
@ -74,7 +104,7 @@ sharp.concurrency(2); // 2
sharp.concurrency(0); // 4
```
Returns **[number][3]** concurrency
Returns **[number][11]** concurrency
## queue
@ -116,7 +146,7 @@ by taking advantage of the SIMD vector unit of the CPU, e.g. Intel SSE and ARM N
### Parameters
- `simd` **[boolean][2]** (optional, default `true`)
- `simd` **[boolean][10]** (optional, default `true`)
### Examples
@ -130,10 +160,26 @@ const simd = sharp.simd(false);
// prevent libvips from using liborc at runtime
```
Returns **[boolean][2]**
Returns **[boolean][10]**
[1]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[3]: http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation
[4]: http://en.wikipedia.org/wiki/Bilinear_interpolation
[5]: http://en.wikipedia.org/wiki/Bicubic_interpolation
[6]: https://github.com/jcupitt/libvips/blob/master/libvips/resample/lbb.cpp#L100
[7]: http://en.wikipedia.org/wiki/Acutance
[8]: http://eprints.soton.ac.uk/268086/
[9]: https://github.com/jcupitt/libvips/blob/master/libvips/resample/vsqbs.cpp#L48
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[11]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number

File diff suppressed because one or more lines are too long


@ -155,6 +155,13 @@ const Sharp = function (input, options) {
extendRight: 0,
extendBackground: [0, 0, 0, 255],
withoutEnlargement: false,
affineMatrix: [],
affineBackground: [0, 0, 0, 255],
affineIdx: 0,
affineIdy: 0,
affineOdx: 0,
affineOdy: 0,
affineInterpolator: this.constructor.interpolators.bilinear,
kernel: 'lanczos3',
fastShrinkOnLoad: true,
// operations


@ -1,5 +1,6 @@
'use strict';
const { flatten: flattenArray } = require('array-flatten');
const color = require('color');
const is = require('./is');
@ -82,6 +83,103 @@ function flop (flop) {
return this;
}
/**
* Perform an affine transform on an image. This operation will always occur after resizing, extraction and rotation, if any.
*
* You must provide an array of length 4 or a 2x2 array as the affine transformation matrix.
* By default, new pixels are filled with a black background. You can provide a background color with the `background` option.
* A particular interpolator may also be specified. Set the `interpolator` option to an attribute of the `sharp.interpolators` Object, e.g. `sharp.interpolators.nohalo`.
*
* In the case of a 2x2 matrix, the transform is:
* - X = `matrix[0, 0]` \* (x + `idx`) + `matrix[0, 1]` \* (y + `idy`) + `odx`
* - Y = `matrix[1, 0]` \* (x + `idx`) + `matrix[1, 1]` \* (y + `idy`) + `ody`
*
* where:
* - x and y are the coordinates in the input image.
* - X and Y are the coordinates in the output image.
* - (0,0) is the upper left corner.
*
* @since 0.27.0
*
* @example
* const pipeline = sharp()
* .affine([[1, 0.3], [0.1, 0.7]], {
* background: 'white',
* interpolator: sharp.interpolators.nohalo
* })
* .toBuffer((err, outputBuffer, info) => {
* // outputBuffer contains the transformed image
* // info.width and info.height contain the new dimensions
* });
*
* inputStream
* .pipe(pipeline);
*
* @param {Array<Array<number>>|Array<number>} matrix - affine transformation matrix
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {String|Object} [options.background="#000000"] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {Number} [options.idx=0] - input horizontal offset
* @param {Number} [options.idy=0] - input vertical offset
* @param {Number} [options.odx=0] - output horizontal offset
* @param {Number} [options.ody=0] - output vertical offset
* @param {String} [options.interpolator=sharp.interpolators.bicubic] - interpolator
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
function affine (matrix, options) {
const flatMatrix = flattenArray(matrix);
if (flatMatrix.length === 4 && flatMatrix.every(is.number)) {
this.options.affineMatrix = flatMatrix;
} else {
throw is.invalidParameterError('matrix', '1x4 or 2x2 array', matrix);
}
if (is.defined(options)) {
if (is.object(options)) {
this._setBackgroundColourOption('affineBackground', options.background);
if (is.defined(options.idx)) {
if (is.number(options.idx)) {
this.options.affineIdx = options.idx;
} else {
throw is.invalidParameterError('options.idx', 'number', options.idx);
}
}
if (is.defined(options.idy)) {
if (is.number(options.idy)) {
this.options.affineIdy = options.idy;
} else {
throw is.invalidParameterError('options.idy', 'number', options.idy);
}
}
if (is.defined(options.odx)) {
if (is.number(options.odx)) {
this.options.affineOdx = options.odx;
} else {
throw is.invalidParameterError('options.odx', 'number', options.odx);
}
}
if (is.defined(options.ody)) {
if (is.number(options.ody)) {
this.options.affineOdy = options.ody;
} else {
throw is.invalidParameterError('options.ody', 'number', options.ody);
}
}
if (is.defined(options.interpolator)) {
if (is.inArray(options.interpolator, Object.values(this.constructor.interpolators))) {
this.options.affineInterpolator = options.interpolator;
} else {
throw is.invalidParameterError('options.interpolator', 'valid interpolator name', options.interpolator);
}
}
} else {
throw is.invalidParameterError('options', 'object', options);
}
}
return this;
}
/**
* Sharpen the image.
* When used without parameters, performs a fast, mild sharpen of the output image.
@ -482,6 +580,7 @@ module.exports = function (Sharp) {
rotate,
flip,
flop,
affine,
sharpen,
median,
blur,


@ -13,6 +13,26 @@ const sharp = require('../build/Release/sharp.node');
*/
const format = sharp.format();
/**
* An Object containing the available interpolators and their libvips names
* @readonly
* @enum {string}
*/
const interpolators = {
/** [Nearest neighbour interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation). Suitable for image enlargement only. */
nearest: 'nearest',
/** [Bilinear interpolation](http://en.wikipedia.org/wiki/Bilinear_interpolation). Faster than bicubic but with less smooth results. */
bilinear: 'bilinear',
/** [Bicubic interpolation](http://en.wikipedia.org/wiki/Bicubic_interpolation) (the default). */
bicubic: 'bicubic',
/** [LBB interpolation](https://github.com/jcupitt/libvips/blob/master/libvips/resample/lbb.cpp#L100). Prevents some "[acutance](http://en.wikipedia.org/wiki/Acutance)" but typically reduces performance by a factor of 2. */
locallyBoundedBicubic: 'lbb',
/** [Nohalo interpolation](http://eprints.soton.ac.uk/268086/). Prevents acutance but typically reduces performance by a factor of 3. */
nohalo: 'nohalo',
/** [VSQBS interpolation](https://github.com/jcupitt/libvips/blob/master/libvips/resample/vsqbs.cpp#L48). Prevents "staircasing" when enlarging. */
vertexSplitQuadraticBasisSpline: 'vsqbs'
};
/**
* An Object containing the version numbers of libvips and its dependencies.
* @member
@ -146,6 +166,7 @@ module.exports = function (Sharp) {
Sharp[f.name] = f;
});
Sharp.format = format;
Sharp.interpolators = interpolators;
Sharp.versions = versions;
Sharp.queue = queue;
};


@ -69,7 +69,8 @@
"Edward Silverton <e.silverton@gmail.com>",
"Roman Malieiev <aromaleev@gmail.com>",
"Tomas Szabo <tomas.szabo@deftomat.com>",
"Robert O'Rourke <robert@o-rourke.org>"
"Robert O'Rourke <robert@o-rourke.org>",
"Guillermo Alfonso Varela Chouciño <guillevch@gmail.com>"
],
"scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node-gyp rebuild && node install/dll-copy)",
@ -112,6 +113,7 @@
"vips"
],
"dependencies": {
"array-flatten": "^3.0.0",
"color": "^3.1.2",
"detect-libc": "^1.0.3",
"node-addon-api": "^3.0.2",


@ -54,13 +54,13 @@ namespace sharp {
bool AttrAsBool(Napi::Object obj, std::string attr) {
return obj.Get(attr).As<Napi::Boolean>().Value();
}
std::vector<double> AttrAsRgba(Napi::Object obj, std::string attr) {
Napi::Array background = obj.Get(attr).As<Napi::Array>();
std::vector<double> rgba(background.Length());
for (unsigned int i = 0; i < background.Length(); i++) {
rgba[i] = AttrAsDouble(background, i);
std::vector<double> AttrAsVectorOfDouble(Napi::Object obj, std::string attr) {
Napi::Array napiArray = obj.Get(attr).As<Napi::Array>();
std::vector<double> vectorOfDouble(napiArray.Length());
for (unsigned int i = 0; i < napiArray.Length(); i++) {
vectorOfDouble[i] = AttrAsDouble(napiArray, i);
}
return rgba;
return vectorOfDouble;
}
std::vector<int32_t> AttrAsInt32Vector(Napi::Object obj, std::string attr) {
Napi::Array array = obj.Get(attr).As<Napi::Array>();
@ -109,7 +109,7 @@ namespace sharp {
descriptor->createChannels = AttrAsUint32(input, "createChannels");
descriptor->createWidth = AttrAsUint32(input, "createWidth");
descriptor->createHeight = AttrAsUint32(input, "createHeight");
descriptor->createBackground = AttrAsRgba(input, "createBackground");
descriptor->createBackground = AttrAsVectorOfDouble(input, "createBackground");
}
// Limit input images to a given number of pixels, where pixels = width * height
descriptor->limitInputPixels = AttrAsUint32(input, "limitInputPixels");


@ -92,7 +92,7 @@ namespace sharp {
double AttrAsDouble(Napi::Object obj, std::string attr);
double AttrAsDouble(Napi::Object obj, unsigned int const attr);
bool AttrAsBool(Napi::Object obj, std::string attr);
std::vector<double> AttrAsRgba(Napi::Object obj, std::string attr);
std::vector<double> AttrAsVectorOfDouble(Napi::Object obj, std::string attr);
std::vector<int32_t> AttrAsInt32Vector(Napi::Object obj, std::string attr);
// Create an InputDescriptor instance from a Napi::Object describing an input image


@ -485,6 +485,18 @@ class PipelineWorker : public Napi::AsyncWorker {
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost);
}
// Affine transform
if (baton->affineMatrix.size() > 0) {
std::vector<double> background;
std::tie(image, background) = sharp::ApplyAlpha(image, baton->affineBackground);
image = image.affine(baton->affineMatrix, VImage::option()->set("background", background)
->set("idx", baton->affineIdx)
->set("idy", baton->affineIdy)
->set("odx", baton->affineOdx)
->set("ody", baton->affineOdy)
->set("interpolate", baton->affineInterpolator));
}
// Extend edges
if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) {
std::vector<double> background;
@ -1249,7 +1261,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
// Resize options
baton->withoutEnlargement = sharp::AttrAsBool(options, "withoutEnlargement");
baton->position = sharp::AttrAsInt32(options, "position");
baton->resizeBackground = sharp::AttrAsRgba(options, "resizeBackground");
baton->resizeBackground = sharp::AttrAsVectorOfDouble(options, "resizeBackground");
baton->kernel = sharp::AttrAsStr(options, "kernel");
baton->fastShrinkOnLoad = sharp::AttrAsBool(options, "fastShrinkOnLoad");
// Join Channel Options
@ -1262,7 +1274,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
}
// Operators
baton->flatten = sharp::AttrAsBool(options, "flatten");
baton->flattenBackground = sharp::AttrAsRgba(options, "flattenBackground");
baton->flattenBackground = sharp::AttrAsVectorOfDouble(options, "flattenBackground");
baton->negate = sharp::AttrAsBool(options, "negate");
baton->blurSigma = sharp::AttrAsDouble(options, "blurSigma");
baton->brightness = sharp::AttrAsDouble(options, "brightness");
@ -1284,7 +1296,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->useExifOrientation = sharp::AttrAsBool(options, "useExifOrientation");
baton->angle = sharp::AttrAsInt32(options, "angle");
baton->rotationAngle = sharp::AttrAsDouble(options, "rotationAngle");
baton->rotationBackground = sharp::AttrAsRgba(options, "rotationBackground");
baton->rotationBackground = sharp::AttrAsVectorOfDouble(options, "rotationBackground");
baton->rotateBeforePreExtract = sharp::AttrAsBool(options, "rotateBeforePreExtract");
baton->flip = sharp::AttrAsBool(options, "flip");
baton->flop = sharp::AttrAsBool(options, "flop");
@ -1292,8 +1304,15 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->extendBottom = sharp::AttrAsInt32(options, "extendBottom");
baton->extendLeft = sharp::AttrAsInt32(options, "extendLeft");
baton->extendRight = sharp::AttrAsInt32(options, "extendRight");
baton->extendBackground = sharp::AttrAsRgba(options, "extendBackground");
baton->extendBackground = sharp::AttrAsVectorOfDouble(options, "extendBackground");
baton->extractChannel = sharp::AttrAsInt32(options, "extractChannel");
baton->affineMatrix = sharp::AttrAsVectorOfDouble(options, "affineMatrix");
baton->affineBackground = sharp::AttrAsVectorOfDouble(options, "affineBackground");
baton->affineIdx = sharp::AttrAsDouble(options, "affineIdx");
baton->affineIdy = sharp::AttrAsDouble(options, "affineIdy");
baton->affineOdx = sharp::AttrAsDouble(options, "affineOdx");
baton->affineOdy = sharp::AttrAsDouble(options, "affineOdy");
baton->affineInterpolator = vips::VInterpolate::new_from_name(sharp::AttrAsStr(options, "affineInterpolator").data());
baton->removeAlpha = sharp::AttrAsBool(options, "removeAlpha");
baton->ensureAlpha = sharp::AttrAsBool(options, "ensureAlpha");
@ -1392,7 +1411,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
baton->tileSize = sharp::AttrAsUint32(options, "tileSize");
baton->tileOverlap = sharp::AttrAsUint32(options, "tileOverlap");
baton->tileAngle = sharp::AttrAsInt32(options, "tileAngle");
baton->tileBackground = sharp::AttrAsRgba(options, "tileBackground");
baton->tileBackground = sharp::AttrAsVectorOfDouble(options, "tileBackground");
baton->tileSkipBlanks = sharp::AttrAsInt32(options, "tileSkipBlanks");
baton->tileContainer = static_cast<VipsForeignDzContainer>(
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_DZ_CONTAINER,


@ -119,6 +119,13 @@ struct PipelineBaton {
int extendRight;
std::vector<double> extendBackground;
bool withoutEnlargement;
std::vector<double> affineMatrix;
std::vector<double> affineBackground;
double affineIdx;
double affineIdy;
double affineOdx;
double affineOdy;
vips::VInterpolate affineInterpolator;
int jpegQuality;
bool jpegProgressive;
std::string jpegChromaSubsampling;
@ -231,6 +238,13 @@ struct PipelineBaton {
extendRight(0),
extendBackground{ 0.0, 0.0, 0.0, 255.0 },
withoutEnlargement(false),
affineMatrix{ 1.0, 0.0, 0.0, 1.0 },
affineBackground{ 0.0, 0.0, 0.0, 255.0 },
affineIdx(0),
affineIdy(0),
affineOdx(0),
affineOdy(0),
affineInterpolator(vips::VInterpolate::new_from_name("bicubic")),
jpegQuality(80),
jpegProgressive(false),
jpegChromaSubsampling("4:2:0"),

13 binary files added (not shown). Sizes: 36 KiB, 26 KiB, 78 KiB, 57 KiB, 58 KiB, 8.0 KiB, 38 KiB, 57 KiB, 64 KiB, 58 KiB, 46 KiB, 31 KiB, 48 KiB.

test/unit/affine.js (new file, 175 lines)

@ -0,0 +1,175 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Affine transform', () => {
describe('Invalid input', () => {
it('Missing matrix', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine();
});
});
it('Invalid 1d matrix', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine(['123', 123, 123, 123]);
});
});
it('Invalid 2d matrix', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[123, 123], [null, 123]]);
});
});
it('Invalid options parameter type', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[1, 0], [0, 1]], 'invalid options type');
});
});
it('Invalid background color', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([4, 4, 4, 4], { background: 'not a color' });
});
});
it('Invalid idx offset type', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[4, 4], [4, 4]], { idx: 'invalid idx type' });
});
});
it('Invalid idy offset type', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([4, 4, 4, 4], { idy: 'invalid idy type' });
});
});
it('Invalid odx offset type', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[4, 4], [4, 4]], { odx: 'invalid odx type' });
});
});
it('Invalid ody offset type', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[4, 4], [4, 4]], { ody: 'invalid ody type' });
});
});
it('Invalid interpolator', () => {
assert.throws(() => {
sharp(fixtures.inputJpg)
.affine([[4, 4], [4, 4]], { interpolator: 'cubic' });
});
});
});
it('Applies identity matrix', done => {
const input = fixtures.inputJpg;
sharp(input)
.affine([[1, 0], [0, 1]])
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(input, data, done);
});
});
it('Applies resize affine matrix', done => {
const input = fixtures.inputJpg;
const inputWidth = 2725;
const inputHeight = 2225;
sharp(input)
.affine([[0.2, 0], [0, 1.5]])
.toBuffer((err, data, info) => {
if (err) throw err;
fixtures.assertSimilar(input, data, done);
assert.strictEqual(info.width, Math.ceil(inputWidth * 0.2));
assert.strictEqual(info.height, Math.ceil(inputHeight * 1.5));
});
});
it('Resizes and applies affine transform', done => {
const input = fixtures.inputJpg;
sharp(input)
.resize(500, 500)
.affine([[0.5, 1], [1, 0.5]])
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(data, fixtures.expected('affine-resize-expected.jpg'), done);
});
});
it('Extracts and applies affine transform', done => {
sharp(fixtures.inputJpg)
.extract({ left: 300, top: 300, width: 600, height: 600 })
.affine([0.3, 0, -0.5, 0.3])
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(data, fixtures.expected('affine-extract-expected.jpg'), done);
});
});
it('Rotates and applies affine transform', done => {
sharp(fixtures.inputJpg320x240)
.rotate(90)
.affine([[-1.2, 0], [0, -1.2]])
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(data, fixtures.expected('affine-rotate-expected.jpg'), done);
});
});
it('Extracts, rotates and applies affine transform', done => {
sharp(fixtures.inputJpg)
.extract({ left: 1000, top: 1000, width: 200, height: 200 })
.rotate(45, { background: 'blue' })
.affine([[2, 1], [2, -0.5]], { background: 'red' })
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('affine-extract-rotate-expected.jpg'), data, done);
});
});
it('Applies affine transform with background color', done => {
sharp(fixtures.inputJpg320x240)
.rotate(180)
.affine([[-1.5, 1.2], [-1, 1]], { background: 'red' })
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('affine-background-expected.jpg'), data, done);
});
});
it('Applies affine transform with background color and output offsets', done => {
sharp(fixtures.inputJpg320x240)
.rotate(180)
.affine([[-2, 1.5], [-1, 2]], { background: 'blue', odx: 40, ody: -100 })
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('affine-background-output-offsets-expected.jpg'), data, done);
});
});
it('Applies affine transform with background color and all offsets', done => {
sharp(fixtures.inputJpg320x240)
.rotate(180)
.affine([[-1.2, 1.8], [-1, 2]], { background: 'yellow', idx: 10, idy: -40, odx: 10, ody: -50 })
.toBuffer((err, data) => {
if (err) throw err;
fixtures.assertSimilar(fixtures.expected('affine-background-all-offsets-expected.jpg'), data, done);
});
});
describe('Interpolations', () => {
const input = fixtures.inputJpg320x240;
const inputWidth = 320;
const inputHeight = 240;
for (const interp in sharp.interpolators) {
it(`Performs 2x upscale with ${interp} interpolation`, done => {
sharp(input)
.affine([[2, 0], [0, 2]], { interpolator: sharp.interpolators[interp] })
.toBuffer((err, data, info) => {
if (err) throw err;
assert.strictEqual(info.width, Math.ceil(inputWidth * 2));
assert.strictEqual(info.height, Math.ceil(inputHeight * 2));
fixtures.assertSimilar(fixtures.expected(`affine-${sharp.interpolators[interp]}-2x-upscale-expected.jpg`), data, done);
});
});
}
});
});