Compare commits

6 Commits

Author | SHA1 | Message | Date
Lovell Fuller | 142c431745 | Release v0.17.2 | 2017-02-11 10:35:34 +00:00
Lovell Fuller | 81f5589411 | Add use of 'cc' to improve C++ code style linting | 2017-02-11 09:59:23 +00:00
Mark van Seventer | 04f5c884a4 | Add CLI tools section to installation guide (#691) | 2017-01-25 21:34:23 +00:00
Lovell Fuller | d8df503404 | Ensure Readable can start flowing after Writable finish #671 | 2017-01-22 14:03:06 +00:00
Lovell Fuller | d241efcdbe | Add changelog entry and credit for #685 | 2017-01-22 13:58:11 +00:00
Rahul Nanwani | a1b8efe721 | Expose WebP alpha quality, lossless and near-lossless output options (#685) | 2017-01-19 13:45:32 +00:00
23 changed files with 429 additions and 216 deletions

View File

@@ -115,6 +115,9 @@ Use these WebP options for output image.
- `options` **[Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object)?** output options
- `options.quality` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** quality, integer 1-100 (optional, default `80`)
- `options.alphaQuality` **[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)?** quality of alpha layer, integer 0-100 (optional, default `100`)
- `options.lossless` **[Boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** use lossless compression mode (optional, default `false`)
- `options.nearLossless` **[Boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** use near_lossless compression mode (optional, default `false`)
- `options.force` **[Boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** force WebP output, otherwise attempt to use input format (optional, default `true`)
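A minimal usage sketch of these new WebP output options (illustrative file names, not part of this changeset):

const sharp = require('sharp');

// Near-lossless WebP output with a slightly reduced alpha-layer quality.
sharp('input.png')
  .webp({ quality: 80, alphaQuality: 90, nearLossless: true })
  .toFile('output.webp', function (err, info) {
    if (err) throw err;
    console.log(info.format, info.size); // 'webp', output size in bytes
  });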

View File

@@ -4,6 +4,16 @@
Requires libvips v8.4.2.
#### v0.17.2 - 11<sup>th</sup> February 2017
* Ensure Readable side of Stream can start flowing after Writable side has finished.
[#671](https://github.com/lovell/sharp/issues/671)
[@danhaller](https://github.com/danhaller)
* Expose WebP alpha quality, lossless and near-lossless output options.
[#685](https://github.com/lovell/sharp/pull/685)
[@rnanwani](https://github.com/rnanwani)
#### v0.17.1 - 15<sup>th</sup> January 2017
* Improve error messages for invalid parameters.
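
The Stream entry above (#671) enables the pattern sketched below, where sharp's Readable side is piped only after its Writable side has finished. This mirrors the new unit test later in this comparison; file names are illustrative:

const fs = require('fs');
const sharp = require('sharp');

const pipeline = sharp().resize(320, 240);
fs.createReadStream('input.jpg').pipe(pipeline);

// Attach the output stream only once the input has fully arrived;
// before this release the Readable side could stall in this case.
pipeline.on('finish', function () {
  pipeline.pipe(fs.createWriteStream('output.jpg'));
});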

View File

@@ -135,6 +135,11 @@ You can now download your deployment ZIP using `scp` and upload it to Lambda. Be
* [gulp-responsive](https://www.npmjs.com/package/gulp-responsive)
* [grunt-sharp](https://www.npmjs.com/package/grunt-sharp)
### CLI tools
* [sharp-cli](https://www.npmjs.com/package/sharp-cli)
### Security
Many users of this module process untrusted, user-supplied images,

View File

@@ -145,6 +145,9 @@ const Sharp = function (input, options) {
pngCompressionLevel: 6,
pngAdaptiveFiltering: true,
webpQuality: 80,
webpAlphaQuality: 100,
webpLossless: false,
webpNearLossless: false,
tiffQuality: 80,
tileSize: 256,
tileOverlap: 0,

View File

@@ -64,6 +64,12 @@ const _write = function _write (chunk, encoding, callback) {
if (Array.isArray(this.options.input.buffer)) {
/* istanbul ignore else */
if (is.buffer(chunk)) {
if (this.options.input.buffer.length === 0) {
const that = this;
this.on('finish', function () {
that.streamInFinished = true;
});
}
this.options.input.buffer.push(chunk);
callback();
} else {

View File

@@ -168,6 +168,9 @@ const png = function png (options) {
* Use these WebP options for output image.
* @param {Object} [options] - output options
* @param {Number} [options.quality=80] - quality, integer 1-100
* @param {Number} [options.alphaQuality=100] - quality of alpha layer, integer 0-100
* @param {Boolean} [options.lossless=false] - use lossless compression mode
* @param {Boolean} [options.nearLossless=false] - use near_lossless compression mode
* @param {Boolean} [options.force=true] - force WebP output, otherwise attempt to use input format
* @returns {Sharp}
* @throws {Error} Invalid options
@@ -180,6 +183,19 @@ const webp = function webp (options) {
throw new Error('Invalid quality (integer, 1-100) ' + options.quality);
}
}
if (is.object(options) && is.defined(options.alphaQuality)) {
if (is.integer(options.alphaQuality) && is.inRange(options.alphaQuality, 1, 100)) {
this.options.webpAlphaQuality = options.alphaQuality;
} else {
throw new Error('Invalid webp alpha quality (integer, 1-100) ' + options.alphaQuality);
}
}
if (is.object(options) && is.defined(options.lossless)) {
this._setBooleanOption('webpLossless', options.lossless);
}
if (is.object(options) && is.defined(options.nearLossless)) {
this._setBooleanOption('webpNearLossless', options.nearLossless);
}
return this._updateFormatOut('webp', options);
};
@@ -362,9 +378,9 @@ const _pipeline = function _pipeline (callback) {
// output=stream
if (this._isStreamInput()) {
// output=stream, input=stream
this.on('finish', function () {
that._flattenBufferIn();
sharp.pipeline(that.options, function (err, data, info) {
if (this.streamInFinished) {
this._flattenBufferIn();
sharp.pipeline(this.options, function (err, data, info) {
if (err) {
that.emit('error', err);
} else {
@@ -373,7 +389,20 @@ const _pipeline = function _pipeline (callback) {
}
that.push(null);
});
});
} else {
this.on('finish', function () {
that._flattenBufferIn();
sharp.pipeline(that.options, function (err, data, info) {
if (err) {
that.emit('error', err);
} else {
that.emit('info', info);
that.push(data);
}
that.push(null);
});
});
}
} else {
// output=stream, input=file/buffer
sharp.pipeline(this.options, function (err, data, info) {
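
As an aside, the validation added to webp() above rejects out-of-range values before any processing starts; a quick sketch of that behaviour (also covered by a new unit test at the end of this comparison):

try {
  sharp().webp({ alphaQuality: 101 });
} catch (err) {
  console.log(err.message); // 'Invalid webp alpha quality (integer, 1-100) 101'
}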

View File

@@ -1,8 +1,9 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"version": "0.17.1",
"version": "0.17.2",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
"Pierre Inglebert <pierre.inglebert@gmail.com>",
"Jonathan Ong <jonathanrichardong@gmail.com>",
@@ -30,11 +31,12 @@
"Matt Hirsch <mhirsch@media.mit.edu>",
"Matthias Thoemmes <thoemmes@gmail.com>",
"Patrick Paskaris <patrick@paskaris.gr>",
"Jérémy Lal <kapouer@melix.org>"
"Jérémy Lal <kapouer@melix.org>",
"Rahul Nanwani <r.nanwani@gmail.com>"
],
"scripts": {
"clean": "rm -rf node_modules/ build/ vendor/ coverage/ test/fixtures/output.*",
"test": "semistandard && cross-env VIPS_WARNING=0 nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js",
"test": "semistandard && cc && cross-env VIPS_WARNING=0 nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js",
"test-leak": "./test/leak/leak.sh",
"test-packaging": "./packaging/test-linux-x64.sh",
"docs": "for m in constructor input resize composite operation colour channel output utility; do documentation build --shallow --format=md lib/$m.js >docs/api-$m.md; done"
@@ -63,20 +65,20 @@
"caw": "^2.0.0",
"color": "^1.0.3",
"got": "^6.7.1",
"nan": "^2.5.0",
"nan": "^2.5.1",
"semver": "^5.3.0",
"tar": "^2.2.1"
},
"devDependencies": {
"async": "^2.1.4",
"bufferutil": "^1.3.0",
"bufferutil": "^2.0.1",
"cc": "^1.0.0",
"cross-env": "^3.1.4",
"documentation": "^4.0.0-beta.18",
"exif-reader": "^1.0.2",
"icc": "^1.0.0",
"mocha": "^3.2.0",
"node-cpplint": "^0.4.0",
"nyc": "^10.0.0",
"nyc": "^10.1.2",
"rimraf": "^2.5.4",
"semistandard": "^9.2.1",
"unzip": "^0.1.11"
@@ -92,5 +94,12 @@
"env": [
"mocha"
]
},
"cc": {
"linelength": "120",
"filter": [
"build/include",
"runtime/indentation_namespace"
]
}
}

View File

@@ -1,12 +1,27 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <cstdlib>
#include <string>
#include <string.h>
#include <vector>
#include <node.h>
#include <node_buffer.h>
#include <nan.h>
#include <vips/vips8>
#include "nan.h"
#include "common.h"
using vips::VImage;
@@ -254,8 +269,7 @@ namespace sharp {
return (
(bands == 2 && interpretation == VIPS_INTERPRETATION_B_W) ||
(bands == 4 && interpretation != VIPS_INTERPRETATION_CMYK) ||
(bands == 5 && interpretation == VIPS_INTERPRETATION_CMYK)
);
(bands == 5 && interpretation == VIPS_INTERPRETATION_CMYK));
}
/*
@@ -378,23 +392,23 @@ namespace sharp {
int top = 0;
// assign only if valid
if(x >= 0 && x < (inWidth - outWidth)) {
if (x >= 0 && x < (inWidth - outWidth)) {
left = x;
} else if(x >= (inWidth - outWidth)) {
} else if (x >= (inWidth - outWidth)) {
left = inWidth - outWidth;
}
if(y >= 0 && y < (inHeight - outHeight)) {
if (y >= 0 && y < (inHeight - outHeight)) {
top = y;
} else if(y >= (inHeight - outHeight)) {
} else if (y >= (inHeight - outHeight)) {
top = inHeight - outHeight;
}
// the resulting left and top could have been outside the image after calculation from bottom/right edges
if(left < 0) {
if (left < 0) {
left = 0;
}
if(top < 0) {
if (top < 0) {
top = 0;
}
@@ -421,8 +435,7 @@ namespace sharp {
*/
VipsOperationBoolean GetBooleanOperation(std::string const opStr) {
return static_cast<VipsOperationBoolean>(
vips_enum_from_nick(nullptr, VIPS_TYPE_OPERATION_BOOLEAN, opStr.data())
);
vips_enum_from_nick(nullptr, VIPS_TYPE_OPERATION_BOOLEAN, opStr.data()));
}
/*
@@ -430,8 +443,7 @@ namespace sharp {
*/
VipsInterpretation GetInterpretation(std::string const typeStr) {
return static_cast<VipsInterpretation>(
vips_enum_from_nick(nullptr, VIPS_TYPE_INTERPRETATION, typeStr.data())
);
vips_enum_from_nick(nullptr, VIPS_TYPE_INTERPRETATION, typeStr.data()));
}
/*

View File

@@ -1,14 +1,28 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SRC_COMMON_H_
#define SRC_COMMON_H_
#include <string>
#include <tuple>
#include <vector>
#include <node.h>
#include <nan.h>
#include <vips/vips8>
#include "nan.h"
// Verify platform and compiler compatibility
#if (VIPS_MAJOR_VERSION < 8 || (VIPS_MAJOR_VERSION == 8 && VIPS_MINOR_VERSION < 4))
@@ -63,8 +77,7 @@ namespace sharp {
// Create an InputDescriptor instance from a v8::Object describing an input image
InputDescriptor* CreateInputDescriptor(
v8::Handle<v8::Object> input, std::vector<v8::Local<v8::Object>> buffersToPersist
);
v8::Handle<v8::Object> input, std::vector<v8::Local<v8::Object>> buffersToPersist);
enum class ImageType {
JPEG,

View File

@@ -1,9 +1,24 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <numeric>
#include <vector>
#include <node.h>
#include <nan.h>
#include <vips/vips8>
#include "nan.h"
#include "common.h"
#include "metadata.h"
@@ -11,15 +26,14 @@ class MetadataWorker : public Nan::AsyncWorker {
public:
MetadataWorker(
Nan::Callback *callback, MetadataBaton *baton,
std::vector<v8::Local<v8::Object>> const buffersToPersist
) : Nan::AsyncWorker(callback), baton(baton), buffersToPersist(buffersToPersist) {
std::vector<v8::Local<v8::Object>> const buffersToPersist)
: Nan::AsyncWorker(callback), baton(baton), buffersToPersist(buffersToPersist) {
// Protect Buffer objects from GC, keyed on index
std::accumulate(buffersToPersist.begin(), buffersToPersist.end(), 0,
[this](uint32_t index, v8::Local<v8::Object> const buffer) -> uint32_t {
SaveToPersistent(index, buffer);
return index + 1;
}
);
});
}
~MetadataWorker() {}
@@ -72,7 +86,7 @@ class MetadataWorker : public Nan::AsyncWorker {
vips_thread_shutdown();
}
void HandleOKCallback () {
void HandleOKCallback() {
using Nan::New;
using Nan::Set;
Nan::HandleScope();
@@ -99,14 +113,12 @@ class MetadataWorker : public Nan::AsyncWorker {
if (baton->exifLength > 0) {
Set(info,
New("exif").ToLocalChecked(),
Nan::NewBuffer(baton->exif, baton->exifLength, sharp::FreeCallback, nullptr).ToLocalChecked()
);
Nan::NewBuffer(baton->exif, baton->exifLength, sharp::FreeCallback, nullptr).ToLocalChecked());
}
if (baton->iccLength > 0) {
Set(info,
New("icc").ToLocalChecked(),
Nan::NewBuffer(baton->icc, baton->iccLength, sharp::FreeCallback, nullptr).ToLocalChecked()
);
Nan::NewBuffer(baton->icc, baton->iccLength, sharp::FreeCallback, nullptr).ToLocalChecked());
}
argv[1] = info;
}
@@ -116,8 +128,7 @@ class MetadataWorker : public Nan::AsyncWorker {
[this](uint32_t index, v8::Local<v8::Object> const buffer) -> uint32_t {
GetFromPersistent(index);
return index + 1;
}
);
});
delete baton->input;
delete baton;

View File

@@ -1,8 +1,24 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SRC_METADATA_H_
#define SRC_METADATA_H_
#include "nan.h"
#include "common.h"
#include <string>
#include <nan.h>
#include "./common.h"
struct MetadataBaton {
// Input

View File

@@ -1,7 +1,23 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <functional>
#include <memory>
#include <tuple>
#include <vector>
#include <vips/vips8>
#include "common.h"
@@ -17,7 +33,7 @@ namespace sharp {
Assumes alpha channels are already premultiplied and will be unpremultiplied after.
*/
VImage Composite(VImage src, VImage dst, const int gravity) {
if(IsInputValidForComposition(src, dst)) {
if (IsInputValidForComposition(src, dst)) {
// Enlarge overlay src, if required
if (src.width() < dst.width() || src.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
@@ -28,8 +44,7 @@ namespace sharp {
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
->set("background", background));
}
return CompositeImage(src, dst);
}
@@ -38,7 +53,7 @@ namespace sharp {
}
VImage Composite(VImage src, VImage dst, const int x, const int y) {
if(IsInputValidForComposition(src, dst)) {
if (IsInputValidForComposition(src, dst)) {
// Enlarge overlay src, if required
if (src.width() < dst.width() || src.height() < dst.height()) {
// Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity.
@@ -49,8 +64,7 @@ namespace sharp {
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
src = src.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
->set("background", background));
}
return CompositeImage(src, dst);
}
@@ -145,12 +159,11 @@ namespace sharp {
std::vector<double> background { 0.0, 0.0, 0.0, 0.0 };
mask = mask.embed(left, top, dst.width(), dst.height(), VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
->set("background", background));
}
// we use the mask alpha if it has alpha
if(maskHasAlpha) {
if (maskHasAlpha) {
mask = mask.extract_band(mask.bands() - 1, VImage::option()->set("n", 1));;
}
@@ -284,8 +297,8 @@ namespace sharp {
colourspaceBeforeSharpen = VIPS_INTERPRETATION_sRGB;
}
return image.sharpen(
VImage::option()->set("sigma", sigma)->set("m1", flat)->set("m2", jagged)
).colourspace(colourspaceBeforeSharpen);
VImage::option()->set("sigma", sigma)->set("m1", flat)->set("m2", jagged))
.colourspace(colourspaceBeforeSharpen);
}
}
@@ -415,12 +428,11 @@ namespace sharp {
->set("tile_height", 10)
->set("max_tiles", static_cast<int>(round(1.0 + need_lines / 10.0)))
->set("access", VIPS_ACCESS_SEQUENTIAL)
->set("threaded", TRUE)
);
->set("threaded", TRUE));
}
VImage Threshold(VImage image, double const threshold, bool const thresholdGrayscale) {
if(!thresholdGrayscale) {
if (!thresholdGrayscale) {
return image >= threshold;
}
return image.colourspace(VIPS_INTERPRETATION_B_W) >= threshold;
@@ -485,7 +497,7 @@ namespace sharp {
int width = right - left;
int height = bottom - top;
if(width <= 0 || height <= 0) {
if (width <= 0 || height <= 0) {
throw VError("Unexpected error while trimming. Try to lower the tolerance");
}

View File

@@ -1,3 +1,17 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SRC_OPERATIONS_H_
#define SRC_OPERATIONS_H_
@@ -78,8 +92,7 @@ namespace sharp {
Calculate crop area based on given strategy (Entropy, Attention)
*/
std::tuple<int, int> Crop(
VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy
);
VImage image, int const outWidth, int const outHeight, std::function<double(VImage)> strategy);
/*
Insert a tile cache to prevent over-computation of any previous operations in the pipeline

View File

@@ -1,15 +1,31 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <cmath>
#include <tuple>
#include <utility>
#include <map>
#include <memory>
#include <numeric>
#include <map>
#include <string>
#include <tuple>
#include <utility>
#include <vector>
#include <vips/vips8>
#include <node.h>
#include <nan.h>
#include "nan.h"
#include "common.h"
#include "operations.h"
#include "pipeline.h"
@@ -18,15 +34,14 @@ class PipelineWorker : public Nan::AsyncWorker {
public:
PipelineWorker(
Nan::Callback *callback, PipelineBaton *baton, Nan::Callback *queueListener,
std::vector<v8::Local<v8::Object>> const buffersToPersist
) : Nan::AsyncWorker(callback), baton(baton), queueListener(queueListener), buffersToPersist(buffersToPersist) {
std::vector<v8::Local<v8::Object>> const buffersToPersist)
: Nan::AsyncWorker(callback), baton(baton), queueListener(queueListener), buffersToPersist(buffersToPersist) {
// Protect Buffer objects from GC, keyed on index
std::accumulate(buffersToPersist.begin(), buffersToPersist.end(), 0,
[this](uint32_t index, v8::Local<v8::Object> const buffer) -> uint32_t {
SaveToPersistent(index, buffer);
return index + 1;
}
);
});
}
~PipelineWorker() {}
@@ -84,7 +99,7 @@ class PipelineWorker : public Nan::AsyncWorker {
}
// Trim
if(baton->trimTolerance != 0) {
if (baton->trimTolerance != 0) {
image = sharp::Trim(image, baton->trimTolerance);
}
@@ -277,8 +292,7 @@ class PipelineWorker : public Nan::AsyncWorker {
image = image.icc_transform(
const_cast<char*>(profileMap[VIPS_INTERPRETATION_sRGB].data()), VImage::option()
->set("embedded", TRUE)
->set("intent", VIPS_INTENT_PERCEPTUAL)
);
->set("intent", VIPS_INTENT_PERCEPTUAL));
} catch(...) {
// Ignore failure of embedded profile
}
@@ -286,8 +300,7 @@ class PipelineWorker : public Nan::AsyncWorker {
image = image.icc_transform(
const_cast<char*>(profileMap[VIPS_INTERPRETATION_sRGB].data()), VImage::option()
->set("input_profile", profileMap[VIPS_INTERPRETATION_CMYK].data())
->set("intent", VIPS_INTENT_PERCEPTUAL)
);
->set("intent", VIPS_INTENT_PERCEPTUAL));
}
// Flatten image to remove alpha channel
@@ -301,8 +314,7 @@ class PipelineWorker : public Nan::AsyncWorker {
baton->background[2] * multiplier
};
image = image.flatten(VImage::option()
->set("background", background)
);
->set("background", background));
}
// Negate the colours in the image
@@ -325,8 +337,7 @@ class PipelineWorker : public Nan::AsyncWorker {
if (hasOverlay && !HasAlpha(image)) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
);
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
bool const shouldShrink = xshrink > 1 || yshrink > 1;
@@ -380,22 +391,19 @@ class PipelineWorker : public Nan::AsyncWorker {
// Perform kernel-based reduction
if (yresidual < 1.0 || xresidual < 1.0) {
VipsKernel kernel = static_cast<VipsKernel>(
vips_enum_from_nick(nullptr, VIPS_TYPE_KERNEL, baton->kernel.data())
);
vips_enum_from_nick(nullptr, VIPS_TYPE_KERNEL, baton->kernel.data()));
if (kernel != VIPS_KERNEL_CUBIC && kernel != VIPS_KERNEL_LANCZOS2 && kernel != VIPS_KERNEL_LANCZOS3) {
throw vips::VError("Unknown kernel");
}
if (yresidual < 1.0) {
image = image.reducev(1.0 / yresidual, VImage::option()
->set("kernel", kernel)
->set("centre", baton->centreSampling)
);
->set("centre", baton->centreSampling));
}
if (xresidual < 1.0) {
image = image.reduceh(1.0 / xresidual, VImage::option()
->set("kernel", kernel)
->set("centre", baton->centreSampling)
);
->set("centre", baton->centreSampling));
}
}
// Perform affine enlargement
@@ -403,13 +411,11 @@ class PipelineWorker : public Nan::AsyncWorker {
vips::VInterpolate interpolator = vips::VInterpolate::new_from_name(baton->interpolator.data());
if (yresidual > 1.0) {
image = image.affine({1.0, 0.0, 0.0, yresidual}, VImage::option()
->set("interpolate", interpolator)
);
->set("interpolate", interpolator));
}
if (xresidual > 1.0) {
image = image.affine({xresidual, 0.0, 0.0, 1.0}, VImage::option()
->set("interpolate", interpolator)
);
->set("interpolate", interpolator));
}
}
}
@@ -433,13 +439,12 @@ class PipelineWorker : public Nan::AsyncWorker {
}
// Join additional color channels to the image
if(baton->joinChannelIn.size() > 0) {
if (baton->joinChannelIn.size() > 0) {
VImage joinImage;
ImageType joinImageType = ImageType::UNKNOWN;
for(unsigned int i = 0; i < baton->joinChannelIn.size(); i++) {
for (unsigned int i = 0; i < baton->joinChannelIn.size(); i++) {
std::tie(joinImage, joinImageType) = sharp::OpenInput(baton->joinChannelIn[i], baton->accessMethod);
image = image.bandjoin(joinImage);
}
image = image.copy(VImage::option()->set("interpretation", baton->colourspace));
@@ -463,8 +468,8 @@ class PipelineWorker : public Nan::AsyncWorker {
background = { multiplier * (
0.2126 * baton->background[0] +
0.7152 * baton->background[1] +
0.0722 * baton->background[2]
)};
0.0722 * baton->background[2])
};
}
// Add alpha channel to background colour
if (baton->background[3] < 255.0 || HasAlpha(image)) {
@@ -475,16 +480,14 @@ class PipelineWorker : public Nan::AsyncWorker {
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
);
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
// Embed
int left = static_cast<int>(round((baton->width - image.width()) / 2));
int top = static_cast<int>(round((baton->height - image.height()) / 2));
image = image.embed(left, top, baton->width, baton->height, VImage::option()
->set("extend", VIPS_EXTEND_BACKGROUND)
->set("background", background)
);
->set("background", background));
} else if (baton->canvas != Canvas::IGNORE_ASPECT) {
// Crop/max/min
int left;
@@ -492,8 +495,7 @@ class PipelineWorker : public Nan::AsyncWorker {
if (baton->crop < 9) {
// Gravity-based crop
std::tie(left, top) = sharp::CalculateCrop(
image.width(), image.height(), baton->width, baton->height, baton->crop
);
image.width(), image.height(), baton->width, baton->height, baton->crop);
} else if (baton->crop == 16) {
// Entropy-based crop
std::tie(left, top) = sharp::Crop(image, baton->width, baton->height, sharp::EntropyStrategy());
@@ -512,8 +514,7 @@ class PipelineWorker : public Nan::AsyncWorker {
// Post extraction
if (baton->topOffsetPost != -1) {
image = image.extract_area(
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost
);
baton->leftOffsetPost, baton->topOffsetPost, baton->widthPost, baton->heightPost);
}
// Extend edges
@@ -533,8 +534,8 @@ class PipelineWorker : public Nan::AsyncWorker {
background = { multiplier * (
0.2126 * baton->background[0] +
0.7152 * baton->background[1] +
0.0722 * baton->background[2]
)};
0.0722 * baton->background[2])
};
}
// Add alpha channel to background colour
if (baton->background[3] < 255.0 || HasAlpha(image)) {
@@ -545,8 +546,7 @@ class PipelineWorker : public Nan::AsyncWorker {
// Add non-transparent alpha channel, if required
if (baton->background[3] < 255.0 && !HasAlpha(image)) {
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
);
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
// Embed
baton->width = image.width() + baton->extendLeft + baton->extendRight;
@@ -571,8 +571,7 @@ class PipelineWorker : public Nan::AsyncWorker {
image = sharp::Convolve(image,
baton->convKernelWidth, baton->convKernelHeight,
baton->convKernelScale, baton->convKernelOffset,
baton->convKernel
);
baton->convKernel);
}
// Sharpen
@@ -606,17 +605,13 @@ class PipelineWorker : public Nan::AsyncWorker {
// the overlayX/YOffsets will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(),
baton->overlayXOffset, baton->overlayYOffset
);
baton->overlayXOffset, baton->overlayYOffset);
} else {
// the overlayGravity will now be used to CalculateCrop for extract_area
std::tie(left, top) = sharp::CalculateCrop(
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity
);
overlayImage.width(), overlayImage.height(), image.width(), image.height(), baton->overlayGravity);
}
overlayImage = overlayImage.extract_area(
left, top, image.width(), image.height()
);
overlayImage = overlayImage.extract_area(left, top, image.width(), image.height());
}
// the overlayGravity was used for extract_area, therefore set it back to its default value of 0
baton->overlayGravity = 0;
@@ -629,15 +624,13 @@ class PipelineWorker : public Nan::AsyncWorker {
if (!HasAlpha(overlayImage)) {
double const multiplier = sharp::Is16Bit(overlayImage.interpretation()) ? 256.0 : 1.0;
overlayImage = overlayImage.bandjoin(
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier)
);
VImage::new_matrix(overlayImage.width(), overlayImage.height()).new_from_image(255 * multiplier));
}
// Ensure image has alpha channel
if (!HasAlpha(image)) {
double const multiplier = sharp::Is16Bit(image.interpretation()) ? 256.0 : 1.0;
image = image.bandjoin(
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier)
);
VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier));
}
// Ensure overlay is premultiplied sRGB
overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply();
@@ -686,8 +679,8 @@ class PipelineWorker : public Nan::AsyncWorker {
}
// Extract an image channel (aka vips band)
if(baton->extractChannel > -1) {
if(baton->extractChannel >= image.bands()) {
if (baton->extractChannel > -1) {
if (baton->extractChannel >= image.bands()) {
(baton->err).append("Cannot extract channel from image. Too few channels in image.");
return Error();
}
@@ -701,12 +694,9 @@ class PipelineWorker : public Nan::AsyncWorker {
// Convert colourspace, pass the current known interpretation so libvips doesn't have to guess
image = image.colourspace(baton->colourspace, VImage::option()->set("source_space", image.interpretation()));
// Transform colours from embedded profile to output profile
if (baton->withMetadata &&
sharp::HasProfile(image) &&
profileMap[baton->colourspace] != std::string()) {
if (baton->withMetadata && sharp::HasProfile(image) && profileMap[baton->colourspace] != std::string()) {
image = image.icc_transform(const_cast<char*>(profileMap[baton->colourspace].data()),
VImage::option()->set("embedded", TRUE)
);
VImage::option()->set("embedded", TRUE));
}
}
@@ -732,14 +722,13 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("trellis_quant", baton->jpegTrellisQuantisation)
->set("overshoot_deringing", baton->jpegOvershootDeringing)
->set("optimize_scans", baton->jpegOptimiseScans)
->set("optimize_coding", TRUE)
));
->set("optimize_coding", TRUE)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
vips_area_unref(area);
baton->formatOut = "jpeg";
if(baton->colourspace == VIPS_INTERPRETATION_CMYK) {
if (baton->colourspace == VIPS_INTERPRETATION_CMYK) {
baton->channels = std::min(baton->channels, 4);
} else {
baton->channels = std::min(baton->channels, 3);
@@ -754,9 +743,7 @@ class PipelineWorker : public Nan::AsyncWorker {
VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option()
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ?
VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE )
));
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -767,7 +754,9 @@ class PipelineWorker : public Nan::AsyncWorker {
VipsArea *area = VIPS_AREA(image.webpsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->webpQuality)
));
->set("lossless", baton->webpLossless)
->set("near_lossless", baton->webpNearLossless)
->set("alpha_q", baton->webpAlphaQuality)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
area->free_fn = nullptr;
@@ -821,8 +810,7 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("trellis_quant", baton->jpegTrellisQuantisation)
->set("overshoot_deringing", baton->jpegOvershootDeringing)
->set("optimize_scans", baton->jpegOptimiseScans)
->set("optimize_coding", TRUE)
);
->set("optimize_coding", TRUE));
baton->formatOut = "jpeg";
baton->channels = std::min(baton->channels, 3);
} else if (baton->formatOut == "png" || isPng || (matchInput &&
@@ -835,24 +823,23 @@ class PipelineWorker : public Nan::AsyncWorker {
image.pngsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
->set("filter", baton->pngAdaptiveFiltering ?
VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE )
);
->set("filter", baton->pngAdaptiveFiltering ? VIPS_FOREIGN_PNG_FILTER_ALL : VIPS_FOREIGN_PNG_FILTER_NONE));
baton->formatOut = "png";
} else if (baton->formatOut == "webp" || isWebp || (matchInput && inputImageType == ImageType::WEBP)) {
// Write WEBP to file
image.webpsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->webpQuality)
);
->set("lossless", baton->webpLossless)
->set("near_lossless", baton->webpNearLossless)
->set("alpha_q", baton->webpAlphaQuality));
baton->formatOut = "webp";
} else if (baton->formatOut == "tiff" || isTiff || (matchInput && inputImageType == ImageType::TIFF)) {
// Write TIFF to file
image.tiffsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->tiffQuality)
->set("compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG)
);
->set("compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG));
baton->formatOut = "tiff";
baton->channels = std::min(baton->channels, 3);
} else if (baton->formatOut == "dz" || isDz || isDzZip) {
@@ -870,7 +857,10 @@ class PipelineWorker : public Nan::AsyncWorker {
suffix = AssembleSuffixString(".png", options);
} else if (baton->tileFormat == "webp") {
std::vector<std::pair<std::string, std::string>> options {
{"Q", std::to_string(baton->webpQuality)}
{"Q", std::to_string(baton->webpQuality)},
{"alpha_q", std::to_string(baton->webpAlphaQuality)},
{"lossless", baton->webpLossless ? "TRUE" : "FALSE"},
{"near_lossless", baton->webpNearLossless ? "TRUE" : "FALSE"}
};
suffix = AssembleSuffixString(".webp", options);
} else {
@@ -895,14 +885,12 @@ class PipelineWorker : public Nan::AsyncWorker {
->set("overlap", baton->tileOverlap)
->set("container", baton->tileContainer)
->set("layout", baton->tileLayout)
->set("suffix", const_cast<char*>(suffix.data()))
);
->set("suffix", const_cast<char*>(suffix.data())));
baton->formatOut = "dz";
} else if (baton->formatOut == "v" || isV || (matchInput && inputImageType == ImageType::VIPS)) {
// Write V to file
image.vipssave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
);
->set("strip", !baton->withMetadata));
baton->formatOut = "v";
} else {
// Unsupported output format
@@ -918,7 +906,7 @@ class PipelineWorker : public Nan::AsyncWorker {
vips_thread_shutdown();
}
void HandleOKCallback () {
void HandleOKCallback() {
using Nan::New;
using Nan::Set;
Nan::HandleScope();
@@ -952,8 +940,8 @@ class PipelineWorker : public Nan::AsyncWorker {
if (baton->bufferOutLength > 0) {
// Pass ownership of output data to Buffer instance
argv[1] = Nan::NewBuffer(
static_cast<char*>(baton->bufferOut), baton->bufferOutLength, sharp::FreeCallback, nullptr
).ToLocalChecked();
static_cast<char*>(baton->bufferOut), baton->bufferOutLength, sharp::FreeCallback, nullptr)
.ToLocalChecked();
// Add buffer size to info
Set(info, New("size").ToLocalChecked(), New<v8::Uint32>(static_cast<uint32_t>(baton->bufferOutLength)));
argv[2] = info;
@@ -972,16 +960,14 @@ class PipelineWorker : public Nan::AsyncWorker {
[this](uint32_t index, v8::Local<v8::Object> const buffer) -> uint32_t {
GetFromPersistent(index);
return index + 1;
}
);
});
delete baton->input;
delete baton->overlay;
delete baton->boolean;
for_each(baton->joinChannelIn.begin(), baton->joinChannelIn.end(),
[this](sharp::InputDescriptor *joinChannelIn) {
delete joinChannelIn;
}
);
});
delete baton;
// Decrement processing task counter
@@ -1012,7 +998,7 @@ class PipelineWorker : public Nan::AsyncWorker {
bool flip = FALSE;
bool flop = FALSE;
if (angle == -1) {
switch(sharp::ExifOrientation(image)) {
switch (sharp::ExifOrientation(image)) {
case 6: rotate = VIPS_ANGLE_D90; break;
case 3: rotate = VIPS_ANGLE_D180; break;
case 8: rotate = VIPS_ANGLE_D270; break;
@@ -1132,12 +1118,12 @@ NAN_METHOD(pipeline) {
baton->interpolator = AttrAsStr(options, "interpolator");
baton->centreSampling = AttrTo<bool>(options, "centreSampling");
// Join Channel Options
if(HasAttr(options, "joinChannelIn")) {
if (HasAttr(options, "joinChannelIn")) {
v8::Local<v8::Object> joinChannelObject = Nan::Get(options, Nan::New("joinChannelIn").ToLocalChecked())
.ToLocalChecked().As<v8::Object>();
v8::Local<v8::Array> joinChannelArray = joinChannelObject.As<v8::Array>();
int joinChannelArrayLength = AttrTo<int32_t>(joinChannelObject, "length");
for(int i = 0; i < joinChannelArrayLength; i++) {
for (int i = 0; i < joinChannelArrayLength; i++) {
baton->joinChannelIn.push_back(
CreateInputDescriptor(
Nan::Get(joinChannelArray, i).ToLocalChecked().As<v8::Object>(),
@@ -1154,7 +1140,7 @@ NAN_METHOD(pipeline) {
baton->threshold = AttrTo<int32_t>(options, "threshold");
baton->thresholdGrayscale = AttrTo<bool>(options, "thresholdGrayscale");
baton->trimTolerance = AttrTo<int32_t>(options, "trimTolerance");
if(baton->accessMethod == VIPS_ACCESS_SEQUENTIAL && baton->trimTolerance != 0) {
if (baton->accessMethod == VIPS_ACCESS_SEQUENTIAL && baton->trimTolerance != 0) {
baton->accessMethod = VIPS_ACCESS_RANDOM;
}
baton->gamma = AttrTo<double>(options, "gamma");
@@ -1209,6 +1195,9 @@ NAN_METHOD(pipeline) {
baton->pngCompressionLevel = AttrTo<uint32_t>(options, "pngCompressionLevel");
baton->pngAdaptiveFiltering = AttrTo<bool>(options, "pngAdaptiveFiltering");
baton->webpQuality = AttrTo<uint32_t>(options, "webpQuality");
baton->webpAlphaQuality = AttrTo<uint32_t>(options, "webpAlphaQuality");
baton->webpLossless = AttrTo<bool>(options, "webpLossless");
baton->webpNearLossless = AttrTo<bool>(options, "webpNearLossless");
baton->tiffQuality = AttrTo<uint32_t>(options, "tiffQuality");
// Tile output
baton->tileSize = AttrTo<uint32_t>(options, "tileSize");

View File

@@ -1,12 +1,28 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SRC_PIPELINE_H_
#define SRC_PIPELINE_H_
#include <memory>
#include <string>
#include <vector>
#include <nan.h>
#include <vips/vips8>
#include "nan.h"
#include "common.h"
#include "./common.h"
NAN_METHOD(pipeline);
@@ -84,6 +100,9 @@ struct PipelineBaton {
int pngCompressionLevel;
bool pngAdaptiveFiltering;
int webpQuality;
int webpAlphaQuality;
bool webpNearLossless;
bool webpLossless;
int tiffQuality;
std::string err;
bool withMetadata;

View File

@@ -1,7 +1,20 @@
#include <node.h>
#include <vips/vips8>
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "nan.h"
#include <node.h>
#include <nan.h>
#include <vips/vips8>
#include "common.h"
#include "metadata.h"

View File

@@ -1,10 +1,25 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <cmath>
#include <string>
#include <node.h>
#include <nan.h>
#include <vips/vips8>
#include <vips/vector.h>
#include "nan.h"
#include "common.h"
#include "operations.h"
#include "utilities.h"
@@ -45,14 +60,11 @@ NAN_METHOD(cache) {
// Get memory stats
Local<Object> memory = New<Object>();
Set(memory, New("current").ToLocalChecked(),
New<Integer>(static_cast<int>(round(vips_tracked_get_mem() / 1048576)))
);
New<Integer>(static_cast<int>(round(vips_tracked_get_mem() / 1048576))));
Set(memory, New("high").ToLocalChecked(),
New<Integer>(static_cast<int>(round(vips_tracked_get_mem_highwater() / 1048576)))
);
New<Integer>(static_cast<int>(round(vips_tracked_get_mem_highwater() / 1048576))));
Set(memory, New("max").ToLocalChecked(),
New<Integer>(static_cast<int>(round(vips_cache_get_max_mem() / 1048576)))
);
New<Integer>(static_cast<int>(round(vips_cache_get_max_mem() / 1048576))));
// Get file stats
Local<Object> files = New<Object>();
Set(files, New("current").ToLocalChecked(), New<Integer>(vips_tracked_get_files()));

View File

@@ -1,7 +1,21 @@
// Copyright 2013, 2014, 2015, 2016, 2017 Lovell Fuller and contributors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SRC_UTILITIES_H_
#define SRC_UTILITIES_H_
#include "nan.h"
#include <nan.h>
NAN_METHOD(cache);
NAN_METHOD(concurrency);

Three new binary image files added (previews not shown): 116 KiB, 170 KiB and 94 KiB.

View File

@@ -1,48 +0,0 @@
'use strict';
const fs = require('fs');
const path = require('path');
const assert = require('assert');
const cpplint = require('node-cpplint/lib/');
describe('cpplint', function () {
// Ignore cpplint failures, possibly newline-related, on Windows
if (process.platform !== 'win32') {
// List C++ source files
fs.readdirSync(path.join(__dirname, '..', '..', 'src')).filter(function (source) {
return source !== 'libvips';
}).forEach(function (source) {
const file = path.join('src', source);
it(file, function (done) {
// Lint each source file
cpplint({
files: [file],
linelength: 120,
filters: {
legal: {
copyright: false
},
build: {
include: false
},
whitespace: {
parens: false
},
runtime: {
indentation_namespace: false
}
}
}, function (err, report) {
if (err) {
throw err;
}
const expected = {};
expected[file] = [];
assert.deepEqual(expected, report);
done();
});
});
});
}
});

View File

@@ -157,6 +157,28 @@ describe('Input/output', function () {
readableButNotAnImage.pipe(writable);
});
it('Readable side of Stream can start flowing after Writable side has finished', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
writable.on('finish', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
fs.unlinkSync(fixtures.outputJpg);
done();
});
});
const pipeline = sharp().resize(320, 240);
readable.pipe(pipeline);
pipeline.on('finish', function () {
pipeline.pipe(writable);
});
});
it('Sequential read, force JPEG', function (done) {
sharp(fixtures.inputJpg)
.sequentialRead()
@@ -372,6 +394,50 @@ describe('Input/output', function () {
done();
});
});
it('should work for webp alpha quality', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({alphaQuality: 80})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-alpha-80.webp'), data, done);
});
});
it('should work for webp lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({lossless: true})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual('webp', info.format);
fixtures.assertSimilar(fixtures.expected('webp-lossless.webp'), data, done);
});
});
it('should work for webp near-lossless', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({nearLossless: true, quality: 50})
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
it('should use near-lossless when both lossless and nearLossless are specified', function (done) {
sharp(fixtures.inputPngAlphaPremultiplicationSmall)
.webp({nearLossless: true, quality: 50, lossless: true})
.toBuffer(function (err50, data50, info50) {
if (err50) throw err50;
assert.strictEqual(true, data50.length > 0);
assert.strictEqual('webp', info50.format);
fixtures.assertSimilar(fixtures.expected('webp-near-lossless-50.webp'), data50, done);
});
});
}
it('Invalid output format', function (done) {
@@ -735,6 +801,12 @@ describe('Input/output', function () {
});
});
it('Invalid WebP alpha quality throws error', function () {
assert.throws(function () {
sharp().webp({ alphaQuality: 101 });
});
});
it('Invalid TIFF quality throws error', function () {
assert.throws(function () {
sharp().tiff({ quality: 101 });