Mirror of https://github.com/lovell/sharp.git, synced 2025-07-09 10:30:15 +02:00
Windows compatibility #19

Hide WebP format and normalise option.
Separate test runners for node and iojs.
parent 8926ebc56c
commit 1e52c2dbe6

README.md (13 lines changed)
@@ -88,9 +88,12 @@ The _gettext_ dependency of _libvips_ [can lead](https://github.com/lovell/sharp
Requires x86 32-bit Node.js or io.js (use `iojs.exe` rather than `node.exe`).
The WebP format is currently unsupported.

1. [Download](http://www.vips.ecs.soton.ac.uk/supported/current/win32/) and unzip `vips-dev.x.y.z.zip`.
2. Set the `VIPS_HOME` environment variable to the full path of the `vips-dev-x.y.z` directory.
3. Add `vips-dev-x.y.z\bin` to `PATH`.
1. Ensure the [node-gyp prerequisites](https://github.com/TooTallNate/node-gyp#installation) are met.
2. [Download](http://www.vips.ecs.soton.ac.uk/supported/current/win32/) and unzip `vips-dev.x.y.z.zip`.
3. Set the `VIPS_HOME` environment variable to the full path of the `vips-dev-x.y.z` directory.
4. Add `vips-dev-x.y.z\bin` to `PATH`.

Versions of MSVC more recent than 2013 may require the use of `npm install --arch=ia32 --msvs_version=2013`.

### Heroku

@@ -678,6 +681,10 @@ A [guide for contributors](https://github.com/lovell/sharp/blob/master/CONTRIBUT

[](https://snap-ci.com/lovell/sharp/branch/master)

#### Windows Server 2012

[](https://ci.appveyor.com/project/lovell/sharp)

### Benchmark tests
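After following the Windows installation steps above, a quick way to confirm that the native binding built correctly is a trivial resize. This is a minimal sketch with placeholder filenames, not part of the commit.

```javascript
// Minimal post-install smoke test; 'input.jpg' and 'output.jpg' are placeholders.
// If the binding failed to build, require('sharp') throws immediately.
var sharp = require('sharp');

sharp('input.jpg')
  .resize(320, 240)
  .toFile('output.jpg', function(err, info) {
    if (err) throw err;
    console.log('Resized to ' + info.width + 'x' + info.height + ' as ' + info.format);
  });
```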
@@ -1,17 +1,17 @@
os: Visual Studio 2014 CTP4
platform: x86
environment:
  VIPS_VERSION_MAJOR_MINOR: 7.42
  VIPS_VERSION_PATCH: 3
  VIPS_WARNING: 0
install:
  - ps: $env:VIPS_VERSION = "$env:VIPS_VERSION_MAJOR_MINOR.$env:VIPS_VERSION_PATCH"
  - ps: Write-Output "VIPS_VERSION=$env:VIPS_VERSION"
  - ps: Write-Output "Fetching http://www.vips.ecs.soton.ac.uk/supported/$env:VIPS_VERSION_MAJOR_MINOR/win32/vips-dev-$env:VIPS_VERSION.zip"
  - ps: Start-FileDownload http://www.vips.ecs.soton.ac.uk/supported/$env:VIPS_VERSION_MAJOR_MINOR/win32/vips-dev-$env:VIPS_VERSION.zip -FileName c:\vips-dev-$env:VIPS_VERSION.zip
  - ps: Write-Output "Extracting c:\vips-dev-$env:VIPS_VERSION.zip"
  - ps: Invoke-Expression "& 7z -y x c:\vips-dev-$env:VIPS_VERSION.zip -oc:\ | FIND /V `"ing `""
  - ps: $env:VIPS_HOME = "c:\vips-dev-$env:VIPS_VERSION"
  - ps: $env:PATH = "$env:VIPS_HOME\bin;$env:PATH"
  - ps: Install-Product node 0 x86
  - npm install --msvs_version=2013
test_script:
  - npm test
  - npm run-script test-win32-node
index.js (8 lines changed)
@@ -349,10 +349,14 @@ Sharp.prototype.gamma = function(gamma) {
};

/*
  Normalize histogram
  Enhance output image contrast by stretching its luminance to cover the full dynamic range
*/
Sharp.prototype.normalize = function(normalize) {
  this.options.normalize = (typeof normalize === 'boolean') ? normalize : true;
  if (process.platform !== 'win32') {
    this.options.normalize = (typeof normalize === 'boolean') ? normalize : true;
  } else {
    console.error('normalize unavailable on win32 platform');
  }
  return this;
};
Sharp.prototype.normalise = Sharp.prototype.normalize;
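The stricter version of `normalize` above only records the option off Windows; a typical call from application code looks like the sketch below (filenames are placeholders, not part of this commit).

```javascript
var sharp = require('sharp');

// Stretch luminance to cover the full dynamic range before writing the output.
// On win32 this logs "normalize unavailable on win32 platform" and leaves the image untouched.
sharp('low-contrast.jpg')
  .normalise()   // alias of normalize()
  .toFile('stretched.jpg', function(err, info) {
    if (err) throw err;
    console.log('wrote ' + info.format + ' ' + info.width + 'x' + info.height);
  });
```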
@@ -20,7 +20,9 @@
  ],
  "description": "High performance Node.js module to resize JPEG, PNG, WebP and TIFF images using the libvips library",
  "scripts": {
    "test": "VIPS_WARNING=0 node ./node_modules/istanbul/lib/cli.js cover ./node_modules/mocha/bin/_mocha -- --slow=5000 --timeout=10000 ./test/unit/*.js"
    "test": "VIPS_WARNING=0 node ./node_modules/istanbul/lib/cli.js cover ./node_modules/mocha/bin/_mocha -- --slow=5000 --timeout=15000 ./test/unit/*.js",
    "test-win32-node": "node ./node_modules/mocha/bin/mocha --slow=5000 --timeout=15000 ./test/unit/*.js",
    "test-win32-iojs": "iojs ./node_modules/mocha/bin/mocha --slow=5000 --timeout=15000 ./test/unit/*.js"
  },
  "main": "index.js",
  "repository": {
@@ -708,6 +708,7 @@ class ResizeWorker : public NanAsyncWorker {
        image = gammaDecoded;
      }

#ifndef _WIN32
      // Apply normalization
      if (baton->normalize) {
        VipsInterpretation typeBeforeNormalize = image->Type;
@@ -787,6 +788,7 @@ class ResizeWorker : public NanAsyncWorker {
          image = normalized;
        }
      }
#endif

      // Convert image to sRGB, if not already
      if (image->Type != VIPS_INTERPRETATION_sRGB) {
@@ -8,6 +8,10 @@
#include "resize.h"
#include "utilities.h"

#ifdef _WIN64
#error Windows 64-bit currently unsupported - see https://github.com/lovell/sharp#windows
#endif

extern "C" void init(v8::Handle<v8::Object> target) {
  NanScope();
  vips_init("sharp");
@@ -31,16 +31,18 @@ describe('Colour space conversion', function() {
      .toFile(fixtures.path('output.greyscale-not.jpg'), done);
  });

  it('From 1-bit TIFF to sRGB WebP [slow]', function(done) {
    sharp(fixtures.inputTiff)
      .webp()
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual(true, data.length > 0);
        assert.strictEqual('webp', info.format);
        done();
      });
  });
  if (sharp.format.webp.output.buffer) {
    it('From 1-bit TIFF to sRGB WebP [slow]', function(done) {
      sharp(fixtures.inputTiff)
        .webp()
        .toBuffer(function(err, data, info) {
          if (err) throw err;
          assert.strictEqual(true, data.length > 0);
          assert.strictEqual('webp', info.format);
          done();
        });
    });
  }

  it('From CMYK to sRGB', function(done) {
    sharp(fixtures.inputJpgWithCmykProfile)
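The WebP cases above are now gated on the capabilities reported by `sharp.format` rather than on the platform name. Application code can feature-detect the same way; the sketch below assumes the `sharp.format.webp.output.buffer` flag used in these tests and uses placeholder filenames.

```javascript
var sharp = require('sharp');

// Prefer WebP output when this libvips build supports it, otherwise fall back to JPEG.
var pipeline = sharp('input.png').resize(640, 480);

if (sharp.format.webp.output.buffer) {
  pipeline.webp().toBuffer(function(err, data) {
    if (err) throw err;
    console.log('WebP buffer of ' + data.length + ' bytes');
  });
} else {
  pipeline.jpeg().toBuffer(function(err, data) {
    if (err) throw err;
    console.log('JPEG fallback buffer of ' + data.length + ' bytes');
  });
}
```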
@@ -8,39 +8,42 @@ var cpplint = require('node-cpplint/lib/');

describe('cpplint', function() {

  // List C++ source files
  fs.readdirSync(path.join(__dirname, '..', '..', 'src')).forEach(function (source) {
    var file = path.join('src', source);
    it(file, function(done) {
      // Lint each source file
      cpplint({
        files: [file],
        linelength: 140,
        filters: {
          legal: {
            copyright: false
          },
          build: {
            include: false,
            include_order: false
          },
          whitespace: {
            blank_line: false,
            comments: false,
            parens: false
  // Ignore cpplint failures, possibly newline-related, on Windows
  if (process.platform !== 'win32') {
    // List C++ source files
    fs.readdirSync(path.join(__dirname, '..', '..', 'src')).forEach(function (source) {
      var file = path.join('src', source);
      it(file, function(done) {
        // Lint each source file
        cpplint({
          files: [file],
          linelength: 140,
          filters: {
            legal: {
              copyright: false
            },
            build: {
              include: false,
              include_order: false
            },
            whitespace: {
              blank_line: false,
              comments: false,
              parens: false
            }
          }
        }
        }, function(err, report) {
          if (err) {
            throw err;
          }
          var expected = {};
          expected[file] = [];
          assert.deepEqual(expected, report);
          done();
        }, function(err, report) {
          if (err) {
            throw err;
          }
          var expected = {};
          expected[file] = [];
          assert.deepEqual(expected, report);
          done();
        });
      });
    });

  });
  });
  }

});
@@ -28,24 +28,26 @@ describe('Embed', function() {
    });
  });

  it('JPEG within WebP, to include alpha channel', function(done) {
    sharp(fixtures.inputJpg)
      .resize(320, 240)
      .background({r: 0, g: 0, b: 0, a: 0})
      .embed()
      .webp()
      .toBuffer(function(err, data, info) {
        if (err) throw err;
        assert.strictEqual(true, data.length > 0);
        assert.strictEqual('webp', info.format);
        assert.strictEqual(320, info.width);
        assert.strictEqual(240, info.height);
        sharp(data).metadata(function(err, metadata) {
  if (sharp.format.webp.output.buffer) {
    it('JPEG within WebP, to include alpha channel', function(done) {
      sharp(fixtures.inputJpg)
        .resize(320, 240)
        .background({r: 0, g: 0, b: 0, a: 0})
        .embed()
        .webp()
        .toBuffer(function(err, data, info) {
          if (err) throw err;
          assert.strictEqual(4, metadata.channels);
          done();
          assert.strictEqual(true, data.length > 0);
          assert.strictEqual('webp', info.format);
          assert.strictEqual(320, info.width);
          assert.strictEqual(240, info.height);
          sharp(data).metadata(function(err, metadata) {
            if (err) throw err;
            assert.strictEqual(4, metadata.channels);
            done();
          });
        });
      });
    });
  });
  }

});
@@ -31,16 +31,18 @@ describe('Partial image extraction', function() {
    });
  });

  it('WebP', function(done) {
    sharp(fixtures.inputWebP)
      .extract(50, 100, 125, 200)
      .toFile(fixtures.path('output.extract.webp'), function(err, info) {
        if (err) throw err;
        assert.strictEqual(125, info.width);
        assert.strictEqual(200, info.height);
        done();
      });
  });
  if (sharp.format.webp.output.file) {
    it('WebP', function(done) {
      sharp(fixtures.inputWebP)
        .extract(50, 100, 125, 200)
        .toFile(fixtures.path('output.extract.webp'), function(err, info) {
          if (err) throw err;
          assert.strictEqual(125, info.width);
          assert.strictEqual(200, info.height);
          done();
        });
    });
  }

  it('TIFF', function(done) {
    sharp(fixtures.inputTiff)
@@ -806,8 +806,10 @@ describe('Input/output', function() {
      .toBuffer(function(err) {
        sharp.queue.removeListener('change', queueListener);
        if (err) throw err;
        assert.strictEqual(2, eventCounter);
        done();
        process.nextTick(function() {
          assert.strictEqual(2, eventCounter);
          done();
        });
      });
  });
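The change above defers the final assertions to the next tick so the queue's second `change` event has been delivered before the counter is checked. Outside the test suite, listening to the task queue follows the same pattern; the sketch below mirrors the test and uses a placeholder input file.

```javascript
var sharp = require('sharp');

var eventCounter = 0;
function queueListener() {
  // Fired whenever the length of sharp's internal task queue changes.
  eventCounter++;
}
sharp.queue.on('change', queueListener);

sharp('input.jpg').resize(320, 240).toBuffer(function(err) {
  sharp.queue.removeListener('change', queueListener);
  if (err) throw err;
  // Allow any pending 'change' event to fire before inspecting the counter.
  process.nextTick(function() {
    console.log('queue changed ' + eventCounter + ' times');
  });
});
```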
@@ -82,19 +82,21 @@ describe('Image metadata', function() {
    });
  });

  it('WebP', function(done) {
    sharp(fixtures.inputWebP).metadata(function(err, metadata) {
      if (err) throw err;
      assert.strictEqual('webp', metadata.format);
      assert.strictEqual(1024, metadata.width);
      assert.strictEqual(772, metadata.height);
      assert.strictEqual('srgb', metadata.space);
      assert.strictEqual(3, metadata.channels);
      assert.strictEqual(false, metadata.hasProfile);
      assert.strictEqual(false, metadata.hasAlpha);
      done();
  if (sharp.format.webp.input.file) {
    it('WebP', function(done) {
      sharp(fixtures.inputWebP).metadata(function(err, metadata) {
        if (err) throw err;
        assert.strictEqual('webp', metadata.format);
        assert.strictEqual(1024, metadata.width);
        assert.strictEqual(772, metadata.height);
        assert.strictEqual('srgb', metadata.space);
        assert.strictEqual(3, metadata.channels);
        assert.strictEqual(false, metadata.hasProfile);
        assert.strictEqual(false, metadata.hasAlpha);
        done();
      });
    });
  });
  }

  it('GIF via libmagick', function(done) {
    sharp(fixtures.inputGif).metadata(function(err, metadata) {
@@ -13,124 +13,117 @@ describe('Normalization', function () {
    assert.strictEqual(sharp.prototype.normalize, sharp.prototype.normalise);
  });

  it('spreads rgb image values between 0 and 255', function(done) {
    sharp(fixtures.inputJpgWithLowContrast)
      .normalize()
      .raw()
      .toBuffer(function (err, data, info) {
        if (err) throw err;
        var min = 255, max = 0, i;
        for (i = 0; i < data.length; i += 3) {
          min = Math.min(min, data[i], data[i + 1], data[i + 2]);
          max = Math.max(max, data[i], data[i + 1], data[i + 2]);
        }
        assert.strictEqual(0, min);
        assert.strictEqual(255, max);
        return done();
      });
  });
  // Normalize is currently unavailable on Windows
  if (process.platform !== 'win32') {

    it('spreads grayscaled image values between 0 and 255', function(done) {
      sharp(fixtures.inputJpgWithLowContrast)
        .gamma()
        .greyscale()
        .normalize(true)
        .raw()
        .toBuffer(function (err, data, info) {
          if (err) throw err;
          var min = 255, max = 0, i;
          for (i = 0; i < data.length; i++) {
            min = Math.min(min, data[i]);
            max = Math.max(max, data[i]);
          }
          assert.strictEqual(0, min);
          assert.strictEqual(255, max);
          return done();
        });
    });
    it('spreads rgb image values between 0 and 255', function(done) {
      sharp(fixtures.inputJpgWithLowContrast)
        .normalize()
        .raw()
        .toBuffer(function (err, data, info) {
          if (err) throw err;
          var min = 255, max = 0, i;
          for (i = 0; i < data.length; i += 3) {
            min = Math.min(min, data[i], data[i + 1], data[i + 2]);
            max = Math.max(max, data[i], data[i + 1], data[i + 2]);
          }
          assert.strictEqual(0, min);
          assert.strictEqual(255, max);
          return done();
        });
    });

    it('stretches greyscale images with alpha channel', function (done) {
      sharp(fixtures.inputPngWithGreyAlpha)
        .normalize()
        .raw()
        .toBuffer(function (err, data, info) {
          // raw toBuffer does not return the alpha channel (yet?)
          var min = 255, max = 0, i;
          for (i = 0; i < data.length; i++) {
            min = Math.min(min, data[i]);
            max = Math.max(max, data[i]);
          }
          assert.strictEqual(0, min);
          assert.strictEqual(255, max);
          return done();
        });
    });
    it('spreads grayscaled image values between 0 and 255', function(done) {
      sharp(fixtures.inputJpgWithLowContrast)
        .gamma()
        .greyscale()
        .normalize(true)
        .raw()
        .toBuffer(function (err, data, info) {
          if (err) throw err;
          var min = 255, max = 0, i;
          for (i = 0; i < data.length; i++) {
            min = Math.min(min, data[i]);
            max = Math.max(max, data[i]);
          }
          assert.strictEqual(0, min);
          assert.strictEqual(255, max);
          return done();
        });
    });

    it('keeps an existing alpha channel', function (done) {
      sharp(fixtures.inputPngWithTransparency)
        .normalize()
        .toBuffer(function (err, data, info) {
          sharp(data)
            .metadata()
            .then(function (metadata) {
              assert.strictEqual(4, metadata.channels);
              assert.strictEqual(true, metadata.hasAlpha);
              assert.strictEqual('srgb', metadata.space);
            })
            .finally(done);
        });
    });
    it('stretches greyscale images with alpha channel', function (done) {
      sharp(fixtures.inputPngWithGreyAlpha)
        .normalize()
        .raw()
        .toBuffer(function (err, data, info) {
          var min = 255, max = 0, i;
          for (i = 0; i < data.length; i++) {
            min = Math.min(min, data[i]);
            max = Math.max(max, data[i]);
          }
          assert.strictEqual(0, min);
          assert.strictEqual(255, max);
          return done();
        });
    });

    it('keeps the alpha channel of greyscale images intact', function (done) {
      sharp(fixtures.inputPngWithGreyAlpha)
        .normalize()
        .toBuffer(function (err, data, info) {
          sharp(data)
            .metadata()
            .then(function (metadata) {
              assert.strictEqual(true, metadata.hasAlpha);
              // because of complications with greyscale
              // we return everything in srgb for now.
              //
              // assert.strictEqual(2, metadata.channels);
              // assert.strictEqual('b-w', metadata.space);
              assert.strictEqual(4, metadata.channels);
              assert.strictEqual('srgb', metadata.space);
            })
            .finally(done);
        });
    });
    it('keeps an existing alpha channel', function (done) {
      sharp(fixtures.inputPngWithTransparency)
        .normalize()
        .toBuffer(function (err, data, info) {
          sharp(data)
            .metadata()
            .then(function (metadata) {
              assert.strictEqual(4, metadata.channels);
              assert.strictEqual(true, metadata.hasAlpha);
              assert.strictEqual('srgb', metadata.space);
            })
            .finally(done);
        });
    });

    it('returns a black image for images with only one color', function (done) {
      sharp(fixtures.inputPngWithOneColor)
        .normalize()
        .toBuffer()
        .bind({})
        .then(function (imageData) {
          this.imageData = imageData;
          return sharp(imageData)
            .metadata();
        })
        .then(function (metadata) {
          assert.strictEqual(false, metadata.hasAlpha);
          // because of complications with greyscale
          // we return everything in srgb for now.
          //
          // assert.strictEqual(1, metadata.channels);
          // assert.strictEqual('b-w', metadata.space);
          assert.strictEqual(3, metadata.channels);
          assert.strictEqual('srgb', metadata.space);
        })
        .then(function () {
          return sharp(this.imageData)
            .raw()
            .toBuffer();
        })
        .then(function (rawData) {
          // var blackBuffer = new Buffer([0,0,0,0]);
          var blackBuffer = new Buffer([0,0,0, 0,0,0, 0,0,0, 0,0,0]);
          assert.strictEqual(blackBuffer.toString(), rawData.toString());
        })
        .finally(done);
    });
    it('keeps the alpha channel of greyscale images intact', function (done) {
      sharp(fixtures.inputPngWithGreyAlpha)
        .normalize()
        .toBuffer(function (err, data, info) {
          sharp(data)
            .metadata()
            .then(function (metadata) {
              assert.strictEqual(true, metadata.hasAlpha);
              assert.strictEqual(4, metadata.channels);
              assert.strictEqual('srgb', metadata.space);
            })
            .finally(done);
        });
    });

    it('returns a black image for images with only one color', function (done) {
      sharp(fixtures.inputPngWithOneColor)
        .normalize()
        .toBuffer()
        .bind({})
        .then(function (imageData) {
          this.imageData = imageData;
          return sharp(imageData)
            .metadata();
        })
        .then(function (metadata) {
          assert.strictEqual(false, metadata.hasAlpha);
          assert.strictEqual(3, metadata.channels);
          assert.strictEqual('srgb', metadata.space);
        })
        .then(function () {
          return sharp(this.imageData)
            .raw()
            .toBuffer();
        })
        .then(function (rawData) {
          var blackBuffer = new Buffer([0,0,0, 0,0,0, 0,0,0, 0,0,0]);
          assert.strictEqual(blackBuffer.toString(), rawData.toString());
        })
        .finally(done);
    });

  }
});
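Conceptually, the normalise operation these tests exercise is a linear contrast stretch: the darkest value in the image is mapped to 0 and the brightest to 255. The JavaScript below is only an explanatory sketch of that idea on a raw 8-bit greyscale buffer, not sharp's libvips-based implementation.

```javascript
// Illustrative linear contrast stretch over a raw 8-bit greyscale buffer.
// sharp performs the real work inside libvips; this only shows the idea.
function stretchContrast(data) {
  var min = 255, max = 0, i;
  for (i = 0; i < data.length; i++) {
    min = Math.min(min, data[i]);
    max = Math.max(max, data[i]);
  }
  var out = new Buffer(data.length);
  if (max === min) {
    // Single-colour input: nothing to stretch; the test above expects black output.
    out.fill(0);
    return out;
  }
  for (i = 0; i < data.length; i++) {
    out[i] = Math.round((data[i] - min) * 255 / (max - min));
  }
  return out;
}
```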