Compare commits


31 Commits

Author SHA1 Message Date
Lovell Fuller
b19dad69d6 Release v0.27.1 2021-01-27 19:44:39 +00:00
Lovell Fuller
94c5ac64e9 Bump devDeps 2021-01-27 19:44:01 +00:00
Lovell Fuller
c4bcd088fb Tests: run in parallel, ~20% faster on multicore machines 2021-01-26 20:27:52 +00:00
Lovell Fuller
aeecbe9396 Tests: ensure faster metadata tests pass on ARM64 2021-01-26 20:27:20 +00:00
Lovell Fuller
171aade9cd Tests: reduce time taken by metadata tests 2021-01-26 19:52:33 +00:00
Lovell Fuller
67213ae86c Tests: refactor output paths, might enable parallel runs 2021-01-26 18:43:48 +00:00
Lovell Fuller
24d9e53c3f Docs: add example of 16-bit RGB output #2528 2021-01-26 15:03:43 +00:00
Kleis Auke Wolthuizen
573ed5f4b5 Avoid calling g_type_from_name #2535 2021-01-26 14:42:08 +00:00
Bert Verhelst
ceff628add Docs: ensure correct types for output options 2021-01-26 14:23:56 +00:00
Randy Ridge
0bb8cb9203 Ensure TIFF is cast when using float predictor (#2502) 2021-01-26 14:00:25 +00:00
Lovell Fuller
98349bde28 Docs: add section on known conflicts 2021-01-24 17:15:28 +00:00
Lovell Fuller
f09be932eb Docs: add info about npm v7 directory ownership change 2021-01-24 16:52:10 +00:00
Lovell Fuller
4c57ac2bbe Docs: sharp logos are now in the public domain 2021-01-18 16:52:23 +00:00
Lovell Fuller
1dd93c1b6b Docs: changelog entry and example for #2527 2021-01-16 14:26:38 +00:00
alza54
c9f85fe27f Expose libvips gaussnoise operation (#2527) 2021-01-16 14:03:25 +00:00
Lovell Fuller
419cbe50f6 Preserve transparancy in monochrome logo 2021-01-16 10:53:15 +00:00
Lovell Fuller
5031c8323f Add monochrome version of sharp logo 2021-01-15 22:03:00 +00:00
Lovell Fuller
762d5913ce Avoid nested macro, replace VIPS_AREA w/ reinterpret_cast 2021-01-13 18:32:37 +00:00
Lovell Fuller
290df1b1c7 Windows: fix preprocessor syntax 2021-01-13 18:09:42 +00:00
Lovell Fuller
79170afc51 Docs: add 2021 as a copyright year 2021-01-13 18:06:28 +00:00
Lovell Fuller
bba00c2bfe Revert: ensure all platforms use fontconfig #2399 #2515 2021-01-13 17:50:58 +00:00
Lovell Fuller
f7e2b3688f Bump devDependencies 2021-01-13 17:23:29 +00:00
Lovell Fuller
8d49b7dde1 Ensure tests pass with latest libvips master branch
Expose forthcoming HEIF features where available
2021-01-13 16:47:49 +00:00
Lovell Fuller
138e60adb3 Docs: add section for Apple M1 users #2460 2021-01-06 13:39:47 +00:00
Lovell Fuller
d6376c31e0 Test: ensure toBuffer tests return any errors 2021-01-06 13:12:24 +00:00
Lovell Fuller
a7003e93c8 Docs: changelog entry for #2511 2021-01-06 11:10:11 +00:00
Leon Radley
4821a11223 Add support for Uint8(Clamped)Array input (#2511) 2021-01-06 09:49:24 +00:00
Lovell Fuller
bf1b326988 Docs: allow docs to be built on Windows 2021-01-01 15:19:35 +00:00
Lovell Fuller
39ddb6a175 Docs: improve descripion of create/raw props 2021-01-01 14:47:26 +00:00
Lovell Fuller
b2a0b8c0f0 Release v0.27.0 2020-12-22 11:50:23 +00:00
Lovell Fuller
4debc46d0e Docs: add AVIF to supported formats 2020-12-22 11:47:54 +00:00
44 changed files with 882 additions and 249 deletions

View File

@@ -11,7 +11,9 @@ Have you ensured the architecture and platform of Node.js used for `npm install`
Are you using the latest version? Is the version currently in use as reported by `npm ls sharp` the same as the latest version as reported by `npm view sharp dist-tags.latest`?
If you are installing as a `root` or `sudo` user, have you tried with the `npm install --unsafe-perm` flag?
If you are using npm v6 or earlier and installing as a `root` or `sudo` user, have you tried with the `npm install --unsafe-perm` flag?
If you are using npm v7, does the user running `npm install` own the directory it is run in?
If you are using the `ignore-scripts` feature of `npm`, have you tried with the `npm install --ignore-scripts=false` flag?

View File

@@ -4,7 +4,7 @@
The typical use case for this high speed Node.js module
is to convert large images in common formats to
smaller, web-friendly JPEG, PNG and WebP images of varying dimensions.
smaller, web-friendly JPEG, PNG, WebP and AVIF images of varying dimensions.
Resizing an image is typically 4x-5x faster than using the
quickest ImageMagick and GraphicsMagick settings
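For illustration, a minimal sketch of that typical use case (the file names, dimensions and quality value here are assumptions, not part of this change):

```javascript
// Resize a large source image once and emit a smaller, web-friendly WebP rendition.
const sharp = require('sharp');

sharp('photo.jpg')
  .resize(1280, 720, { fit: 'inside' })
  .webp({ quality: 80 })
  .toFile('photo-1280.webp')
  .then(info => console.log(info.format, info.width, info.height))
  .catch(console.error);
```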
@@ -102,7 +102,7 @@ covers reporting bugs, requesting features and submitting code changes.
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@@ -66,7 +66,7 @@ covers reporting bugs, requesting features and submitting code changes.
### Licensing
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Lovell Fuller and contributors.
Copyright 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Lovell Fuller and contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

View File

@@ -70,7 +70,7 @@ Channel ordering follows vips convention:
- sRGB: 0: Red, 1: Green, 2: Blue, 3: Alpha.
- CMYK: 0: Magenta, 1: Cyan, 2: Yellow, 3: Black, 4: Alpha.
Buffers may be any of the image formats supported by sharp: JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data.
Buffers may be any of the image formats supported by sharp.
For raw pixel input, the `options` object should contain a `raw` attribute, which follows the format of the attribute of the same name in the `sharp()` constructor.
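A brief sketch of that raw pixel case (the file names and the two-pixel overlay are assumptions): the overlay Buffer carries no dimensions of its own, so they are supplied via the `raw` attribute.

```javascript
const sharp = require('sharp');

// Two RGB pixels (red, green); width, height and channels must be given via `raw`
const overlay = Buffer.from([255, 0, 0, 0, 255, 0]);

sharp('background.png')
  .composite([{ input: overlay, raw: { width: 2, height: 1, channels: 3 }, top: 0, left: 0 }])
  .toFile('composited.png')
  .then(() => console.log('done'))
  .catch(console.error);
```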
### Parameters

View File

@@ -48,6 +48,14 @@ By default output image will be web-friendly sRGB, with additional channels inte
- `colourspace` **[string][1]?** output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...][6]
### Examples
```javascript
// Output 16 bits per pixel RGB
await sharp(input)
.toColourspace('rgb16')
.toFile('16-bpp.png')
```
- Throws **[Error][4]** Invalid parameters

View File

@@ -4,7 +4,7 @@
Constructor factory to create an instance of `sharp`, to which further methods are chained.
JPEG, PNG, WebP or TIFF format image data can be streamed out from this object.
JPEG, PNG, WebP, AVIF or TIFF format image data can be streamed out from this object.
When using Stream based output, derived attributes are available from the `info` event.
Non-critical problems encountered during processing are emitted as `warning` events.
@@ -13,32 +13,36 @@ Implements the [stream.Duplex][1] class.
### Parameters
- `input` **([Buffer][2] \| [string][3])?** if present, can be
a Buffer containing JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data, or
a String containing the filesystem path to an JPEG, PNG, WebP, GIF, SVG or TIFF image file.
JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
- `options` **[Object][4]?** if present, is an Object with optional attributes.
- `options.failOnError` **[boolean][5]** by default halt processing and raise an error when loading invalid images.
- `input` **([Buffer][2] \| [Uint8Array][3] \| [Uint8ClampedArray][4] \| [string][5])?** if present, can be
a Buffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data, or
a String containing the filesystem path to a JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
- `options` **[Object][6]?** if present, is an Object with optional attributes.
- `options.failOnError` **[boolean][7]** by default halt processing and raise an error when loading invalid images.
Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid. (optional, default `true`)
- `options.limitInputPixels` **([number][6] \| [boolean][5])** Do not process input images where the number of pixels
- `options.limitInputPixels` **([number][8] \| [boolean][7])** Do not process input images where the number of pixels
(width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF). (optional, default `268402689`)
- `options.sequentialRead` **[boolean][5]** Set this to `true` to use sequential rather than random access where possible.
- `options.sequentialRead` **[boolean][7]** Set this to `true` to use sequential rather than random access where possible.
This can reduce memory usage and might improve performance on some systems. (optional, default `false`)
- `options.density` **[number][6]** number representing the DPI for vector images in the range 1 to 100000. (optional, default `72`)
- `options.pages` **[number][6]** number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages. (optional, default `1`)
- `options.page` **[number][6]** page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based. (optional, default `0`)
- `options.level` **[number][6]** level to extract from a multi-level input (OpenSlide), zero based. (optional, default `0`)
- `options.animated` **[boolean][5]** Set to `true` to read all frames/pages of an animated image (equivalent of setting `pages` to `-1`). (optional, default `false`)
- `options.raw` **[Object][4]?** describes raw pixel input image data. See `raw()` for pixel ordering.
- `options.raw.width` **[number][6]?**
- `options.raw.height` **[number][6]?**
- `options.raw.channels` **[number][6]?** 1-4
- `options.create` **[Object][4]?** describes a new image to be created.
- `options.create.width` **[number][6]?**
- `options.create.height` **[number][6]?**
- `options.create.channels` **[number][6]?** 3-4
- `options.create.background` **([string][3] \| [Object][4])?** parsed by the [color][7] module to extract values for red, green, blue and alpha.
- `options.density` **[number][8]** number representing the DPI for vector images in the range 1 to 100000. (optional, default `72`)
- `options.pages` **[number][8]** number of pages to extract for multi-page input (GIF, WebP, AVIF, TIFF, PDF), use -1 for all pages. (optional, default `1`)
- `options.page` **[number][8]** page number to start extracting from for multi-page input (GIF, WebP, AVIF, TIFF, PDF), zero based. (optional, default `0`)
- `options.level` **[number][8]** level to extract from a multi-level input (OpenSlide), zero based. (optional, default `0`)
- `options.animated` **[boolean][7]** Set to `true` to read all frames/pages of an animated image (equivalent of setting `pages` to `-1`). (optional, default `false`)
- `options.raw` **[Object][6]?** describes raw pixel input image data. See `raw()` for pixel ordering.
- `options.raw.width` **[number][8]?** integral number of pixels wide.
- `options.raw.height` **[number][8]?** integral number of pixels high.
- `options.raw.channels` **[number][8]?** integral number of channels, between 1 and 4.
- `options.create` **[Object][6]?** describes a new image to be created.
- `options.create.width` **[number][8]?** integral number of pixels wide.
- `options.create.height` **[number][8]?** integral number of pixels high.
- `options.create.channels` **[number][8]?** integral number of channels, either 3 (RGB) or 4 (RGBA).
- `options.create.background` **([string][5] \| [Object][6])?** parsed by the [color][9] module to extract values for red, green, blue and alpha.
- `options.create.noise` **[Object][6]?** describes a noise to be created.
- `options.create.noise.type` **[string][5]?** type of generated noise, currently only `gaussian` is supported.
- `options.create.noise.mean` **[number][8]?** mean of pixels in generated noise.
- `options.create.noise.sigma` **[number][8]?** standard deviation of pixels in generated noise.
### Examples
@@ -84,9 +88,40 @@ sharp({
await sharp('in.gif', { animated: true }).toFile('out.webp');
```
- Throws **[Error][8]** Invalid parameters
```javascript
// Read a raw array of pixels and save it to a png
const input = Uint8Array.from([255, 255, 255, 0, 0, 0]); // or Uint8ClampedArray
const image = sharp(input, {
// because the input does not contain its dimensions or how many channels it has
// we need to specify it in the constructor options
raw: {
width: 2,
height: 1,
channels: 3
}
});
await image.toFile('my-two-pixels.png');
```
Returns **[Sharp][9]**
```javascript
// Generate RGB Gaussian noise
await sharp({
create: {
width: 300,
height: 200,
channels: 3,
noise: {
type: 'gaussian',
mean: 128,
sigma: 30
}
}
}).toFile('noise.png');
```
- Throws **[Error][10]** Invalid parameters
Returns **[Sharp][11]**
## clone
@@ -154,22 +189,26 @@ Promise.all(promises)
});
```
Returns **[Sharp][9]**
Returns **[Sharp][11]**
[1]: http://nodejs.org/api/stream.html#stream_class_stream_duplex
[2]: https://nodejs.org/api/buffer.html
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[4]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[5]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[6]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object
[7]: https://www.npmjs.org/package/color
[7]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[8]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number
[9]: #sharp
[9]: https://www.npmjs.org/package/color
[10]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Error
[11]: #sharp

View File

@@ -5,7 +5,7 @@
Write output image data to a file.
If an explicit output format is not selected, it will be inferred from the extension,
with JPEG, PNG, WebP, TIFF, DZI, and libvips' V format supported.
with JPEG, PNG, WebP, AVIF, TIFF, DZI, and libvips' V format supported.
Note that raw pixel data is only supported for buffer output.
By default all metadata will be removed, which includes EXIF-based orientation.
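As a brief illustration of that inference (the file names are assumptions), the `.avif` extension alone selects AVIF output, and metadata is stripped unless `withMetadata()` is called:

```javascript
const sharp = require('sharp');

sharp('input.jpg')
  .resize(800)
  .toFile('output.avif') // output format inferred from the file extension
  .then(info => console.log(info))
  .catch(console.error);
```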
@@ -42,7 +42,7 @@ Returns **[Promise][5]<[Object][6]>** when no callback is provided
## toBuffer
Write output to a Buffer.
JPEG, PNG, WebP, TIFF and RAW output are supported.
JPEG, PNG, WebP, AVIF, TIFF and raw pixel data output are supported.
If no explicit format is set, the output format will match the input image, except GIF and SVG input which become PNG output.
@@ -86,6 +86,21 @@ sharp(input)
.catch(err => { ... });
```
```javascript
const data = await sharp('my-image.jpg')
// output the raw pixels
.raw()
.toBuffer();
// create a more type safe way to work with the raw pixel data
// this does not copy the data; it creates a typed view over `data`'s underlying ArrayBuffer,
// so `data` and `pixelArray` share the same memory
const pixelArray = new Uint8ClampedArray(data.buffer);
// Once you have finished changing the pixelArray, pass it back to sharp as input
await sharp(pixelArray).toFile('my-changed-image.jpg');
```
Returns **[Promise][5]<[Buffer][8]>** when no callback is provided
## withMetadata
@@ -272,15 +287,15 @@ Use these TIFF options for output image.
- `options` **[Object][6]?** output options
- `options.quality` **[number][9]** quality, integer 1-100 (optional, default `80`)
- `options.force` **[boolean][7]** force TIFF output, otherwise attempt to use input format (optional, default `true`)
- `options.compression` **[boolean][7]** compression options: lzw, deflate, jpeg, ccittfax4 (optional, default `'jpeg'`)
- `options.predictor` **[boolean][7]** compression predictor options: none, horizontal, float (optional, default `'horizontal'`)
- `options.compression` **[string][2]** compression options: lzw, deflate, jpeg, ccittfax4 (optional, default `'jpeg'`)
- `options.predictor` **[string][2]** compression predictor options: none, horizontal, float (optional, default `'horizontal'`)
- `options.pyramid` **[boolean][7]** write an image pyramid (optional, default `false`)
- `options.tile` **[boolean][7]** write a tiled tiff (optional, default `false`)
- `options.tileWidth` **[boolean][7]** horizontal tile size (optional, default `256`)
- `options.tileHeight` **[boolean][7]** vertical tile size (optional, default `256`)
- `options.tileWidth` **[number][9]** horizontal tile size (optional, default `256`)
- `options.tileHeight` **[number][9]** vertical tile size (optional, default `256`)
- `options.xres` **[number][9]** horizontal resolution in pixels/mm (optional, default `1.0`)
- `options.yres` **[number][9]** vertical resolution in pixels/mm (optional, default `1.0`)
- `options.bitdepth` **[boolean][7]** reduce bitdepth to 1, 2 or 4 bit (optional, default `8`)
- `options.bitdepth` **[number][9]** reduce bitdepth to 1, 2 or 4 bit (optional, default `8`)
### Examples
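A hedged sketch exercising several of the options listed above (the input file name is an assumption): write a tiled, pyramidal TIFF with LZW compression and the horizontal predictor.

```javascript
const sharp = require('sharp');

sharp('input.png')
  .tiff({
    compression: 'lzw',
    predictor: 'horizontal',
    pyramid: true,
    tile: true,
    tileWidth: 256,
    tileHeight: 256
  })
  .toFile('output.tiff')
  .then(info => console.log(info))
  .catch(console.error);
```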
@@ -312,6 +327,7 @@ most web browsers do not display these properly.
- `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
- `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
- `options.speed` **[boolean][7]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
- `options.chromaSubsampling` **[string][2]** set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling, requires libvips v8.11.0 (optional, default `'4:2:0'`)
- Throws **[Error][4]** Invalid options
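A short sketch of these AVIF options (the input file name is an assumption); as noted above, `chromaSubsampling: '4:4:4'` requires libvips v8.11.0.

```javascript
const sharp = require('sharp');

sharp('input.jpg')
  .avif({ quality: 50, speed: 5, chromaSubsampling: '4:4:4' })
  .toFile('output.avif')
  .then(info => console.log(info))
  .catch(console.error);
```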
@@ -333,9 +349,10 @@ globally-installed libvips compiled with support for libheif, libde265 and x265.
- `options` **[Object][6]?** output options
- `options.quality` **[number][9]** quality, integer 1-100 (optional, default `50`)
- `options.compression` **[boolean][7]** compression format: av1, hevc (optional, default `'av1'`)
- `options.compression` **[string][2]** compression format: av1, hevc (optional, default `'av1'`)
- `options.lossless` **[boolean][7]** use lossless compression (optional, default `false`)
- `options.speed` **[boolean][7]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
- `options.speed` **[number][9]** CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest) (optional, default `5`)
- `options.chromaSubsampling` **[string][2]** set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling, requires libvips v8.11.0 (optional, default `'4:2:0'`)
- Throws **[Error][4]** Invalid options
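And a similar sketch for `heif()`, assuming a globally-installed libvips compiled with support for libheif, libde265 and x265 as the surrounding text requires (file names are assumptions):

```javascript
const sharp = require('sharp');

sharp('input.jpg')
  .heif({ compression: 'hevc', quality: 50, lossless: false })
  .toFile('output.heic')
  .then(info => console.log(info))
  .catch(console.error);
```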

docs/build.js Normal file
View File

@@ -0,0 +1,25 @@
'use strict';
const fs = require('fs').promises;
const path = require('path');
const documentation = require('documentation');
[
'constructor',
'input',
'resize',
'composite',
'operation',
'colour',
'channel',
'output',
'utility'
].forEach(async (m) => {
const input = path.join('lib', `${m}.js`);
const output = path.join('docs', `api-${m}.md`);
const ast = await documentation.build(input, { shallow: true });
const markdown = await documentation.formats.md(ast, { markdownToc: false });
await fs.writeFile(output, markdown);
});

View File

@@ -4,7 +4,24 @@
Requires libvips v8.10.5
### v0.27.0 - TBD
### v0.27.1 - 27th January 2021
* Ensure TIFF is cast when using float predictor.
[#2502](https://github.com/lovell/sharp/pull/2502)
[@randyridge](https://github.com/randyridge)
* Add support for Uint8Array and Uint8ClampedArray input.
[#2511](https://github.com/lovell/sharp/pull/2511)
[@leon](https://github.com/leon)
* Revert: ensure all platforms use fontconfig for font rendering.
[#2515](https://github.com/lovell/sharp/issues/2515)
* Expose libvips gaussnoise operation to allow creation of Gaussian noise.
[#2527](https://github.com/lovell/sharp/pull/2527)
[@alza54](https://github.com/alza54)
### v0.27.0 - 22nd December 2020
* Add support for AVIF to prebuilt binaries.

View File

@@ -4,6 +4,7 @@
"public": ".",
"ignore": [
".*",
"build.js",
"firebase.json",
"*.md",
"image/**",

View File

@@ -0,0 +1,11 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="86 86 550 550">
<!-- Creative Commons CC0 1.0 Universal Public Domain Dedication -->
<defs>
<mask id="c">
<path fill="none" stroke="#fff" stroke-width="80" d="M258.411 285.777l200.176-26.8M244.113 466.413L451.44 438.66m.001 0V238.484m0-150.121v171.572l178.725-23.917m-359.843 19.584V477.22m2.387 156.95V462.591L93.984 486.515"/>
<path fill="none" stroke="#000" stroke-width="112" d="M451.441 610.246V438.66l178.725-23.91M269.688 112.59v171.58L90.964 308.093"/>
</mask>
</defs>
<path mask="url(#c)" fill="none" stroke="#000" stroke-width="80" d="M258.411 285.777l200.176-26.8M244.113 466.413L451.44 438.66m.001 0V238.484m0-150.121v171.572l178.725-23.917m-359.843 19.584V477.22m2.387 156.95V462.591L93.984 486.515"/>
<path fill="none" stroke="#000" stroke-width="80" d="M451.441 610.246V438.66l178.725-23.91M269.688 112.59v171.58L90.964 308.093"/>
</svg>


View File

@@ -1,5 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="86 86 550 550">
<!-- Copyright 2019 Lovell Fuller. This work is licensed under the Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) License. -->
<!-- Creative Commons CC0 1.0 Universal Public Domain Dedication -->
<path fill="none" stroke="#9c0" stroke-width="80" d="M258.411 285.777l200.176-26.8M244.113 466.413L451.44 438.66M451.441 438.66V238.484M451.441 88.363v171.572l178.725-23.917M270.323 255.602V477.22M272.71 634.17V462.591L93.984 486.515"/>
<path fill="none" stroke="#090" stroke-width="80" d="M451.441 610.246V438.66l178.725-23.91M269.688 112.59v171.58L90.964 308.093"/>
</svg>


View File

@@ -4,7 +4,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="Resize large images in common formats to smaller, web-friendly JPEG, PNG and WebP images of varying dimensions">
<meta name="description" content="Resize large images in common formats to smaller, web-friendly JPEG, PNG, WebP and AVIF images of varying dimensions">
<meta http-equiv="Content-Security-Policy" content="default-src 'self'; object-src 'none'; style-src 'unsafe-inline';
img-src 'unsafe-inline' data: https://pixel.plumbing/px/ https://cdn.jsdelivr.net/gh/lovell/ https://www.google-analytics.com;
connect-src 'self' https://cdn.jsdelivr.net/gh/lovell/ https://www.google-analytics.com;
@@ -19,7 +19,7 @@
<link rel="apple-touch-icon-precomposed" sizes="72x72" href="https://pixel.plumbing/px/72x72/sharp-logo.svg">
<link rel="apple-touch-icon-precomposed" href="https://pixel.plumbing/px/57x57/sharp-logo.svg">
<link rel="author" href="/humans.txt" type="text/plain">
<link rel="preload" href="https://cdn.jsdelivr.net/gh/lovell/sharp@v0.26.3/docs/README.md" as="fetch" type="text/markdown" crossorigin>
<link rel="preload" href="https://cdn.jsdelivr.net/gh/lovell/sharp@v0.27.1/docs/README.md" as="fetch" type="text/markdown" crossorigin>
<link rel="preload" href="https://cdn.jsdelivr.net/gh/lovell/sharp@master/docs/image/sharp-logo.svg" as="image" type="image/svg+xml" crossorigin>
<link rel="dns-prefetch" href="https://pixel.plumbing">
<link rel="dns-prefetch" href="https://www.google-analytics.com">
@@ -38,7 +38,7 @@
"@type": "Person",
"name": "Lovell Fuller"
},
"copyrightYear": [2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020],
"copyrightYear": [2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021],
"license": "https://www.apache.org/licenses/LICENSE-2.0"
}
</script>
@@ -139,7 +139,7 @@
docuteApiTitlePlugin,
docuteApiSearchPlugin
],
sourcePath: 'https://cdn.jsdelivr.net/gh/lovell/sharp@v0.26.3/docs',
sourcePath: 'https://cdn.jsdelivr.net/gh/lovell/sharp@v0.27.1/docs',
nav: [
{
title: 'Funding',

View File

@@ -50,12 +50,26 @@ The following platforms require compilation of both libvips and sharp from sourc
The architecture and platform of Node.js used for `npm install`
must be the same as the architecture and platform of Node.js used at runtime.
The `npm install --unsafe-perm` flag must be used when installing as `root` or a `sudo` user.
When using npm v6 or earlier, the `npm install --unsafe-perm` flag must be used when installing as `root` or a `sudo` user.
When using npm v7, the user running `npm install` must own the directory it is run in.
The `npm install --ignore-scripts=false` flag must be used when `npm` has been configured to ignore installation scripts.
Check the output of running `npm install --verbose sharp` for useful error messages.
## Apple M1
libvips must currently be installed via Homebrew before installing sharp.
```sh
brew install vips
```
When this new ARM64 CPU is made freely available
to open source projects via a CI service,
prebuilt binaries can be provided.
## Custom libvips
To use a custom, globally-installed version of libvips instead of the provided binaries,
@@ -184,3 +198,32 @@ The main thread must call `require('sharp')`
before worker threads are created
to ensure shared libraries remain loaded in memory
until after all threads are complete.
## Known conflicts
### Electron and Linux
The prebuilt binaries provided by Electron for Linux depend on many shared system libraries.
One of these, `libgobject-2.0.so`,
is known to conflict with the statically-linked binaries provided by sharp
and the following error can occur:
```
basic_string::_S_construct null not valid
```
To work around this, set the `LD_PRELOAD` environment variable before the `electron` binary is run.
```sh
LD_PRELOAD=node_modules/sharp/vendor/8.10.5/lib/libvips.so.42 electron script.js
```
### Canvas and Windows
The prebuilt binaries provided by `canvas` for Windows depend on the unmaintained GTK 2, last updated in 2011.
These conflict with the modern, up-to-date binaries provided by sharp.
If both modules are used in the same Windows process, the following error will occur:
```
The specified procedure could not be found.
```

File diff suppressed because one or more lines are too long

View File

@@ -9,7 +9,7 @@ const searchIndex = [];
// Install
const contents = fs.readFileSync(path.join(__dirname, '..', 'install.md'), 'utf8');
const matches = contents.matchAll(
/## (?<title>[A-Za-z ]+)\n\n(?<body>[^#]+)/gs
/## (?<title>[A-Za-z0-9 ]+)\n\n(?<body>[^#]+)/gs
);
for (const match of matches) {
const { title, body } = match.groups;
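The added `0-9` lets the search index pick up headings that contain digits, such as the "Apple M1" section added to install.md in this change. A quick sketch (the sample string is an assumption):

```javascript
// The updated pattern now matches digit-bearing headings like "## Apple M1".
const sample = '## Apple M1\n\nlibvips must currently be installed via Homebrew before installing sharp.\n';
const matches = [...sample.matchAll(/## (?<title>[A-Za-z0-9 ]+)\n\n(?<body>[^#]+)/gs)];
console.log(matches[0].groups.title); // "Apple M1"
```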

View File

@@ -85,7 +85,7 @@ function extractChannel (channel) {
* - sRGB: 0: Red, 1: Green, 2: Blue, 3: Alpha.
* - CMYK: 0: Magenta, 1: Cyan, 2: Yellow, 3: Black, 4: Alpha.
*
* Buffers may be any of the image formats supported by sharp: JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data.
* Buffers may be any of the image formats supported by sharp.
* For raw pixel input, the `options` object should contain a `raw` attribute, which follows the format of the attribute of the same name in the `sharp()` constructor.
*
* @param {Array<string|Buffer>|string|Buffer} images - one or more images (file paths, Buffers).

View File

@@ -57,6 +57,13 @@ function grayscale (grayscale) {
/**
* Set the output colourspace.
* By default output image will be web-friendly sRGB, with additional channels interpreted as alpha channels.
*
* @example
* // Output 16 bits per pixel RGB
* await sharp(input)
* .toColourspace('rgb16')
* .toFile('16-bpp.png')
*
* @param {string} [colourspace] - output colourspace e.g. `srgb`, `rgb`, `cmyk`, `lab`, `b-w` [...](https://github.com/libvips/libvips/blob/master/libvips/iofuncs/enumtypes.c#L568)
* @returns {Sharp}
* @throws {Error} Invalid parameters

View File

@@ -40,7 +40,7 @@ const debuglog = util.debuglog('sharp');
/**
* Constructor factory to create an instance of `sharp`, to which further methods are chained.
*
* JPEG, PNG, WebP or TIFF format image data can be streamed out from this object.
* JPEG, PNG, WebP, AVIF or TIFF format image data can be streamed out from this object.
* When using Stream based output, derived attributes are available from the `info` event.
*
* Non-critical problems encountered during processing are emitted as `warning` events.
@@ -90,10 +90,39 @@ const debuglog = util.debuglog('sharp');
* // Convert an animated GIF to an animated WebP
* await sharp('in.gif', { animated: true }).toFile('out.webp');
*
* @param {(Buffer|string)} [input] - if present, can be
* a Buffer containing JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data, or
* a String containing the filesystem path to an JPEG, PNG, WebP, GIF, SVG or TIFF image file.
* JPEG, PNG, WebP, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
* @example
* // Read a raw array of pixels and save it to a png
* const input = Uint8Array.from([255, 255, 255, 0, 0, 0]); // or Uint8ClampedArray
* const image = sharp(input, {
* // because the input does not contain its dimensions or how many channels it has
* // we need to specify it in the constructor options
* raw: {
* width: 2,
* height: 1,
* channels: 3
* }
* });
* await image.toFile('my-two-pixels.png');
*
* @example
* // Generate RGB Gaussian noise
* await sharp({
* create: {
* width: 300,
* height: 200,
* channels: 3,
* noise: {
* type: 'gaussian',
* mean: 128,
* sigma: 30
* }
* }
* }).toFile('noise.png');
*
* @param {(Buffer|Uint8Array|Uint8ClampedArray|string)} [input] - if present, can be
* a Buffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data, or
* a String containing the filesystem path to a JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
* JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
* @param {Object} [options] - if present, is an Object with optional attributes.
* @param {boolean} [options.failOnError=true] - by default halt processing and raise an error when loading invalid images.
* Set this flag to `false` if you'd rather apply a "best effort" to decode images, even if the data is corrupt or invalid.
@@ -103,19 +132,23 @@ const debuglog = util.debuglog('sharp');
* @param {boolean} [options.sequentialRead=false] - Set this to `true` to use sequential rather than random access where possible.
* This can reduce memory usage and might improve performance on some systems.
* @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.
* @param {number} [options.pages=1] - number of pages to extract for multi-page input (GIF, TIFF, PDF), use -1 for all pages.
* @param {number} [options.page=0] - page number to start extracting from for multi-page input (GIF, TIFF, PDF), zero based.
* @param {number} [options.pages=1] - number of pages to extract for multi-page input (GIF, WebP, AVIF, TIFF, PDF), use -1 for all pages.
* @param {number} [options.page=0] - page number to start extracting from for multi-page input (GIF, WebP, AVIF, TIFF, PDF), zero based.
* @param {number} [options.level=0] - level to extract from a multi-level input (OpenSlide), zero based.
* @param {boolean} [options.animated=false] - Set to `true` to read all frames/pages of an animated image (equivalent of setting `pages` to `-1`).
* @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
* @param {number} [options.raw.width]
* @param {number} [options.raw.height]
* @param {number} [options.raw.channels] - 1-4
* @param {number} [options.raw.width] - integral number of pixels wide.
* @param {number} [options.raw.height] - integral number of pixels high.
* @param {number} [options.raw.channels] - integral number of channels, between 1 and 4.
* @param {Object} [options.create] - describes a new image to be created.
* @param {number} [options.create.width]
* @param {number} [options.create.height]
* @param {number} [options.create.channels] - 3-4
* @param {number} [options.create.width] - integral number of pixels wide.
* @param {number} [options.create.height] - integral number of pixels high.
* @param {number} [options.create.channels] - integral number of channels, either 3 (RGB) or 4 (RGBA).
* @param {string|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
* @param {Object} [options.create.noise] - describes a noise to be created.
* @param {string} [options.create.noise.type] - type of generated noise, currently only `gaussian` is supported.
* @param {number} [options.create.noise.mean] - mean of pixels in generated noise.
* @param {number} [options.create.noise.sigma] - standard deviation of pixels in generated noise.
* @returns {Sharp}
* @throws {Error} Invalid parameters
*/
@@ -237,6 +270,7 @@ const Sharp = function (input, options) {
heifLossless: false,
heifCompression: 'av1',
heifSpeed: 5,
heifChromaSubsampling: '4:2:0',
tileSize: 256,
tileOverlap: 0,
tileContainer: 'fs',

View File

@@ -31,6 +31,9 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
} else if (is.buffer(input)) {
// Buffer
inputDescriptor.buffer = input;
} else if (is.uint8Array(input)) {
// Uint8Array or Uint8ClampedArray
inputDescriptor.buffer = Buffer.from(input.buffer);
} else if (is.plainObject(input) && !is.defined(inputOptions)) {
// Plain Object descriptor, e.g. create
inputOptions = input;
@@ -134,22 +137,50 @@ function _createInputDescriptor (input, inputOptions, containerOptions) {
is.object(inputOptions.create) &&
is.integer(inputOptions.create.width) && inputOptions.create.width > 0 &&
is.integer(inputOptions.create.height) && inputOptions.create.height > 0 &&
is.integer(inputOptions.create.channels) && is.inRange(inputOptions.create.channels, 3, 4) &&
is.defined(inputOptions.create.background)
is.integer(inputOptions.create.channels)
) {
inputDescriptor.createWidth = inputOptions.create.width;
inputDescriptor.createHeight = inputOptions.create.height;
inputDescriptor.createChannels = inputOptions.create.channels;
const background = color(inputOptions.create.background);
inputDescriptor.createBackground = [
background.red(),
background.green(),
background.blue(),
Math.round(background.alpha() * 255)
];
// Noise
if (is.defined(inputOptions.create.noise)) {
if (!is.object(inputOptions.create.noise)) {
throw new Error('Expected noise to be an object');
}
if (!is.inArray(inputOptions.create.noise.type, ['gaussian'])) {
throw new Error('Only gaussian noise is supported at the moment');
}
if (!is.inRange(inputOptions.create.channels, 1, 4)) {
throw is.invalidParameterError('create.channels', 'number between 1 and 4', inputOptions.create.channels);
}
inputDescriptor.createNoiseType = inputOptions.create.noise.type;
if (is.number(inputOptions.create.noise.mean) && is.inRange(inputOptions.create.noise.mean, 0, 10000)) {
inputDescriptor.createNoiseMean = inputOptions.create.noise.mean;
} else {
throw is.invalidParameterError('create.noise.mean', 'number between 0 and 10000', inputOptions.create.noise.mean);
}
if (is.number(inputOptions.create.noise.sigma) && is.inRange(inputOptions.create.noise.sigma, 0, 10000)) {
inputDescriptor.createNoiseSigma = inputOptions.create.noise.sigma;
} else {
throw is.invalidParameterError('create.noise.sigma', 'number between 0 and 10000', inputOptions.create.noise.sigma);
}
} else if (is.defined(inputOptions.create.background)) {
if (!is.inRange(inputOptions.create.channels, 3, 4)) {
throw is.invalidParameterError('create.channels', 'number between 3 and 4', inputOptions.create.channels);
}
const background = color(inputOptions.create.background);
inputDescriptor.createBackground = [
background.red(),
background.green(),
background.blue(),
Math.round(background.alpha() * 255)
];
} else {
throw new Error('Expected valid noise or background to create a new input image');
}
delete inputDescriptor.buffer;
} else {
throw new Error('Expected width, height, channels and background to create a new input image');
throw new Error('Expected valid width, height and channels to create a new input image');
}
}
} else if (is.defined(inputOptions)) {

View File

@@ -48,6 +48,15 @@ const buffer = function (val) {
return val instanceof Buffer;
};
/**
* Is this value a Uint8Array or Uint8ClampedArray object?
* @private
*/
const uint8Array = function (val) {
// allow both since Uint8ClampedArray simply clamps the values between 0-255
return val instanceof Uint8Array || val instanceof Uint8ClampedArray;
};
/**
* Is this value a non-empty string?
* @private
@@ -110,6 +119,7 @@ module.exports = {
fn: fn,
bool: bool,
buffer: buffer,
uint8Array: uint8Array,
string: string,
number: number,
integer: integer,

View File

@@ -39,8 +39,9 @@ const cachePath = function () {
const globalLibvipsVersion = function () {
if (process.platform !== 'win32') {
const globalLibvipsVersion = spawnSync(`PKG_CONFIG_PATH="${pkgConfigPath()}" pkg-config --modversion vips-cpp`, spawnSyncOptions).stdout || '';
return globalLibvipsVersion.trim();
const globalLibvipsVersion = spawnSync(`PKG_CONFIG_PATH="${pkgConfigPath()}" pkg-config --modversion vips-cpp`, spawnSyncOptions).stdout;
/* istanbul ignore next */
return (globalLibvipsVersion || '').trim();
} else {
return '';
}

View File

@@ -20,7 +20,7 @@ const formats = new Map([
* Write output image data to a file.
*
* If an explicit output format is not selected, it will be inferred from the extension,
* with JPEG, PNG, WebP, TIFF, DZI, and libvips' V format supported.
* with JPEG, PNG, WebP, AVIF, TIFF, DZI, and libvips' V format supported.
* Note that raw pixel data is only supported for buffer output.
*
* By default all metadata will be removed, which includes EXIF-based orientation.
@@ -72,7 +72,7 @@ function toFile (fileOut, callback) {
/**
* Write output to a Buffer.
* JPEG, PNG, WebP, TIFF and RAW output are supported.
* JPEG, PNG, WebP, AVIF, TIFF and raw pixel data output are supported.
*
* If no explicit format is set, the output format will match the input image, except GIF and SVG input which become PNG output.
*
@@ -104,6 +104,20 @@ function toFile (fileOut, callback) {
* .then(({ data, info }) => { ... })
* .catch(err => { ... });
*
* @example
* const data = await sharp('my-image.jpg')
* // output the raw pixels
* .raw()
* .toBuffer();
*
* // create a more type safe way to work with the raw pixel data
* // this does not copy the data; it creates a typed view over `data`'s underlying ArrayBuffer,
* // so `data` and `pixelArray` share the same memory
* const pixelArray = new Uint8ClampedArray(data.buffer);
*
* // Once you have finished changing the pixelArray, pass it back to sharp as input
* await sharp(pixelArray).toFile('my-changed-image.jpg');
*
* @param {Object} [options]
* @param {boolean} [options.resolveWithObject] Resolve the Promise with an Object containing `data` and `info` properties instead of resolving only with `data`.
* @param {Function} [callback]
@@ -472,15 +486,15 @@ function trySetAnimationOptions (source, target) {
* @param {Object} [options] - output options
* @param {number} [options.quality=80] - quality, integer 1-100
* @param {boolean} [options.force=true] - force TIFF output, otherwise attempt to use input format
* @param {boolean} [options.compression='jpeg'] - compression options: lzw, deflate, jpeg, ccittfax4
* @param {boolean} [options.predictor='horizontal'] - compression predictor options: none, horizontal, float
* @param {string} [options.compression='jpeg'] - compression options: lzw, deflate, jpeg, ccittfax4
* @param {string} [options.predictor='horizontal'] - compression predictor options: none, horizontal, float
* @param {boolean} [options.pyramid=false] - write an image pyramid
* @param {boolean} [options.tile=false] - write a tiled tiff
* @param {boolean} [options.tileWidth=256] - horizontal tile size
* @param {boolean} [options.tileHeight=256] - vertical tile size
* @param {number} [options.tileWidth=256] - horizontal tile size
* @param {number} [options.tileHeight=256] - vertical tile size
* @param {number} [options.xres=1.0] - horizontal resolution in pixels/mm
* @param {number} [options.yres=1.0] - vertical resolution in pixels/mm
* @param {boolean} [options.bitdepth=8] - reduce bitdepth to 1, 2 or 4 bit
* @param {number} [options.bitdepth=8] - reduce bitdepth to 1, 2 or 4 bit
* @returns {Sharp}
* @throws {Error} Invalid options
*/
@@ -569,6 +583,7 @@ function tiff (options) {
* @param {number} [options.quality=50] - quality, integer 1-100
* @param {boolean} [options.lossless=false] - use lossless compression
* @param {boolean} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
* @param {string} [options.chromaSubsampling='4:2:0'] - set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling, requires libvips v8.11.0
* @returns {Sharp}
* @throws {Error} Invalid options
*/
@@ -586,9 +601,10 @@ function avif (options) {
*
* @param {Object} [options] - output options
* @param {number} [options.quality=50] - quality, integer 1-100
* @param {boolean} [options.compression='av1'] - compression format: av1, hevc
* @param {string} [options.compression='av1'] - compression format: av1, hevc
* @param {boolean} [options.lossless=false] - use lossless compression
* @param {boolean} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
* @param {number} [options.speed=5] - CPU effort vs file size, 0 (slowest/smallest) to 8 (fastest/largest)
* @param {string} [options.chromaSubsampling='4:2:0'] - set to '4:4:4' to prevent chroma subsampling otherwise defaults to '4:2:0' chroma subsampling, requires libvips v8.11.0
* @returns {Sharp}
* @throws {Error} Invalid options
*/
@@ -622,6 +638,13 @@ function heif (options) {
throw is.invalidParameterError('speed', 'integer between 0 and 8', options.speed);
}
}
if (is.defined(options.chromaSubsampling)) {
if (is.string(options.chromaSubsampling) && is.inArray(options.chromaSubsampling, ['4:2:0', '4:4:4'])) {
this.options.heifChromaSubsampling = options.chromaSubsampling;
} else {
throw is.invalidParameterError('chromaSubsampling', 'one of: 4:2:0, 4:4:4', options.chromaSubsampling);
}
}
}
return this._updateFormatOut('heif', options);
}

View File

@@ -1,7 +1,7 @@
{
"name": "sharp",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP and TIFF images",
"version": "0.27.0-beta1",
"description": "High performance Node.js image processing, the fastest module to resize JPEG, PNG, WebP, AVIF and TIFF images",
"version": "0.27.1",
"author": "Lovell Fuller <npm@lovell.info>",
"homepage": "https://github.com/lovell/sharp",
"contributors": [
@@ -72,17 +72,19 @@
"Robert O'Rourke <robert@o-rourke.org>",
"Guillermo Alfonso Varela Chouciño <guillevch@gmail.com>",
"Christian Flintrup <chr@gigahost.dk>",
"Manan Jadhav <manan@motionden.com>"
"Manan Jadhav <manan@motionden.com>",
"Leon Radley <leon@radley.se>",
"alza54 <alza54@thiocod.in>"
],
"scripts": {
"install": "(node install/libvips && node install/dll-copy && prebuild-install) || (node-gyp rebuild && node install/dll-copy)",
"clean": "rm -rf node_modules/ build/ vendor/ .nyc_output/ coverage/ test/fixtures/output.*",
"test": "semistandard && cpplint && npm run test-unit && npm run test-licensing",
"test-unit": "nyc --reporter=lcov --branches=99 mocha --slow=5000 --timeout=60000 ./test/unit/*.js",
"test-unit": "nyc --reporter=lcov --branches=99 mocha --parallel --slow=5000 --timeout=60000 ./test/unit/*.js",
"test-licensing": "license-checker --production --summary --onlyAllow=\"Apache-2.0;BSD;ISC;MIT\"",
"test-coverage": "./test/coverage/report.sh",
"test-leak": "./test/leak/leak.sh",
"docs-build": "documentation lint lib && for m in constructor input resize composite operation colour channel output utility; do documentation build --shallow --format=md --markdown-toc=false lib/$m.js >docs/api-$m.md; done && node docs/search-index/build",
"docs-build": "documentation lint lib && node docs/build && node docs/search-index/build",
"docs-serve": "cd docs && npx serve",
"docs-publish": "cd docs && npx firebase-tools deploy --project pixelplumbing --only hosting:pixelplumbing-sharp"
},
@@ -129,8 +131,8 @@
"devDependencies": {
"async": "^3.2.0",
"cc": "^3.0.1",
"decompress-zip": "^0.3.2",
"documentation": "^13.1.0",
"decompress-zip": "^0.3.3",
"documentation": "^13.1.1",
"exif-reader": "^1.0.3",
"icc": "^2.0.0",
"license-checker": "^25.0.1",

View File

@@ -109,7 +109,13 @@ namespace sharp {
descriptor->createChannels = AttrAsUint32(input, "createChannels");
descriptor->createWidth = AttrAsUint32(input, "createWidth");
descriptor->createHeight = AttrAsUint32(input, "createHeight");
descriptor->createBackground = AttrAsVectorOfDouble(input, "createBackground");
if (HasAttr(input, "createNoiseType")) {
descriptor->createNoiseType = AttrAsStr(input, "createNoiseType");
descriptor->createNoiseMean = AttrAsDouble(input, "createNoiseMean");
descriptor->createNoiseSigma = AttrAsDouble(input, "createNoiseSigma");
} else {
descriptor->createBackground = AttrAsVectorOfDouble(input, "createBackground");
}
}
// Limit input images to a given number of pixels, where pixels = width * height
descriptor->limitInputPixels = AttrAsUint32(input, "limitInputPixels");
@@ -189,31 +195,38 @@ namespace sharp {
return id;
}
/**
* Regenerate this table with something like:
*
* $ vips -l foreign | grep -i load | awk '{ print $2, $1; }'
*
* Plus a bit of editing.
*/
std::map<std::string, ImageType> loaderToType = {
{ "jpegload", ImageType::JPEG },
{ "jpegload_buffer", ImageType::JPEG },
{ "pngload", ImageType::PNG },
{ "pngload_buffer", ImageType::PNG },
{ "webpload", ImageType::WEBP },
{ "webpload_buffer", ImageType::WEBP },
{ "tiffload", ImageType::TIFF },
{ "tiffload_buffer", ImageType::TIFF },
{ "gifload", ImageType::GIF },
{ "gifload_buffer", ImageType::GIF },
{ "svgload", ImageType::SVG },
{ "svgload_buffer", ImageType::SVG },
{ "heifload", ImageType::HEIF },
{ "heifload_buffer", ImageType::HEIF },
{ "pdfload", ImageType::PDF },
{ "pdfload_buffer", ImageType::PDF },
{ "magickload", ImageType::MAGICK },
{ "magickload_buffer", ImageType::MAGICK },
{ "openslideload", ImageType::OPENSLIDE },
{ "ppmload", ImageType::PPM },
{ "fitsload", ImageType::FITS },
{ "openexrload", ImageType::EXR },
{ "vipsload", ImageType::VIPS },
{ "rawload", ImageType::RAW }
{ "VipsForeignLoadJpegFile", ImageType::JPEG },
{ "VipsForeignLoadJpegBuffer", ImageType::JPEG },
{ "VipsForeignLoadPngFile", ImageType::PNG },
{ "VipsForeignLoadPngBuffer", ImageType::PNG },
{ "VipsForeignLoadWebpFile", ImageType::WEBP },
{ "VipsForeignLoadWebpBuffer", ImageType::WEBP },
{ "VipsForeignLoadTiffFile", ImageType::TIFF },
{ "VipsForeignLoadTiffBuffer", ImageType::TIFF },
{ "VipsForeignLoadGifFile", ImageType::GIF },
{ "VipsForeignLoadGifBuffer", ImageType::GIF },
{ "VipsForeignLoadSvgFile", ImageType::SVG },
{ "VipsForeignLoadSvgBuffer", ImageType::SVG },
{ "VipsForeignLoadHeifFile", ImageType::HEIF },
{ "VipsForeignLoadHeifBuffer", ImageType::HEIF },
{ "VipsForeignLoadPdfFile", ImageType::PDF },
{ "VipsForeignLoadPdfBuffer", ImageType::PDF },
{ "VipsForeignLoadMagickFile", ImageType::MAGICK },
{ "VipsForeignLoadMagickBuffer", ImageType::MAGICK },
{ "VipsForeignLoadOpenslide", ImageType::OPENSLIDE },
{ "VipsForeignLoadPpmFile", ImageType::PPM },
{ "VipsForeignLoadFits", ImageType::FITS },
{ "VipsForeignLoadOpenexr", ImageType::EXR },
{ "VipsForeignLoadVips", ImageType::VIPS },
{ "VipsForeignLoadRaw", ImageType::RAW }
};
/*
@@ -223,7 +236,7 @@ namespace sharp {
ImageType imageType = ImageType::UNKNOWN;
char const *load = vips_foreign_find_load_buffer(buffer, length);
if (load != nullptr) {
auto it = loaderToType.find(vips_nickname_find(g_type_from_name(load)));
auto it = loaderToType.find(load);
if (it != loaderToType.end()) {
imageType = it->second;
}
@@ -238,7 +251,7 @@ namespace sharp {
ImageType imageType = ImageType::UNKNOWN;
char const *load = vips_foreign_find_load(file);
if (load != nullptr) {
auto it = loaderToType.find(vips_nickname_find(g_type_from_name(load)));
auto it = loaderToType.find(load);
if (it != loaderToType.end()) {
imageType = it->second;
}
@@ -318,15 +331,35 @@ namespace sharp {
} else {
if (descriptor->createChannels > 0) {
// Create new image
std::vector<double> background = {
descriptor->createBackground[0],
descriptor->createBackground[1],
descriptor->createBackground[2]
};
if (descriptor->createChannels == 4) {
background.push_back(descriptor->createBackground[3]);
if (descriptor->createNoiseType == "gaussian") {
int const channels = descriptor->createChannels;
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight);
std::vector<VImage> bands = {};
bands.reserve(channels);
for (int _band = 0; _band < channels; _band++) {
bands.push_back(image.gaussnoise(
descriptor->createWidth,
descriptor->createHeight,
VImage::option()->set("mean", descriptor->createNoiseMean)->set("sigma", descriptor->createNoiseSigma)));
}
image = image.bandjoin(bands);
image = image.cast(VipsBandFormat::VIPS_FORMAT_UCHAR);
if (channels < 3) {
image = image.colourspace(VIPS_INTERPRETATION_B_W);
} else {
image = image.colourspace(VIPS_INTERPRETATION_sRGB);
}
} else {
std::vector<double> background = {
descriptor->createBackground[0],
descriptor->createBackground[1],
descriptor->createBackground[2]
};
if (descriptor->createChannels == 4) {
background.push_back(descriptor->createBackground[3]);
}
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight).new_from_image(background);
}
image = VImage::new_matrix(descriptor->createWidth, descriptor->createHeight).new_from_image(background);
image.get_image()->Type = VIPS_INTERPRETATION_sRGB;
imageType = ImageType::RAW;
} else {

View File

@@ -64,6 +64,9 @@ namespace sharp {
int createWidth;
int createHeight;
std::vector<double> createBackground;
std::string createNoiseType;
double createNoiseMean;
double createNoiseSigma;
InputDescriptor():
buffer(nullptr),
@@ -82,7 +85,9 @@ namespace sharp {
createChannels(0),
createWidth(0),
createHeight(0),
createBackground{ 0.0, 0.0, 0.0, 255.0 } {}
createBackground{ 0.0, 0.0, 0.0, 255.0 },
createNoiseMean(0.0),
createNoiseSigma(0.0) {}
};
// Convenience methods to access the attributes of a Napi::Object

View File

@@ -74,6 +74,9 @@ class MetadataWorker : public Napi::AsyncWorker {
if (image.get_typeof("heif-primary") == G_TYPE_INT) {
baton->pagePrimary = image.get_int("heif-primary");
}
if (image.get_typeof("heif-compression") == VIPS_TYPE_REF_STRING) {
baton->compression = image.get_string("heif-compression");
}
if (image.get_typeof("openslide.level-count") == VIPS_TYPE_REF_STRING) {
int const levels = std::stoi(image.get_string("openslide.level-count"));
for (int l = 0; l < levels; l++) {
@@ -186,6 +189,9 @@ class MetadataWorker : public Napi::AsyncWorker {
if (baton->pagePrimary > -1) {
info.Set("pagePrimary", baton->pagePrimary);
}
if (!baton->compression.empty()) {
info.Set("compression", baton->compression);
}
if (!baton->levels.empty()) {
int i = 0;
Napi::Array levels = Napi::Array::New(env, static_cast<size_t>(baton->levels.size()));
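On the JavaScript side this surfaces as a new `compression` property returned by `metadata()` for HEIF-based input; a hedged sketch (the file name and the logged values are assumptions):

```javascript
const sharp = require('sharp');

sharp('photo.avif')
  .metadata()
  .then(({ format, compression }) => {
    // e.g. 'heif' and 'av1' for AVIF input, where libvips exposes heif-compression
    console.log(format, compression);
  })
  .catch(console.error);
```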

View File

@@ -39,6 +39,7 @@ struct MetadataBaton {
int loop;
std::vector<int> delay;
int pagePrimary;
std::string compression;
std::vector<std::pair<int, int>> levels;
bool hasProfile;
bool hasAlpha;

View File

@@ -730,7 +730,7 @@ class PipelineWorker : public Napi::AsyncWorker {
if (baton->formatOut == "jpeg" || (baton->formatOut == "input" && inputImageType == sharp::ImageType::JPEG)) {
// Write JPEG to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JPEG);
VipsArea *area = VIPS_AREA(image.jpegsave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.jpegsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->jpegQuality)
->set("interlace", baton->jpegProgressive)
@@ -757,7 +757,7 @@ class PipelineWorker : public Napi::AsyncWorker {
inputImageType == sharp::ImageType::SVG))) {
// Write PNG to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::PNG);
VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.pngsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("interlace", baton->pngProgressive)
->set("compression", baton->pngCompressionLevel)
@@ -775,7 +775,7 @@ class PipelineWorker : public Napi::AsyncWorker {
(baton->formatOut == "input" && inputImageType == sharp::ImageType::WEBP)) {
// Write WEBP to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::WEBP);
VipsArea *area = VIPS_AREA(image.webpsave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.webpsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->webpQuality)
->set("lossless", baton->webpLossless)
@@ -792,7 +792,7 @@ class PipelineWorker : public Napi::AsyncWorker {
(baton->formatOut == "input" && inputImageType == sharp::ImageType::GIF && supportsGifOutput)) {
// Write GIF to buffer
sharp::AssertImageTypeDimensions(image, sharp::ImageType::GIF);
VipsArea *area = VIPS_AREA(image.magicksave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.magicksave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("optimize_gif_frames", TRUE)
->set("optimize_gif_transparency", TRUE)
@@ -813,7 +813,7 @@ class PipelineWorker : public Napi::AsyncWorker {
if (baton->tiffPredictor == VIPS_FOREIGN_TIFF_PREDICTOR_FLOAT) {
image = image.cast(VIPS_FORMAT_FLOAT);
}
VipsArea *area = VIPS_AREA(image.tiffsave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.tiffsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->tiffQuality)
->set("bitdepth", baton->tiffBitdepth)
@@ -833,11 +833,15 @@ class PipelineWorker : public Napi::AsyncWorker {
} else if (baton->formatOut == "heif" ||
(baton->formatOut == "input" && inputImageType == sharp::ImageType::HEIF)) {
// Write HEIF to buffer
VipsArea *area = VIPS_AREA(image.heifsave_buffer(VImage::option()
VipsArea *area = reinterpret_cast<VipsArea*>(image.heifsave_buffer(VImage::option()
->set("strip", !baton->withMetadata)
->set("compression", baton->heifCompression)
->set("Q", baton->heifQuality)
->set("speed", baton->heifSpeed)
#if defined(VIPS_TYPE_FOREIGN_SUBSAMPLE)
->set("subsample_mode", baton->heifChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
#endif
->set("lossless", baton->heifLossless)));
baton->bufferOut = static_cast<char*>(area->data);
baton->bufferOutLength = area->length;
@@ -951,6 +955,10 @@ class PipelineWorker : public Napi::AsyncWorker {
sharp::AssertImageTypeDimensions(image, sharp::ImageType::JPEG);
baton->channels = std::min(baton->channels, 3);
}
// Cast pixel values to float, if required
if (baton->tiffPredictor == VIPS_FOREIGN_TIFF_PREDICTOR_FLOAT) {
image = image.cast(VIPS_FORMAT_FLOAT);
}
image.tiffsave(const_cast<char*>(baton->fileOut.data()), VImage::option()
->set("strip", !baton->withMetadata)
->set("Q", baton->tiffQuality)
@@ -972,6 +980,10 @@ class PipelineWorker : public Napi::AsyncWorker {
->set("Q", baton->heifQuality)
->set("compression", baton->heifCompression)
->set("speed", baton->heifSpeed)
#if defined(VIPS_TYPE_FOREIGN_SUBSAMPLE)
->set("subsample_mode", baton->heifChromaSubsampling == "4:4:4"
? VIPS_FOREIGN_SUBSAMPLE_OFF : VIPS_FOREIGN_SUBSAMPLE_ON)
#endif
->set("lossless", baton->heifLossless));
baton->formatOut = "heif";
} else if (baton->formatOut == "dz" || isDz || isDzZip) {
@@ -1396,6 +1408,7 @@ Napi::Value pipeline(const Napi::CallbackInfo& info) {
vips_enum_from_nick(nullptr, VIPS_TYPE_FOREIGN_HEIF_COMPRESSION,
sharp::AttrAsStr(options, "heifCompression").data()));
baton->heifSpeed = sharp::AttrAsUint32(options, "heifSpeed");
baton->heifChromaSubsampling = sharp::AttrAsStr(options, "heifChromaSubsampling");
// Animated output
if (sharp::HasAttr(options, "pageHeight")) {

View File

@@ -162,6 +162,7 @@ struct PipelineBaton {
int heifQuality;
VipsForeignHeifCompression heifCompression;
int heifSpeed;
std::string heifChromaSubsampling;
bool heifLossless;
std::string err;
bool withMetadata;
@@ -282,6 +283,7 @@ struct PipelineBaton {
heifQuality(50),
heifCompression(VIPS_FOREIGN_HEIF_COMPRESSION_AV1),
heifSpeed(5),
heifChromaSubsampling("4:2:0"),
heifLossless(false),
withMetadata(false),
withMetadataOrientation(-1),

View File

@@ -23,7 +23,6 @@
static void* sharp_vips_init(void*) {
g_setenv("VIPS_MIN_STACK_SIZE", "2m", FALSE);
g_setenv("PANGOCAIRO_BACKEND", "fontconfig", FALSE);
vips_init("sharp");
return nullptr;
}

View File

@@ -15,6 +15,10 @@ const jimp = require('jimp');
const fixtures = require('../fixtures');
const outputJpg = fixtures.path('output.jpg');
const outputPng = fixtures.path('output.png');
const outputWebP = fixtures.path('output.webp');
const width = 720;
const height = 588;
@@ -56,7 +60,7 @@ async.series({
image
.resize(width, height, jimp.RESIZE_BICUBIC)
.quality(80)
.write(fixtures.outputJpg, function (err) {
.write(outputJpg, function (err) {
if (err) {
throw err;
} else {
@@ -77,7 +81,7 @@ async.series({
.resize(width, height, {
scaling_method: mapnik.imageScaling.lanczos
})
.save(fixtures.outputJpg, 'jpeg:quality=80', function (err) {
.save(outputJpg, 'jpeg:quality=80', function (err) {
if (err) throw err;
deferred.resolve();
});
@@ -105,7 +109,7 @@ async.series({
fn: function (deferred) {
imagemagick.resize({
srcPath: fixtures.inputJpg,
dstPath: fixtures.outputJpg,
dstPath: outputJpg,
quality: 0.8,
width: width,
height: height,
@@ -128,7 +132,7 @@ async.series({
.filter('Lanczos')
.resize(width, height)
.quality(80)
.write(fixtures.outputJpg, function (err) {
.write(outputJpg, function (err) {
if (err) {
throw err;
} else {
@@ -159,7 +163,7 @@ async.series({
.filter('Lanczos')
.resize(width, height)
.quality(80)
.write(fixtures.outputJpg, function (err) {
.write(outputJpg, function (err) {
if (err) {
throw err;
} else {
@@ -190,7 +194,7 @@ async.series({
fn: function (deferred) {
sharp(inputJpgBuffer)
.resize(width, height)
.toFile(fixtures.outputJpg, function (err) {
.toFile(outputJpg, function (err) {
if (err) {
throw err;
} else {
@@ -217,7 +221,7 @@ async.series({
fn: function (deferred) {
sharp(fixtures.inputJpg)
.resize(width, height)
.toFile(fixtures.outputJpg, function (err) {
.toFile(outputJpg, function (err) {
if (err) {
throw err;
} else {
@@ -229,7 +233,7 @@ async.series({
defer: true,
fn: function (deferred) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
writable.on('finish', function () {
deferred.resolve();
});
@@ -600,7 +604,7 @@ async.series({
.resize(width, height)
.deflateLevel(6)
.filterType(0)
.write(fixtures.outputPng, function (err) {
.write(outputPng, function (err) {
if (err) {
throw err;
} else {
@@ -625,7 +629,7 @@ async.series({
if (err) throw err;
img.demultiply(function (err, img) {
if (err) throw err;
img.save(fixtures.outputPng, 'png', function (err) {
img.save(outputPng, 'png', function (err) {
if (err) throw err;
deferred.resolve();
});
@@ -663,7 +667,7 @@ async.series({
fn: function (deferred) {
imagemagick.resize({
srcPath: fixtures.inputPngAlphaPremultiplicationLarge,
dstPath: fixtures.outputPng,
dstPath: outputPng,
width: width,
height: height,
filter: 'Lanczos',
@@ -689,7 +693,7 @@ async.series({
.resize(width, height)
.define('PNG:compression-level=6')
.define('PNG:compression-filter=0')
.write(fixtures.outputPng, function (err) {
.write(outputPng, function (err) {
if (err) {
throw err;
} else {
@@ -723,7 +727,7 @@ async.series({
sharp(inputPngBuffer)
.resize(width, height)
.png({ compressionLevel: 6 })
.toFile(fixtures.outputPng, function (err) {
.toFile(outputPng, function (err) {
if (err) {
throw err;
} else {
@@ -754,7 +758,7 @@ async.series({
sharp(fixtures.inputPngAlphaPremultiplicationLarge)
.resize(width, height)
.png({ compressionLevel: 6 })
.toFile(fixtures.outputPng, function (err) {
.toFile(outputPng, function (err) {
if (err) {
throw err;
} else {
@@ -841,7 +845,7 @@ async.series({
fn: function (deferred) {
sharp(inputWebPBuffer)
.resize(width, height)
.toFile(fixtures.outputWebP, function (err) {
.toFile(outputWebP, function (err) {
if (err) {
throw err;
} else {
@@ -868,7 +872,7 @@ async.series({
fn: function (deferred) {
sharp(fixtures.inputWebP)
.resize(width, height)
.toFile(fixtures.outputWebP, function (err) {
.toFile(outputWebP, function (err) {
if (err) {
throw err;
} else {

View File

@@ -23,7 +23,7 @@ new Benchmark.Suite('random').add('imagemagick', {
fn: function (deferred) {
imagemagick.resize({
srcPath: fixtures.inputJpg,
dstPath: fixtures.outputJpg,
dstPath: fixtures.path('output.jpg'),
quality: 0.8,
width: randomDimension(),
height: randomDimension(),

Binary file not shown (before: 1.0 MiB, after: 424 KiB).

Binary file not shown (before: 2.8 MiB, after: 943 KiB).

View File

@@ -120,13 +120,6 @@ module.exports = {
inputV: getPath('vfile.v'),
outputJpg: getPath('output.jpg'),
outputPng: getPath('output.png'),
outputWebP: getPath('output.webp'),
outputV: getPath('output.v'),
outputTiff: getPath('output.tiff'),
outputZoinks: getPath('output.zoinks'), // an 'unknown' file extension
testPattern: getPath('test-pattern.png'),
// Path for tests requiring human inspection

View File

@@ -18,7 +18,7 @@ describe('AVIF', () => {
.toBuffer();
const metadata = await sharp(data)
.metadata();
const { size, ...metadataWithoutSize } = metadata;
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
channels: 3,
depth: 'uchar',
@@ -42,7 +42,7 @@ describe('AVIF', () => {
.toBuffer();
const metadata = await sharp(data)
.metadata();
const { size, ...metadataWithoutSize } = metadata;
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
channels: 3,
chromaSubsampling: '4:2:0',
@@ -65,7 +65,7 @@ describe('AVIF', () => {
.toBuffer();
const metadata = await sharp(data)
.metadata();
const { size, ...metadataWithoutSize } = metadata;
const { compression, size, ...metadataWithoutSize } = metadata;
assert.deepStrictEqual(metadataWithoutSize, {
channels: 3,
depth: 'uchar',

View File

@@ -53,7 +53,7 @@ describe('failOnError', function () {
it('returns errors to callback for truncated JPEG', function (done) {
sharp(fixtures.inputJpgTruncated).toBuffer(function (err, data, info) {
assert.ok(err.message.includes('VipsJpeg: Premature end of JPEG file'), err);
assert.ok(err.message.includes('VipsJpeg: Premature end of'), err);
assert.strictEqual(data, undefined);
assert.strictEqual(info, undefined);
done();
@@ -76,7 +76,7 @@ describe('failOnError', function () {
throw new Error('Expected rejection');
})
.catch(err => {
done(err.message.includes('VipsJpeg: Premature end of JPEG file') ? undefined : err);
done(err.message.includes('VipsJpeg: Premature end of') ? undefined : err);
});
});

View File

@@ -65,4 +65,14 @@ describe('HEIF', () => {
sharp().heif({ compression: 'fail' });
});
});
it('invalid chromaSubsampling should throw an error', () => {
assert.throws(() => {
sharp().heif({ chromaSubsampling: 'fail' });
});
});
it('valid chromaSubsampling does not throw an error', () => {
assert.doesNotThrow(() => {
sharp().heif({ chromaSubsampling: '4:4:4' });
});
});
});
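
The chromaSubsampling value wired into the HEIF baton earlier in this diff is passed through the existing heif() output method. A minimal usage sketch, assuming a libvips build with HEIF/AVIF support; the file names below are placeholders, not part of this changeset:

const sharp = require('sharp');

// Keep full chroma resolution (4:4:4) instead of the 4:2:0 default.
sharp('input.jpg')
  .heif({ compression: 'av1', quality: 50, chromaSubsampling: '4:4:4' })
  .toFile('output.avif')
  .then(info => console.log(info.format, info.size))
  .catch(console.error);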

View File

@@ -7,6 +7,8 @@ const rimraf = require('rimraf');
const sharp = require('../../');
const fixtures = require('../fixtures');
const outputJpg = fixtures.path('output.jpg');
describe('Input/output', function () {
beforeEach(function () {
sharp.cache(false);
@@ -16,16 +18,16 @@ describe('Input/output', function () {
});
it('Read from File and write to Stream', function (done) {
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
sharp(outputJpg).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
});
sharp(fixtures.inputJpg).resize(320, 240).pipe(writable);
@@ -33,16 +35,16 @@ describe('Input/output', function () {
it('Read from Buffer and write to Stream', function (done) {
const inputJpgBuffer = fs.readFileSync(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
sharp(outputJpg).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
});
sharp(inputJpgBuffer).resize(320, 240).pipe(writable);
@@ -50,13 +52,13 @@ describe('Input/output', function () {
it('Read from Stream and write to File', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const pipeline = sharp().resize(320, 240).toFile(fixtures.outputJpg, function (err, info) {
const pipeline = sharp().resize(320, 240).toFile(outputJpg, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
readable.pipe(pipeline);
});
@@ -131,25 +133,57 @@ describe('Input/output', function () {
it('Read from Stream and write to Stream', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
sharp(outputJpg).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
});
const pipeline = sharp().resize(320, 240);
readable.pipe(pipeline).pipe(writable);
});
it('Read from Uint8Array and write to Buffer', async () => {
const uint8array = Uint8Array.from([255, 255, 255, 0, 0, 0]);
const { data, info } = await sharp(uint8array, {
raw: {
width: 2,
height: 1,
channels: 3
}
}).toBuffer({ resolveWithObject: true });
assert.deepStrictEqual(uint8array, new Uint8Array(data));
assert.strictEqual(info.width, 2);
assert.strictEqual(info.height, 1);
});
it('Read from Uint8ClampedArray and output to Buffer', async () => {
// A Uint8ClampedArray is the same as a Uint8Array except that values are
// clamped to the 0-255 range, so it is worth covering here as well
const uint8array = Uint8ClampedArray.from([255, 255, 255, 0, 0, 0]);
const { data, info } = await sharp(uint8array, {
raw: {
width: 2,
height: 1,
channels: 3
}
}).toBuffer({ resolveWithObject: true });
assert.deepStrictEqual(uint8array, new Uint8ClampedArray(data));
assert.strictEqual(info.width, 2);
assert.strictEqual(info.height, 1);
});
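
These two tests cover the new Uint8Array and Uint8ClampedArray input support. A condensed sketch of the same pattern outside the test harness (the 2x1 dimensions and the PNG conversion are illustrative, not part of the changeset):

const sharp = require('sharp');

// Raw RGB pixels held in a Uint8ClampedArray, e.g. copied from an
// ImageData-like source.
const pixels = Uint8ClampedArray.from([255, 255, 255, 0, 0, 0]);

sharp(pixels, { raw: { width: 2, height: 1, channels: 3 } })
  .png()
  .toBuffer()
  .then(png => console.log(`encoded ${png.length} bytes of PNG`));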
it('Stream should emit info event', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
const pipeline = sharp().resize(320, 240);
let infoEventEmitted = false;
pipeline.on('info', function (info) {
@@ -161,7 +195,7 @@ describe('Input/output', function () {
});
writable.on('close', function () {
assert.strictEqual(true, infoEventEmitted);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
readable.pipe(pipeline).pipe(writable);
});
@@ -173,10 +207,10 @@ describe('Input/output', function () {
anErrorWasEmitted = !!err;
}).on('end', function () {
assert(anErrorWasEmitted);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
const readableButNotAnImage = fs.createReadStream(__filename);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
readableButNotAnImage.pipe(pipeline).pipe(writable);
});
@@ -187,24 +221,24 @@ describe('Input/output', function () {
anErrorWasEmitted = !!err;
}).on('end', function () {
assert(anErrorWasEmitted);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
readableButNotAnImage.pipe(writable);
});
it('Readable side of Stream can start flowing after Writable side has finished', function (done) {
const readable = fs.createReadStream(fixtures.inputJpg);
const writable = fs.createWriteStream(fixtures.outputJpg);
const writable = fs.createWriteStream(outputJpg);
writable.on('close', function () {
sharp(fixtures.outputJpg).toBuffer(function (err, data, info) {
sharp(outputJpg).toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(true, data.length > 0);
assert.strictEqual(data.length, info.size);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
rimraf(fixtures.outputJpg, done);
rimraf(outputJpg, done);
});
});
const pipeline = sharp().resize(320, 240);
@@ -411,68 +445,70 @@ describe('Input/output', function () {
});
describe('Output filename with unknown extension', function () {
const outputZoinks = fixtures.path('output.zoinks');
it('Match JPEG input', function (done) {
sharp(fixtures.inputJpg)
.resize(320, 80)
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
it('Match PNG input', function (done) {
sharp(fixtures.inputPng)
.resize(320, 80)
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
it('Match WebP input', function (done) {
sharp(fixtures.inputWebP)
.resize(320, 80)
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('webp', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
it('Match TIFF input', function (done) {
sharp(fixtures.inputTiff)
.resize(320, 80)
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('tiff', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
it('Autoconvert GIF input to PNG output', function (done) {
sharp(fixtures.inputGif)
.resize(320, 80)
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual(sharp.format.magick.input.buffer ? 'gif' : 'png', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
@@ -480,13 +516,13 @@ describe('Input/output', function () {
sharp(fixtures.inputPng)
.resize(320, 80)
.jpeg()
.toFile(fixtures.outputZoinks, function (err, info) {
.toFile(outputZoinks, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
rimraf(fixtures.outputZoinks, done);
rimraf(outputZoinks, done);
});
});
});
@@ -516,19 +552,20 @@ describe('Input/output', function () {
});
it('toFormat=JPEG takes precedence over WebP extension', function (done) {
const outputWebP = fixtures.path('output.webp');
sharp(fixtures.inputPng)
.jpeg()
.toFile(fixtures.outputWebP, function (err, info) {
.toFile(outputWebP, function (err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
done();
rimraf(outputWebP, done);
});
});
it('toFormat=WebP takes precedence over JPEG extension', function (done) {
sharp(fixtures.inputPng)
.webp()
.toFile(fixtures.outputJpg, function (err, info) {
.toFile(outputJpg, function (err, info) {
if (err) throw err;
assert.strictEqual('webp', info.format);
done();
@@ -549,15 +586,16 @@ describe('Input/output', function () {
});
it('Save Vips V file', function (done) {
const outputV = fixtures.path('output.v');
sharp(fixtures.inputJpg)
.extract({ left: 910, top: 1105, width: 70, height: 60 })
.toFile(fixtures.outputV, function (err, info) {
.toFile(outputV, function (err, info) {
if (err) throw err;
assert.strictEqual(true, info.size > 0);
assert.strictEqual('v', info.format);
assert.strictEqual(70, info.width);
assert.strictEqual(60, info.height);
rimraf(fixtures.outputV, done);
rimraf(outputV, done);
});
});

View File

@@ -504,6 +504,7 @@ describe('Image metadata', function () {
it('Apply CMYK output ICC profile', function (done) {
const output = fixtures.path('output.icc-cmyk.jpg');
sharp(fixtures.inputJpg)
.resize(64)
.withMetadata({ icc: 'cmyk' })
.toFile(output, function (err, info) {
if (err) throw err;
@@ -528,11 +529,11 @@ describe('Image metadata', function () {
it('Apply custom output ICC profile', function (done) {
const output = fixtures.path('output.hilutite.jpg');
sharp(fixtures.inputJpg)
.resize(64)
.withMetadata({ icc: fixtures.path('hilutite.icm') })
.toFile(output, function (err, info) {
if (err) throw err;
fixtures.assertMaxColourDistance(output, fixtures.path('expected/hilutite.jpg'), 0);
fixtures.assertMaxColourDistance(output, fixtures.inputJpg, 16.5);
fixtures.assertMaxColourDistance(output, fixtures.path('expected/hilutite.jpg'), 9);
done();
});
});
@@ -667,7 +668,7 @@ describe('Image metadata', function () {
sharp(fixtures.inputJpgWithCorruptHeader)
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input file has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
assert.ok(err.message.includes('Input file has corrupt header: VipsJpeg: Premature end of'), err);
done();
});
});
@@ -676,7 +677,7 @@ describe('Image metadata', function () {
sharp(fs.readFileSync(fixtures.inputJpgWithCorruptHeader))
.metadata(function (err) {
assert.strictEqual(true, !!err);
assert.strictEqual(true, /Input buffer has corrupt header: VipsJpeg: Premature end of JPEG file/.test(err.message));
assert.ok(err.message.includes('Input buffer has corrupt header: VipsJpeg: Premature end of'), err);
done();
});
});

test/unit/noise.js (new file, 258 lines)
View File

@@ -0,0 +1,258 @@
'use strict';
const assert = require('assert');
const sharp = require('../../');
const fixtures = require('../fixtures');
describe('Gaussian noise', function () {
it('generate single-channel gaussian noise', function (done) {
const output = fixtures.path('output.noise-1-channel.png');
const noise = sharp({
create: {
width: 1024,
height: 768,
channels: 1, // b-w
noise: {
type: 'gaussian',
mean: 128,
sigma: 30
}
}
});
noise.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(1024, info.width);
assert.strictEqual(768, info.height);
assert.strictEqual(1, info.channels);
sharp(output).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual('b-w', metadata.space);
assert.strictEqual('uchar', metadata.depth);
done();
});
});
});
it('generate 3-channel gaussian noise', function (done) {
const output = fixtures.path('output.noise-3-channels.png');
const noise = sharp({
create: {
width: 1024,
height: 768,
channels: 3, // sRGB
noise: {
type: 'gaussian',
mean: 128,
sigma: 30
}
}
});
noise.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(1024, info.width);
assert.strictEqual(768, info.height);
assert.strictEqual(3, info.channels);
sharp(output).metadata(function (err, metadata) {
if (err) throw err;
assert.strictEqual('srgb', metadata.space);
assert.strictEqual('uchar', metadata.depth);
done();
});
});
});
it('overlay 3-channels gaussian noise over image', function (done) {
const output = fixtures.path('output.noise-image.jpg');
const noise = sharp({
create: {
width: 320,
height: 240,
channels: 3,
noise: {
type: 'gaussian',
mean: 0,
sigma: 5
}
}
});
noise.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(3, info.channels);
sharp(fixtures.inputJpg)
.resize(320, 240)
.composite([
{
input: data,
blend: 'exclusion',
raw: {
width: info.width,
height: info.height,
channels: info.channels
}
}
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('jpeg', info.format);
assert.strictEqual(320, info.width);
assert.strictEqual(240, info.height);
assert.strictEqual(3, info.channels);
// perceptual hashing detects that images are the same (difference is <=1%)
fixtures.assertSimilar(output, fixtures.inputJpg, function (err) {
if (err) throw err;
done();
});
});
});
});
it('overlay strong single-channel (sRGB) gaussian noise with 25% transparency over transparent png image', function (done) {
const output = fixtures.path('output.noise-image-transparent.png');
const width = 320;
const height = 240;
const rawData = {
width,
height,
channels: 1
};
const noise = sharp({
create: {
width,
height,
channels: 1,
noise: {
type: 'gaussian',
mean: 200,
sigma: 30
}
}
});
noise
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(1, info.channels);
sharp(data, { raw: rawData })
.joinChannel(data, { raw: rawData }) // r channel
.joinChannel(data, { raw: rawData }) // b channel
.joinChannel(Buffer.alloc(width * height, 64), { raw: rawData }) // alpha channel
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual(4, info.channels);
sharp(fixtures.inputPngRGBWithAlpha)
.resize(width, height)
.composite([
{
input: data,
blend: 'exclusion',
raw: {
width: info.width,
height: info.height,
channels: info.channels
}
}
])
.toFile(output, function (err, info) {
if (err) throw err;
assert.strictEqual('png', info.format);
assert.strictEqual(width, info.width);
assert.strictEqual(height, info.height);
assert.strictEqual(4, info.channels);
fixtures.assertSimilar(output, fixtures.inputPngRGBWithAlpha, { threshold: 10 }, function (err) {
if (err) throw err;
done();
});
});
});
});
});
it('no create object properties specified', function () {
assert.throws(function () {
sharp({
create: {}
});
});
});
it('invalid noise object', function () {
assert.throws(function () {
sharp({
create: {
width: 100,
height: 100,
channels: 3,
noise: 'gaussian'
}
});
});
});
it('unknown type of noise', function () {
assert.throws(function () {
sharp({
create: {
width: 100,
height: 100,
channels: 3,
noise: {
type: 'unknown'
}
}
});
});
});
it('gaussian noise, invalid number of channels', function () {
assert.throws(function () {
sharp({
create: {
width: 100,
height: 100,
channels: 5,
noise: {
type: 'gaussian',
mean: 5,
sigma: 10
}
}
});
});
});
it('gaussian noise, invalid mean', function () {
assert.throws(function () {
sharp({
create: {
width: 100,
height: 100,
channels: 1,
noise: {
type: 'gaussian',
mean: -1.5,
sigma: 10
}
}
});
});
});
it('gaussian noise, invalid sigma', function () {
assert.throws(function () {
sharp({
create: {
width: 100,
height: 100,
channels: 1,
noise: {
type: 'gaussian',
mean: 0,
sigma: -1.5
}
}
});
});
});
});
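
The new test file above exercises the exposed gaussnoise operation end to end; stripped of assertions, the core pattern reduces to the following sketch (dimensions, mean/sigma and the output path are illustrative):

const sharp = require('sharp');

// Generate an sRGB image filled with Gaussian noise via the create.noise
// option; the values below are arbitrary examples.
sharp({
  create: {
    width: 320,
    height: 240,
    channels: 3,
    noise: { type: 'gaussian', mean: 128, sigma: 30 }
  }
})
  .png()
  .toFile('noise.png')
  .then(info => console.log(`${info.width}x${info.height}, ${info.channels} channels`));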

View File

@@ -8,6 +8,8 @@ const rimraf = require('rimraf');
const sharp = require('../../');
const fixtures = require('../fixtures');
const outputTiff = fixtures.path('output.tiff');
describe('TIFF', function () {
it('Load TIFF from Buffer', function (done) {
const inputTiffBuffer = fs.readFileSync(fixtures.inputTiff);
@@ -120,11 +122,11 @@ describe('TIFF', function () {
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert.strictEqual(startSize, info.size);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -137,11 +139,11 @@ describe('TIFF', function () {
compression: 'none',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < (startSize / 2));
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -157,12 +159,12 @@ describe('TIFF', function () {
xres: 1000,
yres: 1000
})
.toFile(fixtures.outputTiff)
.then(() => sharp(fixtures.outputTiff)
.toFile(outputTiff)
.then(() => sharp(outputTiff)
.metadata()
.then(({ density }) => {
assert.strictEqual(25400, density);
return promisify(rimraf)(fixtures.outputTiff);
return promisify(rimraf)(outputTiff);
})
)
);
@@ -201,12 +203,12 @@ describe('TIFF', function () {
compression: 'lzw',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert.strictEqual(3, info.channels);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -222,7 +224,7 @@ describe('TIFF', function () {
.tiff({
compression: 'lzw'
})
.toFile(fixtures.outputTiff)
.toFile(outputTiff)
.then(info => {
assert.strictEqual(4, info.channels);
})
@@ -254,11 +256,11 @@ describe('TIFF', function () {
bitdepth: 1,
compression: 'ccittfax4'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -269,26 +271,26 @@ describe('TIFF', function () {
compression: 'deflate',
predictor: 'horizontal'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
it('TIFF deflate compression with float predictor shrinks test file', function (done) {
it('TIFF deflate compression of integral input with float predictor increases file size', function (done) {
const startSize = fs.statSync(fixtures.inputTiffUncompressed).size;
sharp(fixtures.inputTiffUncompressed)
.tiff({
compression: 'deflate',
predictor: 'float'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
assert(info.size > startSize);
rimraf(outputTiff, done);
});
});
@@ -299,11 +301,11 @@ describe('TIFF', function () {
compression: 'deflate',
predictor: 'none'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -313,11 +315,11 @@ describe('TIFF', function () {
.tiff({
compression: 'jpeg'
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size < startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
@@ -385,11 +387,11 @@ describe('TIFF', function () {
tileHeight: 256,
tileWidth: 256
})
.toFile(fixtures.outputTiff, (err, info) => {
.toFile(outputTiff, (err, info) => {
if (err) throw err;
assert.strictEqual('tiff', info.format);
assert(info.size > startSize);
rimraf(fixtures.outputTiff, done);
rimraf(outputTiff, done);
});
});
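
The renamed float-predictor test above ties back to the pipeline change earlier in this diff, where integral pixel data is cast to VIPS_FORMAT_FLOAT before tiffsave when the float predictor is selected. Driven from JavaScript it looks roughly like this (a sketch with an assumed uncompressed 8-bit input file):

const sharp = require('sharp');

// With predictor: 'float' an 8-bit input is cast to float first, so the
// deflate-compressed output can end up larger than the uncompressed source.
sharp('uncompressed-input.tiff')
  .tiff({ compression: 'deflate', predictor: 'float' })
  .toFile('output.tiff')
  .then(info => console.log(info.format, info.size));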

View File

@@ -6,22 +6,19 @@ const sharp = require('../../');
const fixtures = require('../fixtures');
describe('toBuffer', () => {
it('reusing same sharp object does not reset previously passed parameters to toBuffer', (done) => {
it('reusing same sharp object does not reset previously passed parameters to toBuffer', async () => {
const image = sharp(fixtures.inputJpg);
image.toBuffer({ resolveWithObject: true }).then((obj) => {
image.toBuffer().then((buff) => {
assert.strictEqual(Buffer.isBuffer(buff), true);
assert.strictEqual(typeof obj, 'object');
done();
});
});
const obj = await image.toBuffer({ resolveWithObject: true });
assert.strictEqual(typeof obj, 'object');
assert.strictEqual(typeof obj.info, 'object');
assert.strictEqual(Buffer.isBuffer(obj.data), true);
const data = await image.toBuffer();
assert.strictEqual(Buffer.isBuffer(data), true);
});
it('correctly process animated webp with height > 16383', (done) => {
const image = sharp(fixtures.inputWebPAnimatedBigHeight, { animated: true });
image.toBuffer().then((buff) => {
assert.strictEqual(Buffer.isBuffer(buff), true);
done();
});
it('correctly process animated webp with height > 16383', async () => {
const data = await sharp(fixtures.inputWebPAnimatedBigHeight, { animated: true })
.toBuffer();
assert.strictEqual(Buffer.isBuffer(data), true);
});
});