diff --git a/binding.gyp b/binding.gyp index e561c761..8f87dd13 100644 --- a/binding.gyp +++ b/binding.gyp @@ -221,7 +221,7 @@ '<(module_root_dir)/lib/libintl-8.dll', '<(module_root_dir)/lib/libjpeg-62.dll', '<(module_root_dir)/lib/liblcms2-2.dll', - '<(module_root_dir)/lib/libopenjpeg-1.dll', + '<(module_root_dir)/lib/libopenjp2.dll', '<(module_root_dir)/lib/libopenslide-0.dll', '<(module_root_dir)/lib/libpango-1.0-0.dll', '<(module_root_dir)/lib/libpangocairo-1.0-0.dll', diff --git a/docs/api.md b/docs/api.md index e2c9630d..e422e012 100644 --- a/docs/api.md +++ b/docs/api.md @@ -27,6 +27,7 @@ The object returned by the constructor implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class. JPEG, PNG or WebP format image data can be streamed out from this object. +When using Stream based output, derived attributes are available from the `info` event. ```javascript sharp('input.jpg') @@ -37,6 +38,19 @@ sharp('input.jpg') }); ``` +```javascript +// Read image data from readableStream, +// resize to 300 pixels wide, +// emit an 'info' event with calculated dimensions +// and finally write image data to writableStream +var transformer = sharp() + .resize(300) + .on('info', function(info) { + console.log('Image height is ' + info.height); + }); +readableStream.pipe(transformer).pipe(writableStream); +``` + #### metadata([callback]) Fast access to image metadata without decoding any compressed image data. @@ -48,6 +62,7 @@ Fast access to image metadata without decoding any compressed image data. * `height`: Number of pixels high * `space`: Name of colour space interpretation e.g. `srgb`, `rgb`, `scrgb`, `cmyk`, `lab`, `xyz`, `b-w` [...](https://github.com/jcupitt/libvips/blob/master/libvips/iofuncs/enumtypes.c#L522) * `channels`: Number of bands e.g. `3` for sRGB, `4` for CMYK +* `density`: Number of pixels per inch (DPI), if present * `hasProfile`: Boolean indicating the presence of an embedded ICC profile * `hasAlpha`: Boolean indicating the presence of an alpha transparency channel * `orientation`: Number value of the EXIF Orientation header, if present @@ -110,23 +125,37 @@ Scale output to `width` x `height`. By default, the resized image is cropped to `height` is the integral Number of pixels high the resultant image should be, between 1 and 16383. Use `null` or `undefined` to auto-scale the height to match the width. -#### crop([gravity]) +#### crop([option]) Crop the resized image to the exact size specified, the default behaviour. -`gravity`, if present, is a String or an attribute of the `sharp.gravity` Object e.g. `sharp.gravity.north`. +`option`, if present, is an attribute of: -Possible values are `north`, `northeast`, `east`, `southeast`, `south`, `southwest`, `west`, `northwest`, `center` and `centre`. -The default gravity is `center`/`centre`. +* `sharp.gravity` e.g. `sharp.gravity.north`, to crop to an edge or corner, or +* `sharp.strategy` e.g. `sharp.strategy.entropy`, to crop dynamically. + +Possible attributes of `sharp.gravity` are +`north`, `northeast`, `east`, `southeast`, `south`, +`southwest`, `west`, `northwest`, `center` and `centre`. + +Possible attributes of the experimental `sharp.strategy` are: + +* `entropy`: resize so one dimension is at its target size +then repeatedly remove pixels from the edge with the lowest +[Shannon entropy](https://en.wikipedia.org/wiki/Entropy_%28information_theory%29) +until it too reaches the target size. + +The default crop option is a `center`/`centre` gravity. 
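For comparison with the entropy example below, a gravity-based crop can be requested either via a `sharp.gravity` constant or its string name. This is a minimal sketch using placeholder file names:

```javascript
// Resize then crop to the top (north) edge of the image;
// the string 'north' and sharp.gravity.north are interchangeable here.
sharp('input.jpg')
  .resize(320, 240)
  .crop('north')
  .toFile('north.jpg', function(err, info) {
    if (err) throw err;
    // info.width === 320, info.height === 240
  });
```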
```javascript var transformer = sharp() - .resize(300, 200) + .resize(200, 200) - .crop(sharp.gravity.north) + .crop(sharp.strategy.entropy) .on('error', function(err) { console.log(err); }); -// Read image data from readableStream, resize and write image data to writableStream +// Read image data from readableStream +// Write 200px square auto-cropped image data to writableStream readableStream.pipe(transformer).pipe(writableStream); ``` @@ -265,7 +294,7 @@ sharp(input) #### background(rgba) -Set the background for the `embed` and `flatten` operations. +Set the background for the `embed`, `flatten` and `extend` operations. `rgba` is parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha. @@ -277,6 +306,25 @@ The default background is `{r: 0, g: 0, b: 0, a: 1}`, black without transparency Merge alpha transparency channel, if any, with `background`. +#### extend(extension) + +Extends/pads the edges of the image with `background`, where `extension` is one of: + +* a Number representing the pixel count to add to each edge, or +* an Object containing `top`, `left`, `bottom` and `right` attributes, each a Number of pixels to add to that edge. + +This operation will always occur after resizing and extraction, if any. + +```javascript +// Resize to 140 pixels wide, then add 10 transparent pixels +// to the top, left and right edges and 20 to the bottom edge +sharp(input) + .resize(140) + .background({r: 0, g: 0, b: 0, a: 0}) + .extend({top: 10, bottom: 20, left: 10, right: 10}) + ... +``` + #### negate() Produces the "negative" of the image. White => Black, Black => White, Blue => Yellow, etc. @@ -365,13 +413,18 @@ The output image will still be web-friendly sRGB and contain three (identical) c Enhance output image contrast by stretching its luminance to cover the full dynamic range. This typically reduces performance by 30%. -#### overlayWith(path) +#### overlayWith(image, [options]) -_Experimental_ +Overlay (composite) an image containing an alpha channel over the processed (resized, extracted etc.) image. -Alpha composite image at `path` over the processed (resized, extracted) image. The dimensions of the two images must match. +`image` is one of the following, and must be the same size or smaller than the processed image: -* `path` is a String containing the path to an image file with an alpha channel. +* Buffer containing PNG, WebP, GIF or SVG image data, or +* String containing the path to an image file, with most major transparency formats supported. + +`options`, if present, is an Object with the following optional attributes: + +* `gravity` is a String or an attribute of the `sharp.gravity` Object e.g. `sharp.gravity.north` at which to place the overlay, defaulting to `center`/`centre`. ```javascript sharp('input.png') @@ -379,7 +432,7 @@ sharp('input.png') .resize(300) .flatten() .background('#ff6600') - .overlayWith('overlay.png') + .overlayWith('overlay.png', { gravity: sharp.gravity.southeast }) .sharpen() .withMetadata() .quality(90) @@ -387,8 +440,8 @@ sharp('input.png') .toBuffer() .then(function(outputBuffer) { // outputBuffer contains upside down, 300px wide, alpha channel flattened - // onto orange background, composited with overlay.png, sharpened, - // with metadata, 90% quality WebP image data. Phew! + // onto orange background, composited with overlay.png with SE gravity, + // sharpened, with metadata, 90% quality WebP image data. Phew!
}); ``` @@ -471,18 +524,25 @@ This has no effect if the input image does not have an EXIF `Orientation` tag. The default behaviour, when `withMetadata` is not used, is to strip all metadata and convert to the device-independent sRGB colour space. -#### tile([size], [overlap]) +#### tile(options) -The size and overlap, in pixels, of square Deep Zoom image pyramid tiles. +The size, overlap and directory layout to use when generating square Deep Zoom image pyramid tiles. + +`options` is an Object with one or more of the following attributes: * `size` is an integral Number between 1 and 8192. The default value is 256 pixels. * `overlap` is an integral Number between 0 and 8192. The default value is 0 pixels. +* `layout` is a String, with value `dz`, `zoomify` or `google`. The default value is `dz`. ```javascript -sharp('input.tiff').tile(256).toFile('output.dzi', function(err, info) { - // The output.dzi file is the XML format Deep Zoom definition - // The output_files directory contains 256x256 pixel tiles grouped by zoom level -}); +sharp('input.tiff') + .tile({ + size: 512 + }) + .toFile('output.dzi', function(err, info) { + // output.dzi is the Deep Zoom XML definition + // output_files contains 512x512 tiles grouped by zoom level + }); ``` #### withoutChromaSubsampling() diff --git a/docs/changelog.md b/docs/changelog.md index 46f93c63..09ea8ec4 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,31 @@ # Changelog +### v0.14 - "*needle*" + +* Add ability to extend (pad) the edges of an image. + [#128](https://github.com/lovell/sharp/issues/128) + [@blowsie](https://github.com/blowsie) + +* Add support for Zoomify and Google tile layouts. Breaks existing tile API. + [#223](https://github.com/lovell/sharp/issues/223) + [@bdunnette](https://github.com/bdunnette) + +* Improvements to overlayWith: differing sizes/formats, gravity, buffer input. + [#239](https://github.com/lovell/sharp/issues/239) + [@chrisriley](https://github.com/chrisriley) + +* Add entropy-based crop strategy to remove least interesting edges. + [#295](https://github.com/lovell/sharp/issues/295) + [@rightaway](https://github.com/rightaway) + +* Expose density metadata; set density of images from vector input. + [#338](https://github.com/lovell/sharp/issues/338) + [@lookfirst](https://github.com/lookfirst) + +* Emit post-processing 'info' event for Stream output. 
+ [#367](https://github.com/lovell/sharp/issues/367) + [@salzhrani](https://github.com/salzhrani) + ### v0.13 - "*mind*" #### v0.13.1 - 27th February 2016 diff --git a/index.js b/index.js index 583b84a3..dbb70a48 100644 --- a/index.js +++ b/index.js @@ -46,7 +46,7 @@ var Sharp = function(input, options) { streamIn: false, sequentialRead: false, limitInputPixels: maximum.pixels, - density: '72', + density: 72, rawWidth: 0, rawHeight: 0, rawChannels: 0, @@ -64,11 +64,15 @@ var Sharp = function(input, options) { width: -1, height: -1, canvas: 'crop', - gravity: 0, + crop: 0, angle: 0, rotateBeforePreExtract: false, flip: false, flop: false, + extendTop: 0, + extendBottom: 0, + extendLeft: 0, + extendRight: 0, withoutEnlargement: false, interpolator: 'bicubic', // operations @@ -84,7 +88,9 @@ var Sharp = function(input, options) { greyscale: false, normalize: 0, // overlay - overlayPath: '', + overlayFileIn: '', + overlayBufferIn: null, + overlayGravity: 0, // output options formatOut: 'input', fileOut: '', @@ -106,13 +112,13 @@ var Sharp = function(input, options) { module.exports.queue.emit('change', queueLength); } }; - if (typeof input === 'string') { + if (isString(input)) { // input=file this.options.fileIn = input; - } else if (typeof input === 'object' && input instanceof Buffer) { + } else if (isBuffer(input)) { // input=buffer this.options.bufferIn = input; - } else if (typeof input === 'undefined' || input === null) { + } else if (!isDefined(input)) { // input=stream this.options.streamIn = true; } else { @@ -148,12 +154,21 @@ var isDefined = function(val) { var isObject = function(val) { return typeof val === 'object'; }; +var isBuffer = function(val) { + return typeof val === 'object' && val instanceof Buffer; +}; +var isString = function(val) { + return typeof val === 'string' && val.length > 0; +}; var isInteger = function(val) { return typeof val === 'number' && !Number.isNaN(val) && val % 1 === 0; }; var inRange = function(val, min, max) { return val >= min && val <= max; }; +var contains = function(val, list) { + return list.indexOf(val) !== -1; +}; /* Set input-related options @@ -164,7 +179,7 @@ Sharp.prototype._inputOptions = function(options) { // Density if (isDefined(options.density)) { if (isInteger(options.density) && inRange(options.density, 1, 2400)) { - this.options.density = options.density.toString(); + this.options.density = options.density; } else { throw new Error('Invalid density (1 to 2400) ' + options.density); } @@ -216,48 +231,53 @@ Sharp.prototype._write = function(chunk, encoding, callback) { } }; -// Crop this part of the resized image (Center/Centre, North, East, South, West) +// Weighting to apply to image crop module.exports.gravity = { - 'center': 0, - 'centre': 0, - 'north': 1, - 'east': 2, - 'south': 3, - 'west': 4, - 'northeast': 5, - 'southeast': 6, - 'southwest': 7, - 'northwest': 8 + center: 0, + centre: 0, + north: 1, + east: 2, + south: 3, + west: 4, + northeast: 5, + southeast: 6, + southwest: 7, + northwest: 8 }; -Sharp.prototype.crop = function(gravity) { +// Strategies for automagic behaviour +module.exports.strategy = { + entropy: 16 +}; + +/* + What part of the image should be retained when cropping? 
+*/ +Sharp.prototype.crop = function(crop) { this.options.canvas = 'crop'; - if (typeof gravity === 'undefined') { - this.options.gravity = module.exports.gravity.center; - } else if (typeof gravity === 'number' && !Number.isNaN(gravity) && gravity >= 0 && gravity <= 8) { - this.options.gravity = gravity; - } else if (typeof gravity === 'string' && typeof module.exports.gravity[gravity] === 'number') { - this.options.gravity = module.exports.gravity[gravity]; + if (!isDefined(crop)) { + // Default + this.options.crop = module.exports.gravity.center; + } else if (isInteger(crop) && inRange(crop, 0, 8)) { + // Gravity (numeric) + this.options.crop = crop; + } else if (isString(crop) && isInteger(module.exports.gravity[crop])) { + // Gravity (string) + this.options.crop = module.exports.gravity[crop]; + } else if (isInteger(crop) && crop === module.exports.strategy.entropy) { + // Strategy + this.options.crop = crop; } else { - throw new Error('Unsupported crop gravity ' + gravity); + throw new Error('Unsupported crop ' + crop); } return this; }; Sharp.prototype.extract = function(options) { - if (!options || typeof options !== 'object') { - // Legacy extract(top,left,width,height) syntax - options = { - left: arguments[1], - top: arguments[0], - width: arguments[2], - height: arguments[3] - }; - } var suffix = this.options.width === -1 && this.options.height === -1 ? 'Pre' : 'Post'; ['left', 'top', 'width', 'height'].forEach(function (name) { var value = options[name]; - if (typeof value === 'number' && !Number.isNaN(value) && value % 1 === 0 && value >= 0) { + if (isInteger(value) && value >= 0) { this.options[name + (name === 'left' || name === 'top' ? 'Offset' : '') + suffix] = value; } else { throw new Error('Non-integer value for ' + name + ' of ' + value); @@ -316,14 +336,26 @@ Sharp.prototype.negate = function(negate) { return this; }; -Sharp.prototype.overlayWith = function(overlayPath) { - if (typeof overlayPath !== 'string') { - throw new Error('The overlay path must be a string'); +/* + Overlay with another image, using an optional gravity +*/ +Sharp.prototype.overlayWith = function(overlay, options) { + if (isString(overlay)) { + this.options.overlayFileIn = overlay; + } else if (isBuffer(overlay)) { + this.options.overlayBufferIn = overlay; + } else { + throw new Error('Unsupported overlay ' + typeof overlay); } - if (overlayPath === '') { - throw new Error('The overlay path cannot be empty'); + if (isObject(options)) { + if (isInteger(options.gravity) && inRange(options.gravity, 0, 8)) { + this.options.overlayGravity = options.gravity; + } else if (isString(options.gravity) && isInteger(module.exports.gravity[options.gravity])) { + this.options.overlayGravity = module.exports.gravity[options.gravity]; + } else if (isDefined(options.gravity)) { + throw new Error('Unsupported overlay gravity ' + options.gravity); + } } - this.options.overlayPath = overlayPath; return this; }; @@ -605,27 +637,63 @@ Sharp.prototype.withMetadata = function(withMetadata) { }; /* - Tile size and overlap for Deep Zoom output + Tile-based deep zoom output options: size, overlap, layout */ -Sharp.prototype.tile = function(size, overlap) { - // Size of square tiles, in pixels - if (typeof size !== 'undefined' && size !== null) { - if (!Number.isNaN(size) && size % 1 === 0 && size >= 1 && size <= 8192) { - this.options.tileSize = size; - } else { - throw new Error('Invalid tile size (1 to 8192) ' + size); +Sharp.prototype.tile = function(tile) { + if (isObject(tile)) { + // Size of square tiles, in pixels 
+ if (isDefined(tile.size)) { + if (isInteger(tile.size) && inRange(tile.size, 1, 8192)) { + this.options.tileSize = tile.size; + } else { + throw new Error('Invalid tile size (1 to 8192) ' + tile.size); + } + } + // Overlap of tiles, in pixels + if (isDefined(tile.overlap)) { + if (isInteger(tile.overlap) && inRange(tile.overlap, 0, 8192)) { + if (tile.overlap > this.options.tileSize) { + throw new Error('Tile overlap ' + tile.overlap + ' cannot be larger than tile size ' + this.options.tileSize); + } + this.options.tileOverlap = tile.overlap; + } else { + throw new Error('Invalid tile overlap (0 to 8192) ' + tile.overlap); + } + } + // Layout + if (isDefined(tile.layout)) { + if (isString(tile.layout) && contains(tile.layout, ['dz', 'google', 'zoomify'])) { + this.options.tileLayout = tile.layout; + } else { + throw new Error('Invalid tile layout ' + tile.layout); + } } } - // Overlap of tiles, in pixels - if (typeof overlap !== 'undefined' && overlap !== null) { - if (!Number.isNaN(overlap) && overlap % 1 === 0 && overlap >= 0 && overlap <= 8192) { - if (overlap > this.options.tileSize) { - throw new Error('Tile overlap ' + overlap + ' cannot be larger than tile size ' + this.options.tileSize); - } - this.options.tileOverlap = overlap; - } else { - throw new Error('Invalid tile overlap (0 to 8192) ' + overlap); - } + return this; +}; + +/* + Extend edges +*/ +Sharp.prototype.extend = function(extend) { + if (isInteger(extend) && extend > 0) { + this.options.extendTop = extend; + this.options.extendBottom = extend; + this.options.extendLeft = extend; + this.options.extendRight = extend; + } else if ( + isObject(extend) && + isInteger(extend.top) && extend.top >= 0 && + isInteger(extend.bottom) && extend.bottom >= 0 && + isInteger(extend.left) && extend.left >= 0 && + isInteger(extend.right) && extend.right >= 0 + ) { + this.options.extendTop = extend.top; + this.options.extendBottom = extend.bottom; + this.options.extendLeft = extend.left; + this.options.extendRight = extend.right; + } else { + throw new Error('Invalid edge extension ' + extend); } return this; }; @@ -783,10 +851,11 @@ Sharp.prototype._pipeline = function(callback) { if (this.options.streamIn) { // output=stream, input=stream this.on('finish', function() { - sharp.pipeline(that.options, function(err, data) { + sharp.pipeline(that.options, function(err, data, info) { if (err) { that.emit('error', err); } else { + that.emit('info', info); that.push(data); } that.push(null); @@ -794,10 +863,11 @@ Sharp.prototype._pipeline = function(callback) { }); } else { // output=stream, input=file/buffer - sharp.pipeline(this.options, function(err, data) { + sharp.pipeline(this.options, function(err, data, info) { if (err) { that.emit('error', err); } else { + that.emit('info', info); that.push(data); } that.push(null); diff --git a/package.json b/package.json index c9319cc6..3dd1b01c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "sharp", - "version": "0.13.1", + "version": "0.14.0", "author": "Lovell Fuller ", "contributors": [ "Pierre Inglebert ", @@ -47,7 +47,7 @@ "vips" ], "dependencies": { - "bluebird": "^3.3.3", + "bluebird": "^3.3.4", "color": "^0.11.1", "nan": "^2.2.0", "semver": "^5.1.0", @@ -68,7 +68,7 @@ }, "license": "Apache-2.0", "config": { - "libvips": "8.2.2" + "libvips": "8.2.3" }, "engines": { "node": ">=0.10" diff --git a/packaging/arm/build.sh b/packaging/arm/build.sh index e677dab9..3b1e3b1a 100755 --- a/packaging/arm/build.sh +++ b/packaging/arm/build.sh @@ -19,7 +19,7 @@ export 
CXXFLAGS="-O3" # Dependency version numbers VERSION_ZLIB=1.2.8 VERSION_FFI=3.2.1 -VERSION_GLIB=2.47.5 +VERSION_GLIB=2.47.6 VERSION_XML2=2.9.3 VERSION_GSF=1.14.34 VERSION_EXIF=0.6.21 @@ -29,8 +29,8 @@ VERSION_JPEG=1.4.2 VERSION_PNG16=1.6.21 VERSION_WEBP=0.5.0 VERSION_TIFF=4.0.6 -VERSION_ORC=0.4.24 -VERSION_VIPS=8.2.2 +VERSION_ORC=0.4.25 +VERSION_VIPS=8.2.3 mkdir ${DEPS}/zlib curl -Ls http://zlib.net/zlib-${VERSION_ZLIB}.tar.xz | tar xJC ${DEPS}/zlib --strip-components=1 diff --git a/packaging/build.sh b/packaging/build.sh index 24f8f849..2d8d619a 100755 --- a/packaging/build.sh +++ b/packaging/build.sh @@ -1,5 +1,7 @@ #!/bin/sh +VERSION_VIPS=8.2.3 + # Is docker available? if ! type docker >/dev/null; then @@ -13,15 +15,15 @@ fi docker build -t vips-dev-win win WIN_CONTAINER_ID=$(docker run -d vips-dev-win) -docker cp $WIN_CONTAINER_ID:/libvips-8.2.2-win.tar.gz . -docker rm $WIN_CONTAINER_ID +docker cp "${WIN_CONTAINER_ID}:/libvips-${VERSION_VIPS}-win.tar.gz" . +docker rm "${WIN_CONTAINER_ID}" # Linux docker build -t vips-dev-lin lin LIN_CONTAINER_ID=$(docker run -d vips-dev-lin) -docker cp $LIN_CONTAINER_ID:/libvips-8.2.2-lin.tar.gz . -docker rm $LIN_CONTAINER_ID +docker cp "${LIN_CONTAINER_ID}:/libvips-${VERSION_VIPS}-lin.tar.gz" . +docker rm "${LIN_CONTAINER_ID}" # Checksums diff --git a/packaging/lin/Dockerfile b/packaging/lin/Dockerfile index 7c19d927..c057e4a2 100644 --- a/packaging/lin/Dockerfile +++ b/packaging/lin/Dockerfile @@ -20,7 +20,7 @@ ENV PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${TARGET}/lib/pkgconfig \ # Dependency version numbers ENV VERSION_ZLIB=1.2.8 \ VERSION_FFI=3.2.1 \ - VERSION_GLIB=2.47.5 \ + VERSION_GLIB=2.47.6 \ VERSION_XML2=2.9.3 \ VERSION_GSF=1.14.34 \ VERSION_EXIF=0.6.21 \ @@ -30,8 +30,8 @@ ENV VERSION_ZLIB=1.2.8 \ VERSION_PNG16=1.6.21 \ VERSION_WEBP=0.5.0 \ VERSION_TIFF=4.0.6 \ - VERSION_ORC=0.4.24 \ - VERSION_VIPS=8.2.2 + VERSION_ORC=0.4.25 \ + VERSION_VIPS=8.2.3 RUN mkdir ${DEPS}/zlib RUN curl -Ls http://zlib.net/zlib-${VERSION_ZLIB}.tar.xz | tar xJC ${DEPS}/zlib --strip-components=1 diff --git a/packaging/win/Dockerfile b/packaging/win/Dockerfile index 74ddc3d6..07c49c9c 100644 --- a/packaging/win/Dockerfile +++ b/packaging/win/Dockerfile @@ -3,11 +3,13 @@ MAINTAINER Lovell Fuller RUN apt-get update && apt-get install -y curl zip +ENV VERSION_VIPS=8.2.3 + # Fetch and unzip RUN mkdir /vips WORKDIR /vips -RUN curl -O http://www.vips.ecs.soton.ac.uk/supported/8.2/win32/vips-dev-w64-8.2.2.zip -RUN unzip vips-dev-w64-8.2.2.zip +RUN curl -O http://www.vips.ecs.soton.ac.uk/supported/8.2/win32/vips-dev-w64-${VERSION_VIPS}.zip +RUN unzip vips-dev-w64-${VERSION_VIPS}.zip # Clean and zip WORKDIR /vips/vips-dev-8.2 @@ -15,4 +17,4 @@ RUN rm bin/libvipsCC-42.dll bin/libvips-cpp-42.dll bin/libgsf-win32-1-114.dll bi RUN cp bin/*.dll lib/ RUN cp -r lib64/* lib/ -RUN GZIP=-9 tar czf /libvips-8.2.2-win.tar.gz include lib/glib-2.0 lib/libvips.lib lib/libglib-2.0.lib lib/libgobject-2.0.lib lib/*.dll +RUN GZIP=-9 tar czf /libvips-${VERSION_VIPS}-win.tar.gz include lib/glib-2.0 lib/libvips.lib lib/libglib-2.0.lib lib/libgobject-2.0.lib lib/*.dll diff --git a/src/common.cc b/src/common.cc index 8fce1af4..46872aa8 100644 --- a/src/common.cc +++ b/src/common.cc @@ -176,6 +176,30 @@ namespace sharp { SetExifOrientation(image, 0); } + /* + Does this image have a non-default density? + */ + bool HasDensity(VImage image) { + return image.xres() > 1.0; + } + + /* + Get pixels/mm resolution as pixels/inch density.
*/ + int GetDensity(VImage image) { + return static_cast<int>(round(image.xres() * 25.4)); + } + + /* + Set pixels/mm resolution based on a pixels/inch density. + */ + void SetDensity(VImage image, const int density) { + const double pixelsPerMm = static_cast<double>(density) / 25.4; + image.set("Xres", pixelsPerMm); + image.set("Yres", pixelsPerMm); + image.set(VIPS_META_RESOLUTION_UNIT, "in"); + } + /* Called when a Buffer undergoes GC, required to support mixed runtime libraries in Windows */ @@ -185,4 +209,45 @@ namespace sharp { } } + /* + Calculate the (left, top) coordinates of the output image + within the input image, applying the given gravity. + */ + std::tuple<int, int> CalculateCrop(int const inWidth, int const inHeight, + int const outWidth, int const outHeight, int const gravity) { + + int left = 0; + int top = 0; + switch (gravity) { + case 1: // North + left = (inWidth - outWidth + 1) / 2; + break; + case 2: // East + left = inWidth - outWidth; + top = (inHeight - outHeight + 1) / 2; + break; + case 3: // South + left = (inWidth - outWidth + 1) / 2; + top = inHeight - outHeight; + break; + case 4: // West + top = (inHeight - outHeight + 1) / 2; + break; + case 5: // Northeast + left = inWidth - outWidth; + break; + case 6: // Southeast + left = inWidth - outWidth; + top = inHeight - outHeight; + break; + case 7: // Southwest + top = inHeight - outHeight; + break; + case 8: // Northwest + break; + default: // Centre + left = (inWidth - outWidth + 1) / 2; + top = (inHeight - outHeight + 1) / 2; + } + return std::make_tuple(left, top); + } + } // namespace sharp diff --git a/src/common.h b/src/common.h index 083e58c5..be7a2e78 100644 --- a/src/common.h +++ b/src/common.h @@ -2,6 +2,8 @@ #define SRC_COMMON_H_ #include <string> +#include <tuple> + #include <vips/vips8> using vips::VImage; @@ -75,11 +77,33 @@ namespace sharp { */ void RemoveExifOrientation(VImage image); + /* + Does this image have a non-default density? + */ + bool HasDensity(VImage image); + + /* + Get pixels/mm resolution as pixels/inch density. + */ + int GetDensity(VImage image); + + /* + Set pixels/mm resolution based on a pixels/inch density. + */ + void SetDensity(VImage image, const int density); + /* Called when a Buffer undergoes GC, required to support mixed runtime libraries in Windows */ void FreeCallback(char* data, void* hint); + /* + Calculate the (left, top) coordinates of the output image + within the input image, applying the given gravity.
+ */ + std::tuple CalculateCrop(int const inWidth, int const inHeight, + int const outWidth, int const outHeight, int const gravity); + } // namespace sharp #endif // SRC_COMMON_H_ diff --git a/src/metadata.cc b/src/metadata.cc index 196b6f54..f451fedc 100644 --- a/src/metadata.cc +++ b/src/metadata.cc @@ -38,6 +38,8 @@ using sharp::DetermineImageType; using sharp::HasProfile; using sharp::HasAlpha; using sharp::ExifOrientation; +using sharp::HasDensity; +using sharp::GetDensity; using sharp::FreeCallback; using sharp::counterQueue; @@ -52,6 +54,7 @@ struct MetadataBaton { int height; std::string space; int channels; + int density; bool hasProfile; bool hasAlpha; int orientation; @@ -63,6 +66,7 @@ struct MetadataBaton { MetadataBaton(): bufferInLength(0), + density(0), orientation(0), exifLength(0), iccLength(0) {} @@ -120,6 +124,9 @@ class MetadataWorker : public AsyncWorker { baton->height = image.height(); baton->space = vips_enum_nick(VIPS_TYPE_INTERPRETATION, image.interpretation()); baton->channels = image.bands(); + if (HasDensity(image)) { + baton->density = GetDensity(image); + } baton->hasProfile = HasProfile(image); // Derived attributes baton->hasAlpha = HasAlpha(image); @@ -161,6 +168,9 @@ class MetadataWorker : public AsyncWorker { Set(info, New("height").ToLocalChecked(), New(baton->height)); Set(info, New("space").ToLocalChecked(), New(baton->space).ToLocalChecked()); Set(info, New("channels").ToLocalChecked(), New(baton->channels)); + if (baton->density > 0) { + Set(info, New("density").ToLocalChecked(), New(baton->density)); + } Set(info, New("hasProfile").ToLocalChecked(), New(baton->hasProfile)); Set(info, New("hasAlpha").ToLocalChecked(), New(baton->hasAlpha)); if (baton->orientation > 0) { diff --git a/src/operations.cc b/src/operations.cc index 16fdec9e..555f183d 100644 --- a/src/operations.cc +++ b/src/operations.cc @@ -1,37 +1,54 @@ +#include +#include #include #include "common.h" #include "operations.h" using vips::VImage; +using vips::VError; namespace sharp { /* - Alpha composite src over dst - Assumes alpha channels are already premultiplied and will be unpremultiplied after + Alpha composite src over dst with given gravity. + Assumes alpha channels are already premultiplied and will be unpremultiplied after. */ - VImage Composite(VImage src, VImage dst) { + VImage Composite(VImage src, VImage dst, const int gravity) { + using sharp::CalculateCrop; using sharp::HasAlpha; - // Split src into non-alpha and alpha + if (!HasAlpha(src)) { + throw VError("Overlay image must have an alpha channel"); + } + if (!HasAlpha(dst)) { + throw VError("Image to be overlaid must have an alpha channel"); + } + if (src.width() > dst.width() || src.height() > dst.height()) { + throw VError("Overlay image must have same dimensions or smaller"); + } + + // Enlarge overlay src, if required + if (src.width() < dst.width() || src.height() < dst.height()) { + // Calculate the (left, top) coordinates of the output image within the input image, applying the given gravity. 
+ int left; + int top; + std::tie(left, top) = CalculateCrop(dst.width(), dst.height(), src.width(), src.height(), gravity); + // Embed onto transparent background + std::vector background { 0.0, 0.0, 0.0, 0.0 }; + src = src.embed(left, top, dst.width(), dst.height(), VImage::option() + ->set("extend", VIPS_EXTEND_BACKGROUND) + ->set("background", background) + ); + } + + // Split src into non-alpha and alpha channels VImage srcWithoutAlpha = src.extract_band(0, VImage::option()->set("n", src.bands() - 1)); VImage srcAlpha = src[src.bands() - 1] * (1.0 / 255.0); // Split dst into non-alpha and alpha channels - VImage dstWithoutAlpha; - VImage dstAlpha; - if (HasAlpha(dst)) { - // Non-alpha: extract all-but-last channel - dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1)); - // Alpha: Extract last channel - dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0); - } else { - // Non-alpha: Copy reference - dstWithoutAlpha = dst; - // Alpha: Use blank, opaque (0xFF) image - dstAlpha = VImage::black(dst.width(), dst.height()).invert(); - } + VImage dstWithoutAlpha = dst.extract_band(0, VImage::option()->set("n", dst.bands() - 1)); + VImage dstAlpha = dst[dst.bands() - 1] * (1.0 / 255.0); // // Compute normalized output alpha channel: @@ -154,4 +171,83 @@ namespace sharp { ); } } + + /* + Calculate crop area based on image entropy + */ + std::tuple EntropyCrop(VImage image, int const outWidth, int const outHeight) { + int left = 0; + int top = 0; + int const inWidth = image.width(); + int const inHeight = image.height(); + if (inWidth > outWidth) { + // Reduce width by repeated removing slices from edge with lowest entropy + int width = inWidth; + double leftEntropy = 0.0; + double rightEntropy = 0.0; + // Max width of each slice + int const maxSliceWidth = static_cast(ceil((inWidth - outWidth) / 8.0)); + while (width > outWidth) { + // Width of current slice + int const slice = std::min(width - outWidth, maxSliceWidth); + if (leftEntropy == 0.0) { + // Update entropy of left slice + leftEntropy = Entropy(image.extract_area(left, 0, slice, inHeight)); + } + if (rightEntropy == 0.0) { + // Update entropy of right slice + rightEntropy = Entropy(image.extract_area(width - slice - 1, 0, slice, inHeight)); + } + // Keep slice with highest entropy + if (leftEntropy >= rightEntropy) { + // Discard right slice + rightEntropy = 0.0; + } else { + // Discard left slice + leftEntropy = 0.0; + left = left + slice; + } + width = width - slice; + } + } + if (inHeight > outHeight) { + // Reduce height by repeated removing slices from edge with lowest entropy + int height = inHeight; + double topEntropy = 0.0; + double bottomEntropy = 0.0; + // Max height of each slice + int const maxSliceHeight = static_cast(ceil((inHeight - outHeight) / 8.0)); + while (height > outHeight) { + // Height of current slice + int const slice = std::min(height - outHeight, maxSliceHeight); + if (topEntropy == 0.0) { + // Update entropy of top slice + topEntropy = Entropy(image.extract_area(0, top, inWidth, slice)); + } + if (bottomEntropy == 0.0) { + // Update entropy of bottom slice + bottomEntropy = Entropy(image.extract_area(0, height - slice - 1, inWidth, slice)); + } + // Keep slice with highest entropy + if (topEntropy >= bottomEntropy) { + // Discard bottom slice + bottomEntropy = 0.0; + } else { + // Discard top slice + topEntropy = 0.0; + top = top + slice; + } + height = height - slice; + } + } + return std::make_tuple(left, top); + } + + /* + Calculate the Shannon entropy for an image + */ + 
double Entropy(VImage image) { + return image.hist_find().hist_entropy(); + } + } // namespace sharp diff --git a/src/operations.h b/src/operations.h index ba69b25d..48b14834 100644 --- a/src/operations.h +++ b/src/operations.h @@ -1,6 +1,7 @@ #ifndef SRC_OPERATIONS_H_ #define SRC_OPERATIONS_H_ +#include #include using vips::VImage; @@ -8,10 +9,10 @@ using vips::VImage; namespace sharp { /* - Composite images `src` and `dst` with premultiplied alpha channel and output - image with premultiplied alpha. + Alpha composite src over dst with given gravity. + Assumes alpha channels are already premultiplied and will be unpremultiplied after. */ - VImage Composite(VImage src, VImage dst); + VImage Composite(VImage src, VImage dst, const int gravity); /* * Stretch luminance to cover full dynamic range. @@ -32,6 +33,17 @@ namespace sharp { * Sharpen flat and jagged areas. Use radius of -1 for fast sharpen. */ VImage Sharpen(VImage image, int const radius, double const flat, double const jagged); + + /* + Calculate crop area based on image entropy + */ + std::tuple EntropyCrop(VImage image, int const outWidth, int const outHeight); + + /* + Calculate the Shannon entropy for an image + */ + double Entropy(VImage image); + } // namespace sharp #endif // SRC_OPERATIONS_H_ diff --git a/src/pipeline.cc b/src/pipeline.cc index afb19299..41a992c0 100644 --- a/src/pipeline.cc +++ b/src/pipeline.cc @@ -1,10 +1,12 @@ -#include #include -#include #include +#include +#include + +#include + #include #include -#include #include "nan.h" @@ -47,6 +49,7 @@ using sharp::Normalize; using sharp::Gamma; using sharp::Blur; using sharp::Sharpen; +using sharp::EntropyCrop; using sharp::ImageType; using sharp::ImageTypeId; @@ -56,139 +59,29 @@ using sharp::HasAlpha; using sharp::ExifOrientation; using sharp::SetExifOrientation; using sharp::RemoveExifOrientation; +using sharp::SetDensity; using sharp::IsJpeg; using sharp::IsPng; using sharp::IsWebp; using sharp::IsTiff; using sharp::IsDz; using sharp::FreeCallback; +using sharp::CalculateCrop; using sharp::counterProcess; using sharp::counterQueue; -enum class Canvas { - CROP, - EMBED, - MAX, - MIN, - IGNORE_ASPECT -}; - -struct PipelineBaton { - std::string fileIn; - char *bufferIn; - size_t bufferInLength; - std::string iccProfilePath; - int limitInputPixels; - std::string density; - int rawWidth; - int rawHeight; - int rawChannels; - std::string formatOut; - std::string fileOut; - void *bufferOut; - size_t bufferOutLength; - int topOffsetPre; - int leftOffsetPre; - int widthPre; - int heightPre; - int topOffsetPost; - int leftOffsetPost; - int widthPost; - int heightPost; - int width; - int height; - int channels; - Canvas canvas; - int gravity; - std::string interpolator; - double background[4]; - bool flatten; - bool negate; - double blurSigma; - int sharpenRadius; - double sharpenFlat; - double sharpenJagged; - int threshold; - std::string overlayPath; - double gamma; - bool greyscale; - bool normalize; - int angle; - bool rotateBeforePreExtract; - bool flip; - bool flop; - bool progressive; - bool withoutEnlargement; - VipsAccess accessMethod; - int quality; - int compressionLevel; - bool withoutAdaptiveFiltering; - bool withoutChromaSubsampling; - bool trellisQuantisation; - bool overshootDeringing; - bool optimiseScans; - std::string err; - bool withMetadata; - int withMetadataOrientation; - int tileSize; - int tileOverlap; - - PipelineBaton(): - bufferInLength(0), - limitInputPixels(0), - density(""), - rawWidth(0), - rawHeight(0), - rawChannels(0), - 
formatOut(""), - fileOut(""), - bufferOutLength(0), - topOffsetPre(-1), - topOffsetPost(-1), - channels(0), - canvas(Canvas::CROP), - gravity(0), - flatten(false), - negate(false), - blurSigma(0.0), - sharpenRadius(0), - sharpenFlat(1.0), - sharpenJagged(2.0), - threshold(0), - gamma(0.0), - greyscale(false), - normalize(false), - angle(0), - flip(false), - flop(false), - progressive(false), - withoutEnlargement(false), - quality(80), - compressionLevel(6), - withoutAdaptiveFiltering(false), - withoutChromaSubsampling(false), - trellisQuantisation(false), - overshootDeringing(false), - optimiseScans(false), - withMetadata(false), - withMetadataOrientation(-1), - tileSize(256), - tileOverlap(0) { - background[0] = 0.0; - background[1] = 0.0; - background[2] = 0.0; - background[3] = 255.0; - } -}; - class PipelineWorker : public AsyncWorker { public: - PipelineWorker(Callback *callback, PipelineBaton *baton, Callback *queueListener, const Local &bufferIn) : + PipelineWorker(Callback *callback, PipelineBaton *baton, Callback *queueListener, + const Local &bufferIn, const Local &overlayBufferIn) : AsyncWorker(callback), baton(baton), queueListener(queueListener) { if (baton->bufferInLength > 0) { SaveToPersistent("bufferIn", bufferIn); } + if (baton->overlayBufferInLength > 0) { + SaveToPersistent("overlayBufferIn", overlayBufferIn); + } } ~PipelineWorker() {} @@ -227,9 +120,12 @@ class PipelineWorker : public AsyncWorker { try { VOption *option = VImage::option()->set("access", baton->accessMethod); if (inputImageType == ImageType::MAGICK) { - option->set("density", baton->density.data()); + option->set("density", std::to_string(baton->density).data()); } image = VImage::new_from_buffer(baton->bufferIn, baton->bufferInLength, nullptr, option); + if (inputImageType == ImageType::MAGICK) { + SetDensity(image, baton->density); + } } catch (...) { (baton->err).append("Input buffer has corrupt header"); inputImageType = ImageType::UNKNOWN; @@ -245,9 +141,12 @@ class PipelineWorker : public AsyncWorker { try { VOption *option = VImage::option()->set("access", baton->accessMethod); if (inputImageType == ImageType::MAGICK) { - option->set("density", baton->density.data()); + option->set("density", std::to_string(baton->density).data()); } image = VImage::new_from_file(baton->fileIn.data(), option); + if (inputImageType == ImageType::MAGICK) { + SetDensity(image, baton->density); + } } catch (...) { (baton->err).append("Input file has corrupt header"); inputImageType = ImageType::UNKNOWN; @@ -508,11 +407,19 @@ class PipelineWorker : public AsyncWorker { } } + // Ensure image has an alpha channel when there is an overlay + bool hasOverlay = baton->overlayBufferInLength > 0 || !baton->overlayFileIn.empty(); + if (hasOverlay && !HasAlpha(image)) { + double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 
256.0 : 1.0; + image = image.bandjoin( + VImage::new_matrix(image.width(), image.height()).new_from_image(255 * multiplier) + ); + } + bool shouldAffineTransform = xresidual != 0.0 || yresidual != 0.0; bool shouldBlur = baton->blurSigma != 0.0; bool shouldSharpen = baton->sharpenRadius != 0; bool shouldThreshold = baton->threshold != 0; - bool hasOverlay = !baton->overlayPath.empty(); bool shouldPremultiplyAlpha = HasAlpha(image) && (shouldAffineTransform || shouldBlur || shouldSharpen || hasOverlay); @@ -600,9 +507,15 @@ class PipelineWorker : public AsyncWorker { // Crop/max/min int left; int top; - std::tie(left, top) = CalculateCrop( - image.width(), image.height(), baton->width, baton->height, baton->gravity - ); + if (baton->crop < 9) { + // Gravity-based crop + std::tie(left, top) = CalculateCrop( + image.width(), image.height(), baton->width, baton->height, baton->crop + ); + } else { + // Entropy-based crop + std::tie(left, top) = EntropyCrop(image, baton->width, baton->height); + } int width = std::min(image.width(), baton->width); int height = std::min(image.height(), baton->height); image = image.extract_area(left, top, width, height); @@ -616,6 +529,27 @@ class PipelineWorker : public AsyncWorker { ); } + // Extend edges + if (baton->extendTop > 0 || baton->extendBottom > 0 || baton->extendLeft > 0 || baton->extendRight > 0) { + // Scale up 8-bit values to match 16-bit input image + const double multiplier = (image.interpretation() == VIPS_INTERPRETATION_RGB16) ? 256.0 : 1.0; + // Create background colour + std::vector background { + baton->background[0] * multiplier, + baton->background[1] * multiplier, + baton->background[2] * multiplier + }; + // Add alpha channel to background colour + if (HasAlpha(image)) { + background.push_back(baton->background[3] * multiplier); + } + // Embed + baton->width = image.width() + baton->extendLeft + baton->extendRight; + baton->height = image.height() + baton->extendTop + baton->extendBottom; + image = image.embed(baton->extendLeft, baton->extendTop, baton->width, baton->height, + VImage::option()->set("extend", VIPS_EXTEND_BACKGROUND)->set("background", background)); + } + // Threshold - must happen before blurring, due to the utility of blurring after thresholding if (shouldThreshold) { image = image.colourspace(VIPS_INTERPRETATION_B_W) >= baton->threshold; @@ -634,38 +568,41 @@ class PipelineWorker : public AsyncWorker { // Composite with overlay, if present if (hasOverlay) { VImage overlayImage; - ImageType overlayImageType = DetermineImageType(baton->overlayPath.data()); - if (overlayImageType != ImageType::UNKNOWN) { - overlayImage = VImage::new_from_file( - baton->overlayPath.data(), - VImage::option()->set("access", baton->accessMethod) - ); + ImageType overlayImageType = ImageType::UNKNOWN; + if (baton->overlayBufferInLength > 0) { + // Overlay with image from buffer + overlayImageType = DetermineImageType(baton->overlayBufferIn, baton->overlayBufferInLength); + if (overlayImageType != ImageType::UNKNOWN) { + try { + overlayImage = VImage::new_from_buffer(baton->overlayBufferIn, baton->overlayBufferInLength, + nullptr, VImage::option()->set("access", baton->accessMethod)); + } catch (...) 
{ + (baton->err).append("Overlay buffer has corrupt header"); + overlayImageType = ImageType::UNKNOWN; + } + } else { + (baton->err).append("Overlay buffer contains unsupported image format"); + } } else { - (baton->err).append("Overlay image is of an unsupported image format"); + // Overlay with image from file + overlayImageType = DetermineImageType(baton->overlayFileIn.data()); + if (overlayImageType != ImageType::UNKNOWN) { + try { + overlayImage = VImage::new_from_file(baton->overlayFileIn.data(), + VImage::option()->set("access", baton->accessMethod)); + } catch (...) { + (baton->err).append("Overlay file has corrupt header"); + overlayImageType = ImageType::UNKNOWN; + } + } + } + if (overlayImageType == ImageType::UNKNOWN) { return Error(); } - if (image.format() != VIPS_FORMAT_UCHAR && image.format() != VIPS_FORMAT_FLOAT) { - (baton->err).append("Expected image band format to be uchar or float: "); - (baton->err).append(vips_enum_nick(VIPS_TYPE_BAND_FORMAT, image.format())); - return Error(); - } - if (overlayImage.format() != VIPS_FORMAT_UCHAR && overlayImage.format() != VIPS_FORMAT_FLOAT) { - (baton->err).append("Expected overlay image band format to be uchar or float: "); - (baton->err).append(vips_enum_nick(VIPS_TYPE_BAND_FORMAT, overlayImage.format())); - return Error(); - } - if (!HasAlpha(overlayImage)) { - (baton->err).append("Overlay image must have an alpha channel"); - return Error(); - } - if (overlayImage.width() != image.width() && overlayImage.height() != image.height()) { - (baton->err).append("Overlay image must have same dimensions as resized image"); - return Error(); - } - // Ensure overlay is sRGB and premutiplied + // Ensure overlay is premultiplied sRGB overlayImage = overlayImage.colourspace(VIPS_INTERPRETATION_sRGB).premultiply(); - - image = Composite(overlayImage, image); + // Composite images with given gravity + image = Composite(overlayImage, image, baton->overlayGravity); } // Reverse premultiplication after all transformations: @@ -708,6 +645,9 @@ class PipelineWorker : public AsyncWorker { SetExifOrientation(image, baton->withMetadataOrientation); } + // Number of channels used in output image + baton->channels = image.bands(); + // Output if (baton->fileOut == "") { // Buffer output @@ -728,6 +668,7 @@ class PipelineWorker : public AsyncWorker { area->free_fn = nullptr; vips_area_unref(area); baton->formatOut = "jpeg"; + baton->channels = std::min(baton->channels, 3); } else if (baton->formatOut == "png" || (baton->formatOut == "input" && inputImageType == ImageType::PNG)) { // Write PNG to buffer VipsArea *area = VIPS_AREA(image.pngsave_buffer(VImage::option() @@ -800,6 +741,7 @@ class PipelineWorker : public AsyncWorker { ->set("interlace", baton->progressive) ); baton->formatOut = "jpeg"; + baton->channels = std::min(baton->channels, 3); } else if (baton->formatOut == "png" || isPng || (matchInput && inputImageType == ImageType::PNG)) { // Write PNG to file image.pngsave(const_cast(baton->fileOut.data()), VImage::option() @@ -824,12 +766,14 @@ class PipelineWorker : public AsyncWorker { ->set("compression", VIPS_FOREIGN_TIFF_COMPRESSION_JPEG) ); baton->formatOut = "tiff"; + baton->channels = std::min(baton->channels, 3); } else if (baton->formatOut == "dz" || IsDz(baton->fileOut)) { // Write DZ to file image.dzsave(const_cast(baton->fileOut.data()), VImage::option() ->set("strip", !baton->withMetadata) ->set("tile_size", baton->tileSize) ->set("overlap", baton->tileOverlap) + ->set("layout", baton->tileLayout) ); baton->formatOut = "dz"; } else 
{ @@ -838,8 +782,6 @@ class PipelineWorker : public AsyncWorker { return Error(); } } - // Number of channels used in output image - baton->channels = image.bands(); } catch (VError const &err) { (baton->err).append(err.what()); } @@ -890,10 +832,13 @@ class PipelineWorker : public AsyncWorker { } } - // Dispose of Persistent wrapper around input Buffer so it can be garbage collected + // Dispose of Persistent wrapper around input Buffers so they can be garbage collected if (baton->bufferInLength > 0) { GetFromPersistent("bufferIn"); } + if (baton->overlayBufferInLength > 0) { + GetFromPersistent("overlayBufferIn"); + } delete baton; // Decrement processing task counter @@ -944,46 +889,6 @@ class PipelineWorker : public AsyncWorker { return std::make_tuple(rotate, flip, flop); } - /* - Calculate the (left, top) coordinates of the output image - within the input image, applying the given gravity. - */ - std::tuple - CalculateCrop(int const inWidth, int const inHeight, int const outWidth, int const outHeight, int const gravity) { - int left = 0; - int top = 0; - switch (gravity) { - case 1: // North - left = (inWidth - outWidth + 1) / 2; - break; - case 2: // East - left = inWidth - outWidth; - top = (inHeight - outHeight + 1) / 2; - break; - case 3: // South - left = (inWidth - outWidth + 1) / 2; - top = inHeight - outHeight; - break; - case 4: // West - top = (inHeight - outHeight + 1) / 2; - break; - case 5: // Northeast - left = inWidth - outWidth; - break; - case 6: // Southeast - left = inWidth - outWidth; - top = inHeight - outHeight; - case 7: // Southwest - top = inHeight - outHeight; - case 8: // Northwest - break; - default: // Centre - left = (inWidth - outWidth + 1) / 2; - top = (inHeight - outHeight + 1) / 2; - } - return std::make_tuple(left, top); - } - /* Calculate integral shrink given factor and interpolator window size */ @@ -1052,7 +957,7 @@ NAN_METHOD(pipeline) { // Limit input images to a given number of pixels, where pixels = width * height baton->limitInputPixels = attrAs(options, "limitInputPixels"); // Density/DPI at which to load vector images via libmagick - baton->density = attrAsStr(options, "density"); + baton->density = attrAs(options, "density"); // Raw pixel input baton->rawWidth = attrAs(options, "rawWidth"); baton->rawHeight = attrAs(options, "rawHeight"); @@ -1088,10 +993,17 @@ NAN_METHOD(pipeline) { baton->background[i] = To(Get(background, i).ToLocalChecked()).FromJust(); } // Overlay options - baton->overlayPath = attrAsStr(options, "overlayPath"); + baton->overlayFileIn = attrAsStr(options, "overlayFileIn"); + Local overlayBufferIn; + if (node::Buffer::HasInstance(Get(options, New("overlayBufferIn").ToLocalChecked()).ToLocalChecked())) { + overlayBufferIn = Get(options, New("overlayBufferIn").ToLocalChecked()).ToLocalChecked().As(); + baton->overlayBufferInLength = node::Buffer::Length(overlayBufferIn); + baton->overlayBufferIn = node::Buffer::Data(overlayBufferIn); + } + baton->overlayGravity = attrAs(options, "overlayGravity"); // Resize options baton->withoutEnlargement = attrAs(options, "withoutEnlargement"); - baton->gravity = attrAs(options, "gravity"); + baton->crop = attrAs(options, "crop"); baton->interpolator = attrAsStr(options, "interpolator"); // Operators baton->flatten = attrAs(options, "flatten"); @@ -1108,6 +1020,10 @@ NAN_METHOD(pipeline) { baton->rotateBeforePreExtract = attrAs(options, "rotateBeforePreExtract"); baton->flip = attrAs(options, "flip"); baton->flop = attrAs(options, "flop"); + baton->extendTop = attrAs(options, 
"extendTop"); + baton->extendBottom = attrAs(options, "extendBottom"); + baton->extendLeft = attrAs(options, "extendLeft"); + baton->extendRight = attrAs(options, "extendRight"); // Output options baton->progressive = attrAs(options, "progressive"); baton->quality = attrAs(options, "quality"); @@ -1122,8 +1038,18 @@ NAN_METHOD(pipeline) { // Output baton->formatOut = attrAsStr(options, "formatOut"); baton->fileOut = attrAsStr(options, "fileOut"); + // Tile output baton->tileSize = attrAs(options, "tileSize"); baton->tileOverlap = attrAs(options, "tileOverlap"); + std::string tileLayout = attrAsStr(options, "tileLayout"); + if (tileLayout == "google") { + baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_GOOGLE; + } else if (tileLayout == "zoomify") { + baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_ZOOMIFY; + } else { + baton->tileLayout = VIPS_FOREIGN_DZ_LAYOUT_DZ; + } + // Function to notify of queue length changes Callback *queueListener = new Callback( Get(options, New("queueListener").ToLocalChecked()).ToLocalChecked().As() @@ -1131,7 +1057,7 @@ NAN_METHOD(pipeline) { // Join queue for worker thread Callback *callback = new Callback(info[1].As()); - AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, bufferIn)); + AsyncQueueWorker(new PipelineWorker(callback, baton, queueListener, bufferIn, overlayBufferIn)); // Increment queued task counter g_atomic_int_inc(&counterQueue); diff --git a/src/pipeline.h b/src/pipeline.h index 411ae2c6..eba987e3 100644 --- a/src/pipeline.h +++ b/src/pipeline.h @@ -1,8 +1,141 @@ #ifndef SRC_PIPELINE_H_ #define SRC_PIPELINE_H_ +#include + #include "nan.h" NAN_METHOD(pipeline); +enum class Canvas { + CROP, + EMBED, + MAX, + MIN, + IGNORE_ASPECT +}; + +struct PipelineBaton { + std::string fileIn; + char *bufferIn; + size_t bufferInLength; + std::string iccProfilePath; + int limitInputPixels; + int density; + int rawWidth; + int rawHeight; + int rawChannels; + std::string formatOut; + std::string fileOut; + void *bufferOut; + size_t bufferOutLength; + std::string overlayFileIn; + char *overlayBufferIn; + size_t overlayBufferInLength; + int overlayGravity; + int topOffsetPre; + int leftOffsetPre; + int widthPre; + int heightPre; + int topOffsetPost; + int leftOffsetPost; + int widthPost; + int heightPost; + int width; + int height; + int channels; + Canvas canvas; + int crop; + std::string interpolator; + double background[4]; + bool flatten; + bool negate; + double blurSigma; + int sharpenRadius; + double sharpenFlat; + double sharpenJagged; + int threshold; + double gamma; + bool greyscale; + bool normalize; + int angle; + bool rotateBeforePreExtract; + bool flip; + bool flop; + int extendTop; + int extendBottom; + int extendLeft; + int extendRight; + bool progressive; + bool withoutEnlargement; + VipsAccess accessMethod; + int quality; + int compressionLevel; + bool withoutAdaptiveFiltering; + bool withoutChromaSubsampling; + bool trellisQuantisation; + bool overshootDeringing; + bool optimiseScans; + std::string err; + bool withMetadata; + int withMetadataOrientation; + int tileSize; + int tileOverlap; + VipsForeignDzLayout tileLayout; + + PipelineBaton(): + bufferInLength(0), + limitInputPixels(0), + density(72), + rawWidth(0), + rawHeight(0), + rawChannels(0), + formatOut(""), + fileOut(""), + bufferOutLength(0), + overlayBufferInLength(0), + overlayGravity(0), + topOffsetPre(-1), + topOffsetPost(-1), + channels(0), + canvas(Canvas::CROP), + crop(0), + flatten(false), + negate(false), + blurSigma(0.0), + sharpenRadius(0), + sharpenFlat(1.0), + 
sharpenJagged(2.0), + threshold(0), + gamma(0.0), + greyscale(false), + normalize(false), + angle(0), + flip(false), + flop(false), + extendTop(0), + extendBottom(0), + extendLeft(0), + extendRight(0), + progressive(false), + withoutEnlargement(false), + quality(80), + compressionLevel(6), + withoutAdaptiveFiltering(false), + withoutChromaSubsampling(false), + trellisQuantisation(false), + overshootDeringing(false), + optimiseScans(false), + withMetadata(false), + withMetadataOrientation(-1), + tileSize(256), + tileOverlap(0), + tileLayout(VIPS_FOREIGN_DZ_LAYOUT_DZ) { + background[0] = 0.0; + background[1] = 0.0; + background[2] = 0.0; + background[3] = 255.0; + } +}; + #endif // SRC_PIPELINE_H_ diff --git a/test/fixtures/expected/crop-entropy.jpg b/test/fixtures/expected/crop-entropy.jpg new file mode 100644 index 00000000..c1c08061 Binary files /dev/null and b/test/fixtures/expected/crop-entropy.jpg differ diff --git a/test/fixtures/expected/crop-entropy.png b/test/fixtures/expected/crop-entropy.png new file mode 100644 index 00000000..1c8a3780 Binary files /dev/null and b/test/fixtures/expected/crop-entropy.png differ diff --git a/test/fixtures/expected/extend-equal.jpg b/test/fixtures/expected/extend-equal.jpg new file mode 100644 index 00000000..800f32e9 Binary files /dev/null and b/test/fixtures/expected/extend-equal.jpg differ diff --git a/test/fixtures/expected/extend-unequal.png b/test/fixtures/expected/extend-unequal.png new file mode 100644 index 00000000..1454f016 Binary files /dev/null and b/test/fixtures/expected/extend-unequal.png differ diff --git a/test/fixtures/expected/overlay-gravity-center.jpg b/test/fixtures/expected/overlay-gravity-center.jpg new file mode 100644 index 00000000..8b65ebcf Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-center.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-centre.jpg b/test/fixtures/expected/overlay-gravity-centre.jpg new file mode 100644 index 00000000..8b65ebcf Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-centre.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-east.jpg b/test/fixtures/expected/overlay-gravity-east.jpg new file mode 100644 index 00000000..756a82c1 Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-east.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-north.jpg b/test/fixtures/expected/overlay-gravity-north.jpg new file mode 100644 index 00000000..b61fc837 Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-north.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-northeast.jpg b/test/fixtures/expected/overlay-gravity-northeast.jpg new file mode 100644 index 00000000..9523792b Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-northeast.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-northwest.jpg b/test/fixtures/expected/overlay-gravity-northwest.jpg new file mode 100644 index 00000000..030e0b12 Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-northwest.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-south.jpg b/test/fixtures/expected/overlay-gravity-south.jpg new file mode 100644 index 00000000..96cd451d Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-south.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-southeast.jpg b/test/fixtures/expected/overlay-gravity-southeast.jpg new file mode 100644 index 00000000..852292d8 Binary files /dev/null and 
b/test/fixtures/expected/overlay-gravity-southeast.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-southwest.jpg b/test/fixtures/expected/overlay-gravity-southwest.jpg new file mode 100644 index 00000000..8876c9fd Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-southwest.jpg differ diff --git a/test/fixtures/expected/overlay-gravity-west.jpg b/test/fixtures/expected/overlay-gravity-west.jpg new file mode 100644 index 00000000..1495500c Binary files /dev/null and b/test/fixtures/expected/overlay-gravity-west.jpg differ diff --git a/test/leak/sharp.supp b/test/leak/sharp.supp index bf224789..680ca215 100644 --- a/test/leak/sharp.supp +++ b/test/leak/sharp.supp @@ -248,6 +248,34 @@ ... fun:_ZN2v88internal12_GLOBAL__N_117CreateICUCollatorEPNS0_7IsolateERKN6icu_556LocaleENS0_6HandleINS0_8JSObjectEEE } +{ + leak_v8_CallInterfaceDescriptorData + Memcheck:Leak + match-leak-kinds: possible + ... + fun:_ZN2v88internal27CallInterfaceDescriptorData26InitializePlatformSpecificEiPNS0_8RegisterEPNS0_27PlatformInterfaceDescriptorE +} +{ + leak_v8_InitializePlatformSpecific14 + Memcheck:Leak + match-leak-kinds: possible + ... + fun:_ZN2v88internal14LoadDescriptor26InitializePlatformSpecificEPNS0_27CallInterfaceDescriptorDataE +} +{ + leak_v8_InitializePlatformSpecific15 + Memcheck:Leak + match-leak-kinds: possible + ... + fun:_ZN2v88internal15StoreDescriptor26InitializePlatformSpecificEPNS0_27CallInterfaceDescriptorDataE +} +{ + leak_v8_Malloced + Memcheck:Leak + match-leak-kinds: possible + ... + fun:_ZN2v88internal8Malloced3NewEm +} # vips__init warnings { @@ -279,7 +307,7 @@ fun:vips__magick_read_header } { - cond_magick_is_palette_image + cond_magick_is_palette_image_get_bands Memcheck:Cond fun:IsPaletteImage ... @@ -292,6 +320,13 @@ ... fun:get_bands } +{ + cond_magick_is_palette_image_parse_header + Memcheck:Cond + fun:IsPaletteImage + ... 
diff --git a/test/unit/crop.js b/test/unit/crop.js
index a3228166..5463ea37 100644
--- a/test/unit/crop.js
+++ b/test/unit/crop.js
@@ -5,9 +5,9 @@
 var assert = require('assert');
 
 var sharp = require('../../index');
 var fixtures = require('../fixtures');
 
-describe('Crop gravities', function() {
+describe('Crop', function() {
 
-  var testSettings = [
+  [
     {
       name: 'North',
       width: 320,
@@ -50,6 +50,13 @@ describe('Crop gravities', function() {
       gravity: sharp.gravity.centre,
       fixture: 'gravity-centre.jpg'
     },
+    {
+      name: 'Default (centre)',
+      width: 80,
+      height: 320,
+      gravity: undefined,
+      fixture: 'gravity-centre.jpg'
+    },
     {
       name: 'Northeast',
       width: 320,
@@ -106,10 +113,8 @@ describe('Crop gravities', function() {
       gravity: sharp.gravity.northwest,
       fixture: 'gravity-west.jpg'
     }
-  ];
-
-  testSettings.forEach(function(settings) {
-    it(settings.name, function(done) {
+  ].forEach(function(settings) {
+    it(settings.name + ' gravity', function(done) {
       sharp(fixtures.inputJpg)
         .resize(settings.width, settings.height)
         .crop(settings.gravity)
@@ -122,7 +127,7 @@ describe('Crop gravities', function() {
     });
   });
 
-  it('allows specifying the gravity as a string', function(done) {
+  it('Allows specifying the gravity as a string', function(done) {
     sharp(fixtures.inputJpg)
       .resize(80, 320)
       .crop('east')
@@ -134,36 +139,57 @@ describe('Crop gravities', function() {
     });
   });
 
-  it('Invalid number', function() {
+  it('Invalid values fail', function() {
     assert.throws(function() {
-      sharp(fixtures.inputJpg).crop(9);
+      sharp().crop(9);
+    });
+    assert.throws(function() {
+      sharp().crop(1.1);
+    });
+    assert.throws(function() {
+      sharp().crop(-1);
+    });
+    assert.throws(function() {
+      sharp().crop('zoinks');
     });
   });
 
-  it('Invalid string', function() {
-    assert.throws(function() {
-      sharp(fixtures.inputJpg).crop('yadda');
-    });
-  });
-
-  it('does not throw if crop gravity is undefined', function() {
+  it('Uses default value when none specified', function() {
    assert.doesNotThrow(function() {
-      sharp(fixtures.inputJpg).crop();
+      sharp().crop();
    });
  });
 
-  it('defaults crop gravity to sharp.gravity.center', function(done) {
-    var centerGravitySettings = testSettings.filter(function (settings) {
-      return settings.name === 'Center';
-    })[0];
-    sharp(fixtures.inputJpg)
-      .resize(centerGravitySettings.width, centerGravitySettings.height)
-      .crop()
-      .toBuffer(function(err, data, info) {
-        if (err) throw err;
-        assert.strictEqual(centerGravitySettings.width, info.width);
-        assert.strictEqual(centerGravitySettings.height, info.height);
-        fixtures.assertSimilar(fixtures.expected(centerGravitySettings.fixture), data, done);
-      });
+  describe('Entropy-based strategy', function() {
+
+    it('JPEG', function(done) {
+      sharp(fixtures.inputJpgWithCmykProfile)
+        .resize(80, 320)
+        .crop(sharp.strategy.entropy)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('jpeg', info.format);
+          assert.strictEqual(3, info.channels);
+          assert.strictEqual(80, info.width);
+          assert.strictEqual(320, info.height);
+          fixtures.assertSimilar(fixtures.expected('crop-entropy.jpg'), data, done);
+        });
+    });
+
+    it('PNG', function(done) {
+      sharp(fixtures.inputPngWithTransparency)
+        .resize(320, 80)
+        .crop(sharp.strategy.entropy)
+        .toBuffer(function(err, data, info) {
+          if (err) throw err;
+          assert.strictEqual('png', info.format);
+          assert.strictEqual(4, info.channels);
+          assert.strictEqual(320, info.width);
+          assert.strictEqual(80, info.height);
+          fixtures.assertSimilar(fixtures.expected('crop-entropy.png'), data, done);
+        });
+    });
+  });
+
 });
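Grounded in the crop tests above: gravity may be passed as a string, and calling crop() with no argument falls back to centre gravity. A small sketch of both forms (input/output filenames are hypothetical):

```javascript
var sharp = require('sharp');

// String gravity, equivalent to sharp.gravity.east
sharp('input.jpg')
  .resize(80, 320)
  .crop('east')
  .toFile('east.jpg', function(err, info) {
    if (err) throw err;
    console.log(info.width, info.height); // 80 320
  });

// No argument: the default centre gravity is used
sharp('input.jpg')
  .resize(80, 320)
  .crop()
  .toFile('centre.jpg', function(err) {
    if (err) throw err;
  });
```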
diff --git a/test/unit/extend.js b/test/unit/extend.js
new file mode 100644
index 00000000..2232512d
--- /dev/null
+++ b/test/unit/extend.js
@@ -0,0 +1,52 @@
+'use strict';
+
+var assert = require('assert');
+
+var sharp = require('../../index');
+var fixtures = require('../fixtures');
+
+describe('Extend', function () {
+
+  it('extend all sides equally with RGB', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(120)
+      .background({r: 255, g: 0, b: 0})
+      .extend(10)
+      .toBuffer(function(err, data, info) {
+        if (err) throw err;
+        assert.strictEqual(140, info.width);
+        assert.strictEqual(118, info.height);
+        fixtures.assertSimilar(fixtures.expected('extend-equal.jpg'), data, done);
+      });
+  });
+
+  it('extend sides unequally with RGBA', function(done) {
+    sharp(fixtures.inputPngWithTransparency16bit)
+      .resize(120)
+      .background({r: 0, g: 0, b: 0, a: 0})
+      .extend({top: 50, bottom: 0, left: 10, right: 35})
+      .toBuffer(function(err, data, info) {
+        if (err) throw err;
+        assert.strictEqual(165, info.width);
+        assert.strictEqual(170, info.height);
+        fixtures.assertSimilar(fixtures.expected('extend-unequal.png'), data, done);
+      });
+  });
+
+  it('missing parameter fails', function() {
+    assert.throws(function() {
+      sharp().extend();
+    });
+  });
+  it('negative fails', function() {
+    assert.throws(function() {
+      sharp().extend(-1);
+    });
+  });
+  it('partial object fails', function() {
+    assert.throws(function() {
+      sharp().extend({top: 1});
+    });
+  });
+
+});
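The new extend tests above pin down the accepted argument shapes: a single non-negative integer, or an object naming all four edges. A quick sketch of what passes and what throws (filenames are hypothetical):

```javascript
var sharp = require('sharp');

// Accepted: one number applied to every edge
sharp('input.jpg')
  .background({r: 255, g: 0, b: 0})
  .extend(10)
  .toFile('padded.jpg', function(err) { if (err) throw err; });

// Accepted: all four edges named explicitly
sharp('input.png')
  .background({r: 0, g: 0, b: 0, a: 0})
  .extend({top: 50, bottom: 0, left: 10, right: 35})
  .toFile('padded.png', function(err) { if (err) throw err; });

// Rejected, per the tests: no argument, a negative number, a partial object
// sharp().extend();          // throws
// sharp().extend(-1);        // throws
// sharp().extend({top: 1});  // throws
```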
diff --git a/test/unit/extract.js b/test/unit/extract.js
index 2191c618..f49c8e92 100644
--- a/test/unit/extract.js
+++ b/test/unit/extract.js
@@ -6,29 +6,6 @@ var sharp = require('../../index');
 var fixtures = require('../fixtures');
 
 describe('Partial image extraction', function() {
-  describe('using the legacy extract(top,left,width,height) syntax', function () {
-    it('JPEG', function(done) {
-      sharp(fixtures.inputJpg)
-        .extract(2, 2, 20, 20)
-        .toBuffer(function(err, data, info) {
-          if (err) throw err;
-          assert.strictEqual(20, info.width);
-          assert.strictEqual(20, info.height);
-          fixtures.assertSimilar(fixtures.expected('extract.jpg'), data, done);
-        });
-    });
-
-    it('PNG', function(done) {
-      sharp(fixtures.inputPng)
-        .extract(300, 200, 400, 200)
-        .toBuffer(function(err, data, info) {
-          if (err) throw err;
-          assert.strictEqual(400, info.width);
-          assert.strictEqual(200, info.height);
-          fixtures.assertSimilar(fixtures.expected('extract.png'), data, done);
-        });
-    });
-  });
 
   it('JPEG', function(done) {
     sharp(fixtures.inputJpg)
diff --git a/test/unit/io.js b/test/unit/io.js
index 4ae2de36..923b4d9e 100644
--- a/test/unit/io.js
+++ b/test/unit/io.js
@@ -109,6 +109,26 @@ describe('Input/output', function() {
     readable.pipe(pipeline).pipe(writable);
   });
 
+  it('Stream should emit info event', function(done) {
+    var readable = fs.createReadStream(fixtures.inputJpg);
+    var writable = fs.createWriteStream(fixtures.outputJpg);
+    var pipeline = sharp().resize(320, 240);
+    var infoEventEmitted = false;
+    pipeline.on('info', function(info) {
+      assert.strictEqual('jpeg', info.format);
+      assert.strictEqual(320, info.width);
+      assert.strictEqual(240, info.height);
+      assert.strictEqual(3, info.channels);
+      infoEventEmitted = true;
+    });
+    writable.on('finish', function() {
+      assert.strictEqual(true, infoEventEmitted);
+      fs.unlinkSync(fixtures.outputJpg);
+      done();
+    });
+    readable.pipe(pipeline).pipe(writable);
+  });
+
   it('Handle Stream to Stream error ', function(done) {
     var pipeline = sharp().resize(320, 240);
     var anErrorWasEmitted = false;
@@ -662,7 +682,14 @@ describe('Input/output', function() {
           assert.strictEqual('png', info.format);
           assert.strictEqual(40, info.width);
           assert.strictEqual(40, info.height);
-          fixtures.assertSimilar(fixtures.expected('svg72.png'), data, done);
+          fixtures.assertSimilar(fixtures.expected('svg72.png'), data, function(err) {
+            if (err) throw err;
+            sharp(data).metadata(function(err, info) {
+              if (err) throw err;
+              assert.strictEqual(72, info.density);
+              done();
+            });
+          });
         }
       });
   });
@@ -679,7 +706,14 @@ describe('Input/output', function() {
           assert.strictEqual('png', info.format);
           assert.strictEqual(40, info.width);
           assert.strictEqual(40, info.height);
-          fixtures.assertSimilar(fixtures.expected('svg1200.png'), data, done);
+          fixtures.assertSimilar(fixtures.expected('svg1200.png'), data, function(err) {
+            if (err) throw err;
+            sharp(data).metadata(function(err, info) {
+              if (err) throw err;
+              assert.strictEqual(1200, info.density);
+              done();
+            });
+          });
         }
       });
   });
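The new io.js test above relies on the 'info' event firing before the writable stream's 'finish' event. A minimal sketch of that pattern outside the test harness, with hypothetical file paths:

```javascript
var fs = require('fs');
var sharp = require('sharp');

var pipeline = sharp().resize(320, 240);
pipeline.on('info', function(info) {
  // Emitted once the output dimensions are known, before the writable finishes
  console.log(info.format, info.width, info.height, info.channels);
});

fs.createReadStream('input.jpg')
  .pipe(pipeline)
  .pipe(fs.createWriteStream('output.jpg'))
  .on('finish', function() {
    console.log('done');
  });
```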
diff --git a/test/unit/metadata.js b/test/unit/metadata.js
index 26e48fca..05f9503a 100644
--- a/test/unit/metadata.js
+++ b/test/unit/metadata.js
@@ -18,6 +18,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2225, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -35,6 +36,7 @@ describe('Image metadata', function() {
       assert.strictEqual(600, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual(72, metadata.density);
       assert.strictEqual(true, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual(8, metadata.orientation);
@@ -64,6 +66,7 @@ describe('Image metadata', function() {
       assert.strictEqual(3248, metadata.height);
       assert.strictEqual('b-w', metadata.space);
       assert.strictEqual(1, metadata.channels);
+      assert.strictEqual(300, metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -82,6 +85,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2074, metadata.height);
       assert.strictEqual('b-w', metadata.space);
       assert.strictEqual(1, metadata.channels);
+      assert.strictEqual(300, metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -99,6 +103,7 @@ describe('Image metadata', function() {
       assert.strictEqual(1536, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(4, metadata.channels);
+      assert.strictEqual(72, metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(true, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -117,6 +122,7 @@ describe('Image metadata', function() {
       assert.strictEqual(772, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -135,6 +141,7 @@ describe('Image metadata', function() {
       assert.strictEqual(800, metadata.width);
       assert.strictEqual(533, metadata.height);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -153,6 +160,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2220, metadata.width);
       assert.strictEqual(2967, metadata.height);
       assert.strictEqual(4, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual('rgb', metadata.space);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(true, metadata.hasAlpha);
@@ -171,6 +179,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2225, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -198,6 +207,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2225, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -219,6 +229,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2225, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
@@ -238,6 +249,7 @@ describe('Image metadata', function() {
       assert.strictEqual(2225, metadata.height);
       assert.strictEqual('srgb', metadata.space);
       assert.strictEqual(3, metadata.channels);
+      assert.strictEqual('undefined', typeof metadata.density);
       assert.strictEqual(false, metadata.hasProfile);
       assert.strictEqual(false, metadata.hasAlpha);
       assert.strictEqual('undefined', typeof metadata.orientation);
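The density field asserted throughout the metadata tests above is returned alongside the existing attributes. A small sketch of reading it (input path hypothetical):

```javascript
var sharp = require('sharp');

sharp('input.jpg').metadata(function(err, metadata) {
  if (err) throw err;
  // density is the pixels-per-inch value declared by the source image,
  // e.g. 72 or 300; it is undefined when the format carries no density.
  console.log(metadata.width, metadata.height, metadata.density);
});
```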
diff --git a/test/unit/overlay.js b/test/unit/overlay.js
index b0a94896..36a6f2a2 100644
--- a/test/unit/overlay.js
+++ b/test/unit/overlay.js
@@ -1,5 +1,6 @@
 'use strict';
 
+var fs = require('fs');
 var assert = require('assert');
 var fixtures = require('../fixtures');
 var sharp = require('../../index');
@@ -17,7 +18,7 @@ var getPaths = function(baseName, extension) {
 // Test
 describe('Overlays', function() {
 
-  it('Overlay transparent PNG on solid background', function(done) {
+  it('Overlay transparent PNG file on solid background', function(done) {
     var paths = getPaths('alpha-layer-01');
 
     sharp(fixtures.inputPngOverlayLayer0)
@@ -29,6 +30,18 @@ describe('Overlays', function() {
     });
   });
 
+  it('Overlay transparent PNG Buffer on solid background', function(done) {
+    var paths = getPaths('alpha-layer-01');
+
+    sharp(fixtures.inputPngOverlayLayer0)
+      .overlayWith(fs.readFileSync(fixtures.inputPngOverlayLayer1))
+      .toFile(paths.actual, function (error) {
+        if (error) return done(error);
+        fixtures.assertMaxColourDistance(paths.actual, paths.expected);
+        done();
+      });
+  });
+
   it('Overlay low-alpha transparent PNG on solid background', function(done) {
     var paths = getPaths('alpha-layer-01-low-alpha');
 
@@ -141,18 +154,19 @@ describe('Overlays', function() {
     });
   }
 
-  it('Fail when compositing images with different dimensions', function(done) {
-    sharp(fixtures.inputJpg)
-      .overlayWith(fixtures.inputPngWithGreyAlpha)
+  it('Fail when overlay does not contain alpha channel', function(done) {
+    sharp(fixtures.inputPngOverlayLayer1)
+      .overlayWith(fixtures.inputJpg)
       .toBuffer(function(error) {
         assert.strictEqual(true, error instanceof Error);
         done();
       });
   });
 
-  it('Fail when compositing non-PNG image', function(done) {
-    sharp(fixtures.inputPngOverlayLayer1)
-      .overlayWith(fixtures.inputJpg)
+  it('Fail when overlay is larger', function(done) {
+    sharp(fixtures.inputJpg)
+      .resize(320)
+      .overlayWith(fixtures.inputPngOverlayLayer1)
       .toBuffer(function(error) {
         assert.strictEqual(true, error instanceof Error);
         done();
@@ -170,4 +184,62 @@ describe('Overlays', function() {
       sharp().overlayWith(1);
     });
   });
+
+  it('Fail with unsupported gravity', function() {
+    assert.throws(function() {
+      sharp()
+        .overlayWith(fixtures.inputPngOverlayLayer1, {
+          gravity: 9
+        });
+    });
+  });
+
+  it('Empty options', function() {
+    assert.doesNotThrow(function() {
+      sharp().overlayWith(fixtures.inputPngOverlayLayer1, {});
+    });
+  });
+
+  describe('Overlay with numeric gravity', function() {
+    Object.keys(sharp.gravity).forEach(function(gravity) {
+      it(gravity, function(done) {
+        var expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
+        sharp(fixtures.inputJpg)
+          .resize(80)
+          .overlayWith(fixtures.inputPngWithTransparency16bit, {
+            gravity: sharp.gravity[gravity]
+          })
+          .toBuffer(function(err, data, info) {
+            if (err) throw err;
+            assert.strictEqual('jpeg', info.format);
+            assert.strictEqual(80, info.width);
+            assert.strictEqual(65, info.height);
+            assert.strictEqual(3, info.channels);
+            fixtures.assertSimilar(expected, data, done);
+          });
+      });
+    });
+  });
+
+  describe('Overlay with string-based gravity', function() {
+    Object.keys(sharp.gravity).forEach(function(gravity) {
+      it(gravity, function(done) {
+        var expected = fixtures.expected('overlay-gravity-' + gravity + '.jpg');
+        sharp(fixtures.inputJpg)
+          .resize(80)
+          .overlayWith(fixtures.inputPngWithTransparency16bit, {
+            gravity: gravity
+          })
+          .toBuffer(function(err, data, info) {
+            if (err) throw err;
+            assert.strictEqual('jpeg', info.format);
+            assert.strictEqual(80, info.width);
+            assert.strictEqual(65, info.height);
+            assert.strictEqual(3, info.channels);
+            fixtures.assertSimilar(expected, data, done);
+          });
+      });
+    });
+  });
+
 });
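Note: in the flattened source, the "numeric gravity" describe block passed the string key while the "string-based gravity" block passed the numeric constant; the two option expressions have been swapped above so the code matches the test names. Grounded in those tests, the overlay may be a Buffer and gravity may be given either by name or as a sharp.gravity constant; a sketch with hypothetical file names:

```javascript
var fs = require('fs');
var sharp = require('sharp');

sharp('background.jpg')
  .resize(80)
  .overlayWith(fs.readFileSync('overlay.png'), { gravity: 'northwest' })
  .toBuffer(function(err, data, info) {
    if (err) throw err;
    // Per the failure tests, the overlay must be no larger than the processed
    // image and must contain an alpha channel, otherwise an Error is returned.
    console.log(info.format, info.width, info.height);
  });
```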
diff --git a/test/unit/tile.js b/test/unit/tile.js
index 6b0c797e..f0dd044c 100644
--- a/test/unit/tile.js
+++ b/test/unit/tile.js
@@ -47,156 +47,149 @@ var assertDeepZoomTiles = function(directory, expectedSize, expectedLevels, done) {
 
 describe('Tile', function() {
 
-  describe('Invalid tile values', function() {
-    it('size - NaN', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile('zoinks');
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Valid size values pass', function() {
+    [1, 8192].forEach(function(size) {
+      assert.doesNotThrow(function() {
+        sharp().tile({
+          size: size
+        });
+      });
     });
+  });
 
-    it('size - float', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(1.1);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Invalid size values fail', function() {
+    ['zoinks', 1.1, -1, 0, 8193].forEach(function(size) {
+      assert.throws(function() {
+        sharp().tile({
+          size: size
+        });
+      });
     });
+  });
 
-    it('size - negative', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(-1);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Valid overlap values pass', function() {
+    [0, 8192].forEach(function(overlap) {
+      assert.doesNotThrow(function() {
+        sharp().tile({
+          size: 8192,
+          overlap: overlap
+        });
+      });
     });
+  });
 
-    it('size - zero', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(0);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Invalid overlap values fail', function() {
+    ['zoinks', 1.1, -1, 8193].forEach(function(overlap) {
+      assert.throws(function() {
+        sharp().tile({
+          overlap: overlap
+        });
+      });
     });
+  });
 
-    it('size - too large', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(8193);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Valid layout values pass', function() {
+    ['dz', 'google', 'zoomify'].forEach(function(layout) {
+      assert.doesNotThrow(function() {
+        sharp().tile({
+          layout: layout
+        });
+      });
     });
+  });
 
-    it('overlap - NaN', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(null, 'zoinks');
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Invalid layout values fail', function() {
+    ['zoinks', 1].forEach(function(layout) {
+      assert.throws(function() {
+        sharp().tile({
+          layout: layout
+        });
+      });
     });
+  });
 
-    it('overlap - float', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(null, 1.1);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Prevent larger overlap than default size', function() {
+    assert.throws(function() {
+      sharp().tile({overlap: 257});
     });
+  });
 
-    it('overlap - negative', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(null, -1);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
+  it('Prevent larger overlap than provided size', function() {
+    assert.throws(function() {
+      sharp().tile({size: 512, overlap: 513});
     });
-
-    it('overlap - too large', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(null, 8193);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
-    });
-
-    it('overlap - larger than default size', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(null, 257);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
-    });
-
-    it('overlap - larger than provided size', function(done) {
-      var isValid = true;
-      try {
-        sharp().tile(512, 513);
-      } catch (err) {
-        isValid = false;
-      }
-      assert.strictEqual(false, isValid);
-      done();
-    });
   });
 
   if (sharp.format.dz.output.file) {
-    describe('Deep Zoom output', function() {
-      it('Tile size - 256px default', function(done) {
-        var directory = fixtures.path('output.256_files');
-        rimraf(directory, function() {
-          sharp(fixtures.inputJpg).toFile(fixtures.path('output.256.dzi'), function(err, info) {
+    it('Deep Zoom layout', function(done) {
+      var directory = fixtures.path('output.dz_files');
+      rimraf(directory, function() {
+        sharp(fixtures.inputJpg)
+          .toFile(fixtures.path('output.dz.dzi'), function(err, info) {
            if (err) throw err;
            assert.strictEqual('dz', info.format);
            assertDeepZoomTiles(directory, 256, 13, done);
          });
-        });
       });
+    });
 
-      it('Tile size/overlap - 512/16px', function(done) {
-        var directory = fixtures.path('output.512_files');
-        rimraf(directory, function() {
-          sharp(fixtures.inputJpg).tile(512, 16).toFile(fixtures.path('output.512.dzi'), function(err, info) {
+    it('Deep Zoom layout with custom size+overlap', function(done) {
+      var directory = fixtures.path('output.dz.512_files');
+      rimraf(directory, function() {
+        sharp(fixtures.inputJpg)
+          .tile({
+            size: 512,
+            overlap: 16
+          })
+          .toFile(fixtures.path('output.dz.512.dzi'), function(err, info) {
            if (err) throw err;
            assert.strictEqual('dz', info.format);
            assertDeepZoomTiles(directory, 512 + 2 * 16, 13, done);
          });
-        });
       });
-    });
+    });
+
+    it('Zoomify layout', function(done) {
+      var directory = fixtures.path('output.zoomify');
+      rimraf(directory, function() {
+        sharp(fixtures.inputJpg)
+          .tile({
+            layout: 'zoomify'
+          })
+          .toFile(fixtures.path('output.zoomify.dzi'), function(err, info) {
+            if (err) throw err;
+            assert.strictEqual('dz', info.format);
+            fs.stat(path.join(directory, 'ImageProperties.xml'), function(err, stat) {
+              if (err) throw err;
+              assert.strictEqual(true, stat.isFile());
+              assert.strictEqual(true, stat.size > 0);
+              done();
+            });
+          });
+      });
+    });
+
+    it('Google layout', function(done) {
+      var directory = fixtures.path('output.google');
+      rimraf(directory, function() {
+        sharp(fixtures.inputJpg)
+          .tile({
+            layout: 'google'
+          })
+          .toFile(fixtures.path('output.google.dzi'), function(err, info) {
+            if (err) throw err;
+            assert.strictEqual('dz', info.format);
+            fs.stat(path.join(directory, '0', '0', '0.jpg'), function(err, stat) {
+              if (err) throw err;
+              assert.strictEqual(true, stat.isFile());
+              assert.strictEqual(true, stat.size > 0);
+              done();
+            });
+          });
+      });
+    });
+
   }
 });
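The rewritten tile tests above exercise an object-based tile() accepting size, overlap and layout ('dz', 'zoomify' or 'google'), replacing the old tile(size, overlap) signature. A sketch of typical usage, with output paths chosen only for illustration:

```javascript
var sharp = require('sharp');

// Deep Zoom pyramid with 512px tiles and a 16px overlap;
// per the tests, size must be 1-8192 and overlap must not exceed size
sharp('input.jpg')
  .tile({ size: 512, overlap: 16 })
  .toFile('output.dzi', function(err, info) {
    if (err) throw err;
    console.log(info.format); // 'dz'
  });

// Google Maps layout using the default 256px tile size
sharp('input.jpg')
  .tile({ layout: 'google' })
  .toFile('pyramid.dzi', function(err, info) {
    if (err) throw err;
  });
```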