Test: remove saliency directory

These scripts were originally used to help determine
some of the logic that is now part of the 'attention'
crop strategy in libvips itself.
Lovell Fuller 2023-09-26 21:22:50 +01:00
parent 8f63d131a4
commit 854ed65016
8 changed files with 0 additions and 307 deletions
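For context, a minimal sketch of how the attention strategy these scripts helped calibrate is selected through sharp's resize API (assuming a recent sharp release; the file names are placeholders):

```js
// Illustrative sketch only: cover-crop a 200x200 thumbnail, letting the
// attention strategy choose which region of the source image to keep.
const sharp = require('sharp');

sharp('input.jpg')
  .resize(200, 200, { fit: 'cover', position: sharp.strategy.attention })
  .toFile('thumbnail.jpg')
  .then((info) => console.log(`${info.width}x${info.height} written`));
```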

View File

@@ -1,16 +0,0 @@
# Crop strategy accuracy
1. Download the [MSRA Salient Object Database](http://research.microsoft.com/en-us/um/people/jiansun/SalientObject/salient_object.htm) (101MB).
2. Extract each image and its median human-labelled salient region.
3. Generate a test report of percentage deviance of top and left edges for each crop strategy, plus a naive centre gravity crop as "control".
```sh
git clone https://github.com/lovell/sharp.git
cd sharp/test/saliency
./download.sh
node report.js
python -m SimpleHTTPServer # Python 2; with Python 3 use: python3 -m http.server
```
The test report will then be available at
http://localhost:8000/report.html
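
Each edge score in the report reflects how far the chosen crop edge lands from the median human-labelled edge, scaled by the room available for cropping along that axis. A rough, hypothetical sketch of that calculation (mirroring the per-edge formula in report.js; the numbers are illustrative):

```js
// margin: how far the crop window can slide along an axis
// delta: absolute distance between the chosen and human-labelled edge
// A perfect crop scores 100; larger deviations score progressively lower.
const accuracy = (margin, delta) =>
  Math.round(margin / (margin + delta) * 100);

console.log(accuracy(300, 0));   // 100
console.log(accuracy(300, 100)); // 75
```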

View File

@@ -1,25 +0,0 @@
#!/bin/sh
# Fetch and parse the MSRA Salient Object Database 'Image set B'
# http://research.microsoft.com/en-us/um/people/jiansun/salientobject/salient_object.htm
if [ ! -d Image ]; then
  if [ ! -f ImageB.zip ]; then
    echo "Downloading 5000 images (101MB)"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/ImageB.zip
  fi
  unzip ImageB.zip
fi
if [ ! -d UserData ]; then
  if [ ! -f UserDataB.zip ]; then
    echo "Downloading human-labelled regions"
    curl -O http://research.microsoft.com/en-us/um/people/jiansun/salientobject/ImageSetB/UserDataB.zip
  fi
  unzip UserDataB.zip
fi
if [ ! -f userData.json ]; then
  echo "Processing human-labelled regions"
  node userData.js
fi

View File

@@ -1,40 +0,0 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const fs = require('fs');
const request = require('request');
const tumblr = require('tumblr.js');
const client = tumblr.createClient({
  consumer_key: '***',
  consumer_secret: '***'
});
// Page through the 'humanae' Tumblr blog, 20 photo posts at a time,
// saving the 100px-wide thumbnail of each post not already on disk.
const fetchImages = function (offset) {
  console.log(`Fetching offset ${offset}`);
  client.posts('humanae', {
    type: 'photo',
    offset: offset
  }, function (err, response) {
    if (err) throw err;
    if (response.posts.length > 0) {
      response.posts.forEach((post) => {
        const url = post.photos[0].alt_sizes
          .filter((image) => image.width === 100)
          .map((image) => image.url)[0];
        const filename = `./images/${post.id}.jpg`;
        try {
          // Skip images that have already been downloaded
          fs.statSync(filename);
        } catch (err) {
          if (err.code === 'ENOENT') {
            request(url).pipe(fs.createWriteStream(filename));
          }
        }
      });
      fetchImages(offset + 20);
    }
  });
};
fetchImages(0);

View File

@@ -1,9 +0,0 @@
{
  "name": "sharp-crop-strategy-attention-model-humanae",
  "version": "0.0.1",
  "private": true,
  "dependencies": {
    "request": "^2.75.0",
    "tumblr.js": "^1.1.1"
  }
}

View File

@@ -1,36 +0,0 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const fs = require('fs');
const childProcess = require('child_process');
const a = [];
const b = [];
fs.readdirSync('./images')
  .filter((file) => file.endsWith('.jpg'))
  .forEach((file) => {
    // Extract one pixel, avoiding first DCT block, and return value of A and B channels
    const command = `convert ./images/${file}[1x1+8+8] -colorspace lab -format "%[fx:u.g] %[fx:u.b]" info:`;
    const result = childProcess.execSync(command, { encoding: 'utf8' });
    const ab = result.split(' ');
    a.push(ab[0]);
    b.push(ab[1]);
  });
a.sort((v1, v2) => v1 - v2);
b.sort((v1, v2) => v1 - v2);
// Convert from 0..1 to -128..128
const convert = function (v) {
  return Math.round(256 * (v - 0.5));
};
const threshold = Math.round(a.length / 100);
console.log(`Trimming lowest/highest ${threshold} for 98th percentile`);
// Ignore ~2% outliers
console.log(`a ${convert(a[threshold])} - ${convert(a[a.length - threshold])}`);
console.log(`b ${convert(b[threshold])} - ${convert(b[b.length - threshold])}`);

View File

@@ -1,25 +0,0 @@
<html>
<head>
  <link href="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.css" rel="stylesheet" type="text/css">
  <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/4.2.6/d3.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/metrics-graphics/2.10.1/metricsgraphics.min.js"></script>
</head>
<body>
  <div id="accuracy"></div>
  <script>
    d3.json('report.json', function(err, data) {
      MG.data_graphic({
        title: 'Crop accuracy',
        data: data,
        target: '#accuracy',
        width: 960,
        height: 600,
        x_accessor: 'accuracy',
        x_label: '% Accuracy',
        y_accessor: ['centre', 'entropy', 'attention'],
        legend: ['Centre', 'Entropy', 'Attention']
      });
    });
  </script>
</body>
</html>

View File

@@ -1,82 +0,0 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const os = require('os');
const fs = require('fs');
const path = require('path');
const async = require('async');
const sharp = require('../../');
const crops = {
  entropy: sharp.strategy.entropy,
  attention: sharp.strategy.attention
};
const concurrency = os.cpus().length;
const scores = {};
const incrementScore = function (accuracy, crop) {
  if (typeof scores[accuracy] === 'undefined') {
    scores[accuracy] = {};
  }
  if (typeof scores[accuracy][crop] === 'undefined') {
    scores[accuracy][crop] = 0;
  }
  scores[accuracy][crop]++;
};
const userData = require('./userData.json');
const files = Object.keys(userData);
async.eachLimit(files, concurrency, function (file, done) {
  const filename = path.join(__dirname, 'Image', file);
  const salientWidth = userData[file].right - userData[file].left;
  const salientHeight = userData[file].bottom - userData[file].top;
  sharp(filename).metadata(function (err, metadata) {
    if (err) console.log(err);
    const marginWidth = metadata.width - salientWidth;
    const marginHeight = metadata.height - salientHeight;
    async.each(Object.keys(crops), function (crop, done) {
      async.parallel([
        // Left edge accuracy
        function (done) {
          if (marginWidth) {
            sharp(filename).resize(salientWidth, metadata.height).crop(crops[crop]).toBuffer(function (err, data, info) {
              const delta = Math.abs(userData[file].left + info.cropOffsetLeft);
              const accuracy = Math.round(marginWidth / (marginWidth + delta) * 100);
              incrementScore(accuracy, crop);
              done(err);
            });
          } else {
            done();
          }
        },
        // Top edge accuracy
        function (done) {
          if (marginHeight) {
            sharp(filename).resize(metadata.width, salientHeight).crop(crops[crop]).toBuffer(function (err, data, info) {
              const delta = Math.abs(userData[file].top + info.cropOffsetTop);
              const accuracy = Math.round(marginHeight / (marginHeight + delta) * 100);
              incrementScore(accuracy, crop);
              done(err);
            });
          } else {
            done();
          }
        }
      ], done);
    }, done);
  });
}, function () {
  const report = [];
  Object.keys(scores).forEach(function (accuracy) {
    report.push(
      Object.assign({
        accuracy: Number(accuracy)
      }, scores[accuracy])
    );
  });
  fs.writeFileSync('report.json', JSON.stringify(report, null, 2));
});

View File

@@ -1,74 +0,0 @@
// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0
'use strict';
const fs = require('fs');
const path = require('path');
const userDataDir = 'UserData';
const images = {};
const median = function (values) {
  values.sort(function (a, b) {
    return a - b;
  });
  const half = Math.floor(values.length / 2);
  if (values.length % 2) {
    return values[half];
  } else {
    return Math.floor((values[half - 1] + values[half]) / 2);
  }
};
// List of files
fs.readdirSync(userDataDir).forEach(function (file) {
  // Contents of file
  const lines = fs.readFileSync(path.join(userDataDir, file), { encoding: 'utf-8' }).split(/\r\n/);
  // First line = number of entries
  const entries = parseInt(lines[0], 10);
  // Verify number of entries
  if (entries !== 500) {
    throw new Error('Expecting 500 images in ' + file + ', found ' + entries);
  }
  // Keep track of which line we're on
  let linePos = 2;
  for (let i = 0; i < entries; i++) {
    // Get data for current image
    const filename = lines[linePos].replace(/\\/, path.sep);
    linePos = linePos + 2;
    const regions = lines[linePos].split('; ');
    linePos = linePos + 2;
    // Parse human-labelled regions for min/max coords
    const lefts = [];
    const tops = [];
    const rights = [];
    const bottoms = [];
    regions.forEach(function (region) {
      if (region.indexOf(' ') !== -1) {
        const coords = region.split(' ');
        lefts.push(parseInt(coords[0], 10));
        tops.push(parseInt(coords[1], 10));
        rights.push(parseInt(coords[2], 10));
        bottoms.push(parseInt(coords[3], 10));
      }
    });
    // Add image
    images[filename] = {
      left: median(lefts),
      top: median(tops),
      right: median(rights),
      bottom: median(bottoms)
    };
  }
});
// Verify number of images found
const imageCount = Object.keys(images).length;
if (imageCount === 5000) {
  // Write output
  fs.writeFileSync('userData.json', JSON.stringify(images, null, 2));
} else {
  throw new Error('Expecting 5000 images, found ' + imageCount);
}