Image Similarity Comparison
1. Web Worker implementation (in the browser)
Tech stack:
Front end: Vue 2
Comparison algorithms: pixelmatch, resemble
Workers and thread pool: threads.js
Ⅰ. pixelmatch + threads.js
1. Dependency installation and configuration
shell
# Install threads (usable in both Node and the browser)
npm install threads tiny-worker
# Install the webpack plugin. By default worker.js only supports ES5, so import/require cannot be used without it.
npm install -D threads-plugin
vue.config.js
js
// vue.config.js
const path = require('path')
// Import the plugin
const ThreadsPlugin = require('threads-plugin')

function resolve(dir) {
  return path.join(__dirname, dir)
}

module.exports = {
  // Register the plugin so ES6 syntax (import) can be used inside workers
  configureWebpack: {
    resolve: {
      alias: {
        "@": resolve('src')
      }
    },
    plugins: [new ThreadsPlugin()]
  }
}
2. Business logic
If the workload is large we cannot spawn threads without limit, so we apply the idea of a queue plus a thread pool. For this we use threads.js: https://threads.js.org/
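Stripped of the business details, the threads.js usage in this section boils down to the following minimal sketch (error handling omitted; the worker path and the getImageBlob function are the ones defined later in this section):
js
import { spawn, Pool, Worker, Thread } from "threads"

// One-off worker: spawn, call an exposed function, terminate.
async function fetchBlobOnce(url) {
  const worker = await spawn(new Worker("@/worker/ThreadWorkerMulti"))
  const blob = await worker.getImageBlob(url)
  await Thread.terminate(worker)
  return blob
}

// Pooled workers: tasks are queued and handed to whichever worker is free.
async function fetchBlobsPooled(urls) {
  const pool = Pool(() => spawn(new Worker("@/worker/ThreadWorkerMulti")), { concurrency: 4 })
  const tasks = urls.map(url => pool.queue(worker => worker.getImageBlob(url)))
  const results = await Promise.all(tasks)
  // Drain the queue, then shut the workers down
  await pool.completed()
  await pool.terminate()
  return results
}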
Main thread
js
// Add the following code to the component that handles this business logic.
// Requires: import { spawn, Pool, Worker, Thread } from "threads"
// mock data
const leftImg = "https://ddiatmall.cafe24.com/web/product/medium/202103/1efbc777522e4279926b9acc02d4d1a7.jpg";
const eventData = {
imgUrl: "http://proj.kr/china/uca/UCA-023/600_c/f4f4f4.jpg",
listId: "1-PABSFWRWER0123",
}
const searchDataList = [];
for (let i = 0; i < 10; i++) {
searchDataList.push(eventData);
}
console.log("searchDataList ==", searchDataList)
// Invoke the method
this.imgDiffTask(leftImg, searchDataList, 0.1)
// The actual business method
async imgDiffTask(leftImg, searchImgList, threshold) {
// [Step 1]
// Download the left-hand image and convert it to a Blob. This only needs to run once per row click.
// Create the worker object
const workerFun = await spawn(new Worker("@/worker/ThreadWorkerMulti"))
const blob = await workerFun.getImageBlob(leftImg)
await Thread.terminate(workerFun);
const start = performance.now();
const myTasks = []
// [Step 2]
// Compare each right-hand image against the left-hand one and compute the similarity
const pool = Pool(() => spawn(new Worker("@/worker/ThreadWorkerMulti")), {
concurrency: 4,
name: "task-match_img"
})
for (let i = 0; i < searchImgList.length; i++) {
let matchData = searchImgList[i];
const task = pool.queue(worker => worker.doImgDiff(blob, matchData.imgUrl, matchData.listId, threshold))
.then()
.catch((error) => {
console.log("error ==", error)
})
.finally();
myTasks.push(task);
}
const result = await Promise.all(myTasks)
.finally(() => {
// Shut down the thread pool
pool.completed();
pool.terminate();
})
const time = performance.now() - start;
console.log('Time taken: ' + time)
console.log("result ==", result)
return result;
}
Worker thread
js
// ThreadWorkerMulti.js
/* eslint-disable */
import {expose} from "threads"
import pixelmatch from 'pixelmatch'
/**
* Compare two images and return their mismatch percentage
* @param tmpBlob Blob of the source (left-hand) image
* @param img2Url URL of the image to compare against
* @param listId unique identifier for the result row
* @param threshold pixelmatch threshold coefficient
*/
async function doImgDiff(tmpBlob, img2Url, listId, threshold) {
// Source image, i.e. the left-hand image
// const tmpBlob = await getImageBlob(img1Url)
// Target image, i.e. an image from the right-hand search-result list
const tmpBlob2 = await getImageBlob(img2Url)
const imageBitmap = await self.createImageBitmap(tmpBlob);
const imageBitmap2 = await self.createImageBitmap(tmpBlob2);
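// Size the canvas to the first image and draw both bitmaps onto it; drawImage
// scales the second image to the same dimensions, which pixelmatch requires
// (both inputs and the diff buffer must share the same width and height).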
let canvas = new OffscreenCanvas(imageBitmap.width, imageBitmap.height);
// willReadFrequently avoids the "Canvas2D: Multiple readback operations using getImageData are faster with the willReadFrequently attribute set to true" warning
let ctx = canvas.getContext('2d',{ willReadFrequently: true});
const diff = ctx.createImageData(imageBitmap.width, imageBitmap.height)
const imageData = getImageData(imageBitmap, ctx, canvas);
const imageData2 = getImageData(imageBitmap2, ctx, canvas);
const score = getImgScore(imageData, imageData2, diff, threshold, listId)
return score;
}
/**
* Fetch an image as a Blob
* @param url
* @returns {Promise<unknown>}
*/
function getImageBlob(url) {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
url = "https://mp-statistics-dev.hanpda.com/tools/mstats/api/product/images?imageUrl=" + url
xhr.open("get", url, true);
xhr.responseType = "blob";
xhr.onload = function () {
if (this.status == 200) {
resolve(this.response);
} else {
reject(new Error('error-'));
}
};
xhr.send();
xhr.onerror = () => {
reject(new Error('error-'));
}
})
}
/**
* Compute the similarity score between two images
* @param imageData1
* @param imageData2
* @param diff
* @param _threshold pixelmatch threshold
* @param _listId unique identifier
* @returns {{listId, percent: number}}
*/
function getImgScore(imageData1, imageData2, diff, _threshold, _listId) {
const score = pixelmatch(imageData1.data, imageData2.data, diff.data, imageData1.width, imageData1.height, {threshold: _threshold})
// pixelmatch returns the number of mismatched pixels; convert it to a percentage with one decimal place
const percent = ~~(score / (imageData1.width * imageData1.height) * 1000) / 10
const result = {
listId: _listId,
percent: percent
}
console.log("result ===", result)
return result;
}
function getImageData(imageBitmap, ctx, canvas) {
ctx.drawImage(imageBitmap, 0, 0, canvas.width, canvas.height)
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height)
return imageData;
}
// Functions exposed to the main thread
const exposeFun={
doImgDiff, getImageBlob
}
expose(exposeFun)
Ⅱ. resemble + threads.js
1. Dependency installation and configuration
shell
# Running in the browser does not require node-canvas
npm install resemblejs
npm install --no-optional
# --------------------------------
# Include the modified resemble.js in the Vue project:
# place resemble.js (the modified version below) in the /public folder
# and reference it in index.html:
<script src="<%= BASE_URL %>/resemble.js"></script>
Resemble.js (modified version)
javascript
// Modified Resemble.js source: canvas replaced with OffscreenCanvas so images can be processed as ImageBitmap inside a worker
/*
James Cryer / Huddle
URL: https://github.com/Huddle/Resemble.js
*/
var naiveFallback = function () {
// ISC (c) 2011-2019 https://github.com/medikoo/es5-ext/blob/master/global.js
if (typeof self === "object" && self) {
return self;
}
if (typeof window === "object" && window) {
return window;
}
throw new Error("Unable to resolve global `this`");
};
var getGlobalThis = function () {
// ISC (c) 2011-2019 https://github.com/medikoo/es5-ext/blob/master/global.js
// Fallback to standard globalThis if available
if (typeof globalThis === "object" && globalThis) {
return globalThis;
}
try {
Object.defineProperty(Object.prototype, "__global__", {
get: function () {
return this;
},
configurable: true
});
} catch (error) {
return naiveFallback();
}
try {
// eslint-disable-next-line no-undef
if (!__global__) {
return naiveFallback();
}
return __global__; // eslint-disable-line no-undef
} finally {
delete Object.prototype.__global__;
}
};
var isNode = function () {
const globalPolyfill = getGlobalThis();
return typeof globalPolyfill.process !== "undefined" && globalPolyfill.process.versions && globalPolyfill.process.versions.node;
};
(function (root, factory) {
"use strict";
if (typeof define === "function" && define.amd) {
define([], factory);
} else if (typeof module === "object" && module.exports) {
module.exports = factory();
} else {
root.resemble = factory();
}
})(this /* eslint-disable-line no-invalid-this*/, function () {
"use strict";
var Img;
var Canvas;
var loadNodeCanvasImage;
function createCanvas(width, height) {
/* if (isNode()) {
return Canvas.createCanvas(width, height);
}*/
// console.log("creating an OffscreenCanvas")
var cnvs = new OffscreenCanvas(width,height)
return cnvs;
}
var oldGlobalSettings = {};
var globalOutputSettings = oldGlobalSettings;
var resemble = function (fileData) {
var pixelTransparency = 1;
var errorPixelColor = {
// Color for Error Pixels. Between 0 and 255.
red: 255,
green: 0,
blue: 255,
alpha: 255
};
var targetPix = { r: 0, g: 0, b: 0, a: 0 }; // isAntialiased
var errorPixelTransform = {
flat: function (px, offset) {
px[offset] = errorPixelColor.red;
px[offset + 1] = errorPixelColor.green;
px[offset + 2] = errorPixelColor.blue;
px[offset + 3] = errorPixelColor.alpha;
},
movement: function (px, offset, d1, d2) {
px[offset] = (d2.r * (errorPixelColor.red / 255) + errorPixelColor.red) / 2;
px[offset + 1] = (d2.g * (errorPixelColor.green / 255) + errorPixelColor.green) / 2;
px[offset + 2] = (d2.b * (errorPixelColor.blue / 255) + errorPixelColor.blue) / 2;
px[offset + 3] = d2.a;
},
flatDifferenceIntensity: function (px, offset, d1, d2) {
px[offset] = errorPixelColor.red;
px[offset + 1] = errorPixelColor.green;
px[offset + 2] = errorPixelColor.blue;
px[offset + 3] = colorsDistance(d1, d2);
},
movementDifferenceIntensity: function (px, offset, d1, d2) {
var ratio = (colorsDistance(d1, d2) / 255) * 0.8;
px[offset] = (1 - ratio) * (d2.r * (errorPixelColor.red / 255)) + ratio * errorPixelColor.red;
px[offset + 1] = (1 - ratio) * (d2.g * (errorPixelColor.green / 255)) + ratio * errorPixelColor.green;
px[offset + 2] = (1 - ratio) * (d2.b * (errorPixelColor.blue / 255)) + ratio * errorPixelColor.blue;
px[offset + 3] = d2.a;
},
diffOnly: function (px, offset, d1, d2) {
px[offset] = d2.r;
px[offset + 1] = d2.g;
px[offset + 2] = d2.b;
px[offset + 3] = d2.a;
}
};
var errorPixel = errorPixelTransform.flat;
var errorType;
var boundingBoxes;
var ignoredBoxes;
var ignoreAreasColoredWith;
var largeImageThreshold = 1200;
var useCrossOrigin = true;
var data = {};
var images = [];
var updateCallbackArray = [];
var tolerance = {
// between 0 and 255
red: 16,
green: 16,
blue: 16,
alpha: 16,
minBrightness: 16,
maxBrightness: 240
};
var ignoreAntialiasing = false;
var ignoreColors = false;
var scaleToSameSize = false;
var compareOnly = false;
var returnEarlyThreshold;
function colorsDistance(c1, c2) {
return (Math.abs(c1.r - c2.r) + Math.abs(c1.g - c2.g) + Math.abs(c1.b - c2.b)) / 3;
}
function withinBoundingBox(x, y, width, height, box) {
return x > (box.left || 0) && x < (box.right || width) && y > (box.top || 0) && y < (box.bottom || height);
}
function withinComparedArea(x, y, width, height, pixel2) {
var isIncluded = true;
var i;
var boundingBox;
var ignoredBox;
var selected;
var ignored;
if (boundingBoxes instanceof Array) {
selected = false;
for (i = 0; i < boundingBoxes.length; i++) {
boundingBox = boundingBoxes[i];
if (withinBoundingBox(x, y, width, height, boundingBox)) {
selected = true;
break;
}
}
}
if (ignoredBoxes instanceof Array) {
ignored = true;
for (i = 0; i < ignoredBoxes.length; i++) {
ignoredBox = ignoredBoxes[i];
if (withinBoundingBox(x, y, width, height, ignoredBox)) {
ignored = false;
break;
}
}
}
if (ignoreAreasColoredWith) {
return colorsDistance(pixel2, ignoreAreasColoredWith) !== 0;
}
if (selected === undefined && ignored === undefined) {
return true;
}
if (selected === false && ignored === true) {
return false;
}
if (selected === true || ignored === true) {
isIncluded = true;
}
if (selected === false || ignored === false) {
isIncluded = false;
}
return isIncluded;
}
function triggerDataUpdate() {
var len = updateCallbackArray.length;
var i;
for (i = 0; i < len; i++) {
if (typeof updateCallbackArray[i] === "function") {
updateCallbackArray[i](data);
}
}
}
function loop(w, h, callback) {
var x;
var y;
for (x = 0; x < w; x++) {
for (y = 0; y < h; y++) {
callback(x, y);
}
}
}
function parseImage(sourceImageData, width, height) {
var pixelCount = 0;
var redTotal = 0;
var greenTotal = 0;
var blueTotal = 0;
var alphaTotal = 0;
var brightnessTotal = 0;
var whiteTotal = 0;
var blackTotal = 0;
loop(width, height, function (horizontalPos, verticalPos) {
var offset = (verticalPos * width + horizontalPos) * 4;
var red = sourceImageData[offset];
var green = sourceImageData[offset + 1];
var blue = sourceImageData[offset + 2];
var alpha = sourceImageData[offset + 3];
var brightness = getBrightness(red, green, blue);
if (red === green && red === blue && alpha) {
if (red === 0) {
blackTotal++;
} else if (red === 255) {
whiteTotal++;
}
}
pixelCount++;
redTotal += (red / 255) * 100;
greenTotal += (green / 255) * 100;
blueTotal += (blue / 255) * 100;
alphaTotal += ((255 - alpha) / 255) * 100;
brightnessTotal += (brightness / 255) * 100;
});
data.red = Math.floor(redTotal / pixelCount);
data.green = Math.floor(greenTotal / pixelCount);
data.blue = Math.floor(blueTotal / pixelCount);
data.alpha = Math.floor(alphaTotal / pixelCount);
data.brightness = Math.floor(brightnessTotal / pixelCount);
data.white = Math.floor((whiteTotal / pixelCount) * 100);
data.black = Math.floor((blackTotal / pixelCount) * 100);
triggerDataUpdate();
}
function onLoadImage(hiddenImage, callback) {
// don't assign to hiddenImage, see https://github.com/Huddle/Resemble.js/pull/87/commits/300d43352a2845aad289b254bfbdc7cd6a37e2d7
var width = hiddenImage.width;
var height = hiddenImage.height;
if (scaleToSameSize && images.length === 1) {
width = images[0].width;
height = images[0].height;
}
var hiddenCanvas = createCanvas(width, height);
var imageData;
hiddenCanvas.getContext("2d").drawImage(hiddenImage, 0, 0, width, height);
imageData = hiddenCanvas.getContext("2d").getImageData(0, 0, width, height);
images.push(imageData);
callback(imageData, width, height);
}
function loadImageData(fileDataForImage, callback) {
onLoadImage(fileDataForImage, callback);
}
function isColorSimilar(a, b, color) {
var absDiff = Math.abs(a - b);
if (typeof a === "undefined") {
return false;
}
if (typeof b === "undefined") {
return false;
}
if (a === b) {
return true;
} else if (absDiff < tolerance[color]) {
return true;
}
return false;
}
function isPixelBrightnessSimilar(d1, d2) {
var alpha = isColorSimilar(d1.a, d2.a, "alpha");
var brightness = isColorSimilar(d1.brightness, d2.brightness, "minBrightness");
return brightness && alpha;
}
function getBrightness(r, g, b) {
return 0.3 * r + 0.59 * g + 0.11 * b;
}
function isRGBSame(d1, d2) {
var red = d1.r === d2.r;
var green = d1.g === d2.g;
var blue = d1.b === d2.b;
return red && green && blue;
}
function isRGBSimilar(d1, d2) {
var red = isColorSimilar(d1.r, d2.r, "red");
var green = isColorSimilar(d1.g, d2.g, "green");
var blue = isColorSimilar(d1.b, d2.b, "blue");
var alpha = isColorSimilar(d1.a, d2.a, "alpha");
return red && green && blue && alpha;
}
function isContrasting(d1, d2) {
return Math.abs(d1.brightness - d2.brightness) > tolerance.maxBrightness;
}
function getHue(red, green, blue) {
var r = red / 255;
var g = green / 255;
var b = blue / 255;
var max = Math.max(r, g, b);
var min = Math.min(r, g, b);
var h;
var d;
if (max === min) {
h = 0; // achromatic
} else {
d = max - min;
switch (max) {
case r:
h = (g - b) / d + (g < b ? 6 : 0);
break;
case g:
h = (b - r) / d + 2;
break;
case b:
h = (r - g) / d + 4;
break;
default:
h /= 6;
}
}
return h;
}
function isAntialiased(sourcePix, pix, cacheSet, verticalPos, horizontalPos, width) {
var offset;
var distance = 1;
var i;
var j;
var hasHighContrastSibling = 0;
var hasSiblingWithDifferentHue = 0;
var hasEquivalentSibling = 0;
addHueInfo(sourcePix);
for (i = distance * -1; i <= distance; i++) {
for (j = distance * -1; j <= distance; j++) {
if (i === 0 && j === 0) {
// ignore source pixel
} else {
offset = ((verticalPos + j) * width + (horizontalPos + i)) * 4;
if (!getPixelInfo(targetPix, pix, offset, cacheSet)) {
continue;
}
addBrightnessInfo(targetPix);
addHueInfo(targetPix);
if (isContrasting(sourcePix, targetPix)) {
hasHighContrastSibling++;
}
if (isRGBSame(sourcePix, targetPix)) {
hasEquivalentSibling++;
}
if (Math.abs(targetPix.h - sourcePix.h) > 0.3) {
hasSiblingWithDifferentHue++;
}
if (hasSiblingWithDifferentHue > 1 || hasHighContrastSibling > 1) {
return true;
}
}
}
}
if (hasEquivalentSibling < 2) {
return true;
}
return false;
}
function copyPixel(px, offset, pix) {
if (errorType === "diffOnly") {
return;
}
px[offset] = pix.r; // r
px[offset + 1] = pix.g; // g
px[offset + 2] = pix.b; // b
px[offset + 3] = pix.a * pixelTransparency; // a
}
function copyGrayScalePixel(px, offset, pix) {
if (errorType === "diffOnly") {
return;
}
px[offset] = pix.brightness; // r
px[offset + 1] = pix.brightness; // g
px[offset + 2] = pix.brightness; // b
px[offset + 3] = pix.a * pixelTransparency; // a
}
function getPixelInfo(dst, pix, offset) {
if (pix.length > offset) {
dst.r = pix[offset];
dst.g = pix[offset + 1];
dst.b = pix[offset + 2];
dst.a = pix[offset + 3];
return true;
}
return false;
}
function addBrightnessInfo(pix) {
pix.brightness = getBrightness(pix.r, pix.g, pix.b); // 'corrected' lightness
}
function addHueInfo(pix) {
pix.h = getHue(pix.r, pix.g, pix.b);
}
function analyseImages(img1, img2, width, height) {
var data1 = img1.data;
var data2 = img2.data;
var hiddenCanvas;
var context;
var imgd;
var pix;
if (!compareOnly) {
hiddenCanvas = createCanvas(width, height);
context = hiddenCanvas.getContext("2d");
imgd = context.createImageData(width, height);
pix = imgd.data;
}
var mismatchCount = 0;
var diffBounds = {
top: height,
left: width,
bottom: 0,
right: 0
};
var updateBounds = function (x, y) {
diffBounds.left = Math.min(x, diffBounds.left);
diffBounds.right = Math.max(x, diffBounds.right);
diffBounds.top = Math.min(y, diffBounds.top);
diffBounds.bottom = Math.max(y, diffBounds.bottom);
};
var time = Date.now();
var skip;
if (!!largeImageThreshold && ignoreAntialiasing && (width > largeImageThreshold || height > largeImageThreshold)) {
skip = 6;
}
var pixel1 = { r: 0, g: 0, b: 0, a: 0 };
var pixel2 = { r: 0, g: 0, b: 0, a: 0 };
var skipTheRest = false;
loop(width, height, function (horizontalPos, verticalPos) {
if (skipTheRest) {
return;
}
if (skip) {
// only skip if the image isn't small
if (verticalPos % skip === 0 || horizontalPos % skip === 0) {
return;
}
}
var offset = (verticalPos * width + horizontalPos) * 4;
if (!getPixelInfo(pixel1, data1, offset, 1) || !getPixelInfo(pixel2, data2, offset, 2)) {
return;
}
var isWithinComparedArea = withinComparedArea(horizontalPos, verticalPos, width, height, pixel2);
if (ignoreColors) {
addBrightnessInfo(pixel1);
addBrightnessInfo(pixel2);
if (isPixelBrightnessSimilar(pixel1, pixel2) || !isWithinComparedArea) {
if (!compareOnly) {
copyGrayScalePixel(pix, offset, pixel2);
}
} else {
if (!compareOnly) {
errorPixel(pix, offset, pixel1, pixel2);
}
mismatchCount++;
updateBounds(horizontalPos, verticalPos);
}
return;
}
if (isRGBSimilar(pixel1, pixel2) || !isWithinComparedArea) {
if (!compareOnly) {
copyPixel(pix, offset, pixel1);
}
} else if (
ignoreAntialiasing &&
(addBrightnessInfo(pixel1), // jit pixel info augmentation looks a little weird, sorry.
addBrightnessInfo(pixel2),
isAntialiased(pixel1, data1, 1, verticalPos, horizontalPos, width) || isAntialiased(pixel2, data2, 2, verticalPos, horizontalPos, width))
) {
if (isPixelBrightnessSimilar(pixel1, pixel2) || !isWithinComparedArea) {
if (!compareOnly) {
copyGrayScalePixel(pix, offset, pixel2);
}
} else {
if (!compareOnly) {
errorPixel(pix, offset, pixel1, pixel2);
}
mismatchCount++;
updateBounds(horizontalPos, verticalPos);
}
} else {
if (!compareOnly) {
errorPixel(pix, offset, pixel1, pixel2);
}
mismatchCount++;
updateBounds(horizontalPos, verticalPos);
}
if (compareOnly) {
var currentMisMatchPercent = (mismatchCount / (height * width)) * 100;
if (currentMisMatchPercent > returnEarlyThreshold) {
skipTheRest = true;
}
}
});
data.rawMisMatchPercentage = (mismatchCount / (height * width)) * 100;
data.misMatchPercentage = data.rawMisMatchPercentage.toFixed(2);
data.diffBounds = diffBounds;
data.analysisTime = Date.now() - time;
data.getImageDataUrl = function (text) {
if (compareOnly) {
throw Error("No diff image available - ran in compareOnly mode");
}
var barHeight = 0;
if (text) {
barHeight = addLabel(text, context, hiddenCanvas);
}
context.putImageData(imgd, 0, barHeight);
return hiddenCanvas.toDataURL("image/png");
};
if (!compareOnly && hiddenCanvas.toBuffer) {
data.getBuffer = function (includeOriginal) {
if (includeOriginal) {
var imageWidth = hiddenCanvas.width + 2;
hiddenCanvas.width = imageWidth * 3;
context.putImageData(img1, 0, 0);
context.putImageData(img2, imageWidth, 0);
context.putImageData(imgd, imageWidth * 2, 0);
} else {
context.putImageData(imgd, 0, 0);
}
return hiddenCanvas.toBuffer();
};
}
}
function addLabel(text, context, hiddenCanvas) {
var textPadding = 2;
context.font = "12px sans-serif";
var textWidth = context.measureText(text).width + textPadding * 2;
var barHeight = 22;
if (textWidth > hiddenCanvas.width) {
hiddenCanvas.width = textWidth;
}
hiddenCanvas.height += barHeight;
context.fillStyle = "#666";
context.fillRect(0, 0, hiddenCanvas.width, barHeight - 4);
context.fillStyle = "#fff";
context.fillRect(0, barHeight - 4, hiddenCanvas.width, 4);
context.fillStyle = "#fff";
context.textBaseline = "top";
context.font = "12px sans-serif";
context.fillText(text, textPadding, 1);
return barHeight;
}
function normalise(img, w, h) {
var c;
var context;
if (img.height < h || img.width < w) {
c = createCanvas(w, h);
context = c.getContext("2d");
context.putImageData(img, 0, 0);
return context.getImageData(0, 0, w, h);
}
return img;
}
function outputSettings(options) {
var key;
if (options.errorColor) {
for (key in options.errorColor) {
if (options.errorColor.hasOwnProperty(key)) {
errorPixelColor[key] = options.errorColor[key] === void 0 ? errorPixelColor[key] : options.errorColor[key];
}
}
}
if (options.errorType && errorPixelTransform[options.errorType]) {
errorPixel = errorPixelTransform[options.errorType];
errorType = options.errorType;
}
if (options.errorPixel && typeof options.errorPixel === "function") {
errorPixel = options.errorPixel;
}
pixelTransparency = isNaN(Number(options.transparency)) ? pixelTransparency : options.transparency;
if (options.largeImageThreshold !== undefined) {
largeImageThreshold = options.largeImageThreshold;
}
if (options.useCrossOrigin !== undefined) {
useCrossOrigin = options.useCrossOrigin;
}
if (options.boundingBox !== undefined) {
boundingBoxes = [options.boundingBox];
}
if (options.ignoredBox !== undefined) {
ignoredBoxes = [options.ignoredBox];
}
if (options.boundingBoxes !== undefined) {
boundingBoxes = options.boundingBoxes;
}
if (options.ignoredBoxes !== undefined) {
ignoredBoxes = options.ignoredBoxes;
}
if (options.ignoreAreasColoredWith !== undefined) {
ignoreAreasColoredWith = options.ignoreAreasColoredWith;
}
}
function compare(one, two) {
if (globalOutputSettings !== oldGlobalSettings) {
outputSettings(globalOutputSettings);
}
function onceWeHaveBoth() {
var width;
var height;
if (images.length === 2) {
if (images[0].error || images[1].error) {
data = {};
data.error = images[0].error ? images[0].error : images[1].error;
triggerDataUpdate();
return;
}
width = images[0].width > images[1].width ? images[0].width : images[1].width;
height = images[0].height > images[1].height ? images[0].height : images[1].height;
if (images[0].width === images[1].width && images[0].height === images[1].height) {
data.isSameDimensions = true;
} else {
data.isSameDimensions = false;
}
data.dimensionDifference = {
width: images[0].width - images[1].width,
height: images[0].height - images[1].height
};
analyseImages(normalise(images[0], width, height), normalise(images[1], width, height), width, height);
triggerDataUpdate();
}
}
images = [];
loadImageData(one, onceWeHaveBoth);
loadImageData(two, onceWeHaveBoth);
}
function getCompareApi(param) {
var secondFileData;
var hasMethod = typeof param === "function";
if (!hasMethod) {
// assume it's file data
secondFileData = param;
}
var self = {
setReturnEarlyThreshold: function (threshold) {
if (threshold) {
compareOnly = true;
returnEarlyThreshold = threshold;
}
return self;
},
scaleToSameSize: function () {
scaleToSameSize = true;
if (hasMethod) {
param();
}
return self;
},
useOriginalSize: function () {
scaleToSameSize = false;
if (hasMethod) {
param();
}
return self;
},
ignoreNothing: function () {
tolerance.red = 0;
tolerance.green = 0;
tolerance.blue = 0;
tolerance.alpha = 0;
tolerance.minBrightness = 0;
tolerance.maxBrightness = 255;
ignoreAntialiasing = false;
ignoreColors = false;
if (hasMethod) {
param();
}
return self;
},
ignoreLess: function () {
tolerance.red = 16;
tolerance.green = 16;
tolerance.blue = 16;
tolerance.alpha = 16;
tolerance.minBrightness = 16;
tolerance.maxBrightness = 240;
ignoreAntialiasing = false;
ignoreColors = false;
if (hasMethod) {
param();
}
return self;
},
ignoreAntialiasing: function () {
tolerance.red = 32;
tolerance.green = 32;
tolerance.blue = 32;
tolerance.alpha = 32;
tolerance.minBrightness = 64;
tolerance.maxBrightness = 96;
ignoreAntialiasing = true;
ignoreColors = false;
if (hasMethod) {
param();
}
return self;
},
ignoreColors: function () {
tolerance.alpha = 16;
tolerance.minBrightness = 16;
tolerance.maxBrightness = 240;
ignoreAntialiasing = false;
ignoreColors = true;
if (hasMethod) {
param();
}
return self;
},
ignoreAlpha: function () {
tolerance.red = 16;
tolerance.green = 16;
tolerance.blue = 16;
tolerance.alpha = 255;
tolerance.minBrightness = 16;
tolerance.maxBrightness = 240;
ignoreAntialiasing = false;
ignoreColors = false;
if (hasMethod) {
param();
}
return self;
},
repaint: function () {
if (hasMethod) {
param();
}
return self;
},
outputSettings: function (options) {
outputSettings(options);
return self;
},
onComplete: function (callback) {
updateCallbackArray.push(callback);
var wrapper = function () {
compare(fileData, secondFileData);
};
wrapper();
return getCompareApi(wrapper);
},
setupCustomTolerance: function (customSettings) {
for (var property in tolerance) {
if (!customSettings.hasOwnProperty(property)) {
continue;
}
tolerance[property] = customSettings[property];
}
}
};
return self;
}
var rootSelf = {
onComplete: function (callback) {
updateCallbackArray.push(callback);
loadImageData(fileData, function (imageData, width, height) {
parseImage(imageData.data, width, height);
});
},
compareTo: function (secondFileData) {
return getCompareApi(secondFileData);
},
outputSettings: function (options) {
outputSettings(options);
return rootSelf;
}
};
return rootSelf;
};
function setGlobalOutputSettings(settings) {
globalOutputSettings = settings;
return resemble;
}
function applyIgnore(api, ignore, customTolerance) {
switch (ignore) {
case "nothing":
api.ignoreNothing();
break;
case "less":
api.ignoreLess();
break;
case "antialiasing":
api.ignoreAntialiasing();
break;
case "colors":
api.ignoreColors();
break;
case "alpha":
api.ignoreAlpha();
break;
default:
throw new Error("Invalid ignore: " + ignore);
}
api.setupCustomTolerance(customTolerance);
}
resemble.compare = function (image1, image2, options, cb) {
var callback;
var opt;
if (typeof options === "function") {
callback = options;
opt = {};
} else {
callback = cb;
opt = options || {};
}
var res = resemble(image1);
var compare;
if (opt.output) {
res.outputSettings(opt.output);
}
compare = res.compareTo(image2);
if (opt.returnEarlyThreshold) {
compare.setReturnEarlyThreshold(opt.returnEarlyThreshold);
}
if (opt.scaleToSameSize) {
compare.scaleToSameSize();
}
var toleranceSettings = opt.tolerance || {};
if (typeof opt.ignore === "string") {
applyIgnore(compare, opt.ignore, toleranceSettings);
} else if (opt.ignore && opt.ignore.forEach) {
opt.ignore.forEach(function (v) {
applyIgnore(compare, v, toleranceSettings);
});
}
compare.onComplete(function (data) {
if (data.error) {
callback(data.error);
} else {
callback(null, data);
}
});
};
resemble.outputSettings = setGlobalOutputSettings;
return resemble;
});
2. Business logic
Main thread
javascript
// MatchingSearch.vue
// Requires: import { spawn, Pool, Worker, Thread } from "threads"
// Trigger: build the comparison list and start the task
const leftImg = await this.current.mainImage;
const searchDataList = [];
results.forEach(item => {
const tmpData = {
custom: {
leftImg: leftImg,
rightImg: item.ecImageUrl
},
imgUrl: item.ecImageUrl
}
searchDataList.push(tmpData)
})
this.imgDiffTaskResembleTask(leftImg, searchDataList)
/**
* The actual comparison method
*/
async imgDiffTaskResembleTask(leftImg, searchImgList){
console.log("searchImgList ==", searchImgList.length)
const workerFun = await spawn(new Worker("@/worker/RemblThreadWorker"))
const img1Blob = await workerFun.getImageBlob(leftImg)
await Thread.terminate(workerFun);
const start = performance.now();
const myTasks = []
const pool = Pool(() => spawn(new Worker("@/worker/RemblThreadWorker")), {
concurrency: 4,
name: "task-match_img_rembl"
})
let len = searchImgList.length;
for (let i = 0; i < len; i++) {
let matchData = searchImgList[i];
const task = pool.queue(worker => worker.getRemblMisMatchPercentage(img1Blob, matchData.imgUrl, matchData.custom))
.then()
.catch((error) => {
console.log("error ==", error)
})
.finally();
myTasks.push(task);
}
const result = await Promise.all(myTasks)
.finally(() => {
// Shut down the thread pool
pool.completed();
pool.terminate();
})
const time = performance.now() - start;
console.log('Time taken: ' + time)
console.log("result ==", result)
return result;
}
Worker thread
javascript
// RemblThreadWorker.js
/* eslint-disable */
import {expose} from "threads"
// Load the modified resemble.js from /public
importScripts("../resemble.js")
/**
* Run resemble's single-image analysis and return its data
* @param analysisPng
* @returns {Promise<unknown>}
*/
function getRemblAnalysisInfo(analysisPng) {
return new Promise(resolve => {
resemble(analysisPng).onComplete(function (data) {
resolve(data)
})
})
}
/**
* Compare two images and return the similarity percentage
* @param blob1
* @param img2Url
* @param custom
* @returns {Promise<unknown>}
*/
async function getRemblMisMatchPercentage(blob1, img2Url, custom) {
const blob2 = await getImageBlob(img2Url);
const imageBitmap = await self.createImageBitmap(blob1);
const image2Bitmap = await self.createImageBitmap(blob2);
const options = await getRemblOptions(imageBitmap, image2Bitmap);
return new Promise(resolve => {
resemble(imageBitmap)
.compareTo(image2Bitmap)
/* .ignoreNothing()*/
// Scale both images to the same size
.scaleToSameSize()
// Ignore antialiasing (more tolerant to small positional differences)
.ignoreAntialiasing()
// Custom output settings
.outputSettings(options)
.onComplete((data) => {
//console.log("不同点 :" + data.misMatchPercentage + "%")
const result = {
percentage: (100 - data.misMatchPercentage).toFixed(2),
custom: custom
}
resolve(result);
});
})
}
/**
* Build the resemble options object
* @param blob1
* @param blob2
* @returns {Promise<{}>}
*/
async function getRemblOptions(blob1, blob2) {
const options = {outputDiff: false};
/* // Ignore white
const ignoreAreasColoredWith = {
r: 255,
g: 255,
b: 255,
a: 0
}
// Analyse the second image's share of white pixels; if it exceeds 55%, the white background must not be ignored during comparison.
const analysisInfo = await getRemblAnalysisInfo(blob2)
const white = analysisInfo.white;
const tmpArr = [];
tmpArr.push(analysisInfo.alpha)
tmpArr.push(analysisInfo.black)
tmpArr.push(analysisInfo.blue)
tmpArr.push(analysisInfo.brightness)
tmpArr.push(analysisInfo.green)
tmpArr.push(analysisInfo.red)
const max = Math.max(...tmpArr);
// Avoid false high similarity caused by a shared white (255,255,255) background
if (white / max < 0.55) {
options.ignoreAreasColoredWith = ignoreAreasColoredWith;
}
options.largeImageThreshold = 0
options.outputDiff = false*/
return options;
}
/**
* Returns the ratio indicating how dominant the white background is
*/
async function isBackgroundWhite(img) {
const analysisInfo = await getRemblAnalysisInfo(img)
const white = analysisInfo.white;
const tmpArr = [];
tmpArr.push(analysisInfo.alpha)
tmpArr.push(analysisInfo.black)
tmpArr.push(analysisInfo.blue)
tmpArr.push(analysisInfo.brightness)
tmpArr.push(analysisInfo.green)
tmpArr.push(analysisInfo.red)
const max = Math.max(...tmpArr);
// Avoid false high similarity caused by a shared white (255,255,255) background
return (white/max).toFixed(2);
}
/**
* Fetch an image as a Blob
* @param url
* @returns {Promise<unknown>}
*/
function getImageBlob(url) {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
url = "https://mp-statistics-dev.hanpda.com/tools/mstats/api/product/images?imageUrl=" + url
xhr.open("get", url, true);
xhr.responseType = "blob";
xhr.onload = function () {
if (this.status == 200) {
resolve(this.response);
} else {
reject(new Error('error-'));
}
};
xhr.send();
xhr.onerror = (error) => {
reject(new Error('error-'));
}
})
}
// Functions exposed to the main thread
const exposeFun = {getImageBlob, getRemblMisMatchPercentage}
expose(exposeFun)
Performance test
How the number of CPU cores affects performance.
nginx is used as a reverse proxy to forward image requests and work around CORS restrictions.
pixelmatch is configured with threshold = 0.1.
resemble uses its default configuration, with only resizing (scaleToSameSize) enabled.
Each timing is the average of 10 requests.
Times | Source size | CPU cores | Resemble (ms) | Pixelmatch (ms) |
---|---|---|---|---|
1904 | 119M | 12 | 24307 | 24605 |
1904 | 119M | 10 | 24083 | 43015 |
1904 | 119M | 8 | 24250 | 43209 |
1904 | 119M | 6 | 28744 | 48946 |
1904 | 119M | 4 | 31550 | 60546 |
Conclusion
Overall, resemble performs better than pixelmatch, especially in multi-threaded scenarios. The data also shows that the performance ceiling lines up closely with the browser's limit of at most 6 concurrent TCP connections per host. A 6-core configuration is recommended; use 8 cores for a higher-performance setup.
2. Image similarity in Java
Uses the JImageHash library to hash images, then computes the Hamming distance between hashes to quantify similarity.
In practice, multi-threaded performance was poor, so this approach was not pursued.
3. Image similarity in Node.js
Web service: the Express framework
Pool: piscina + shuffled-priority-queue
Algorithm libraries: imghash + hamming
Dependencies
shell
# axios for HTTP requests
npm install axios
# core hashing library
npm install blockhash-core
# computes the Hamming distance between two hashes
npm install hamming
# generates a hash from an image buffer
npm install imghash
# worker thread pool management
npm install piscina
# priority queue, used together with piscina
npm install shuffled-priority-queue
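Before wiring these into Express and Piscina, the core flow can be tried on its own. The sketch below is a standalone example (the file paths are placeholders); it uses the same 16-bit imghash call and the same hamming() comparison as the worker and service code that follows.
javascript
const imghash = require('imghash')
const hamming = require('hamming')

// Hash two images and compute the Hamming distance between the hashes.
async function compareImages(fileA, fileB) {
  // 16-bit hashes, the same setting used in workers/worker.js below
  const [hashA, hashB] = await Promise.all([
    imghash.hash(fileA, 16),
    imghash.hash(fileB, 16)
  ])
  const distance = hamming(hashA, hashB)
  // Smaller distance means more similar; the service below treats distance <= 5 as a match
  return { hashA, hashB, distance }
}

// Placeholder paths, for illustration only
compareImages('./a.jpg', './b.jpg').then(console.log).catch(console.error)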
Project structure
shell
--public
--routes
|--image-diff.js
--services
|--ImgPiscinaService.js
--workers
|--worker.js
Implementation
1. Configure the router in app.js
javascript
...
// Require the image-diff router
var imgRouter = require('./routes/image-diff')
...
// Mount the router
app.use('/img', imgRouter);
2. routes:image-diff.js
javascript
var express = require('express');
var router = express.Router();
const imgPiscinaService = require('../services/ImgPiscinaService')
router.get('/hash', async function (req, res, next) {
const bb = await imgPiscinaService.getImgHash();
res.send(bb);
});
// getHammingScore
router.get('/hamming', async function (req, res, next) {
res.send(imgPiscinaService.getHammingScore());
});
module.exports = router;
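A quick way to exercise the two routes once the app is running; the port (3000) is an assumption, and axios is already installed in the dependency step above.
javascript
const axios = require('axios')

// Assumes the Express app is listening on http://localhost:3000 and the router
// is mounted at /img as shown in app.js above.
async function main() {
  // Hashes for the mocked left/right image lists, computed in the piscina workers
  const { data: hashes } = await axios.get('http://localhost:3000/img/hash')
  console.log('left:', hashes.left.length, 'right:', hashes.right.length)

  // Hamming matches (distance <= 5) between the precomputed hash lists
  const { data: matches } = await axios.get('http://localhost:3000/img/hamming')
  console.log('matched left images:', matches.length)
}

main().catch(console.error)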
3. services:ImgPiscinaService.js
javascript
const Piscina = require('piscina')
const path = require('path');
const spq = require('shuffled-priority-queue')
const fs = require("fs");
const kItem = Symbol('item');
const configFile = path.resolve(__dirname, '../mock.json')
const leftMockFile = path.resolve(__dirname, '../mock-left.json')
const hammingLeftFile = path.resolve(__dirname, '../mock-hamming-left.json')
const hammingRightFile = path.resolve(__dirname, '../mock-hamming-right.json')
const hamming = require('hamming')
class PriorityTaskQueue {
queue = spq();
get size() {
return this.queue.length;
}
push(value) {
const queueOptions = value[Piscina.queueOptionsSymbol];
const priority = queueOptions ? (queueOptions.priority || 0) : 0;
value[kItem] = this.queue.add({priority, value});
}
remove(value) {
this.queue.remove(value[kItem]);
}
shift() {
return this.queue.shift().value;
}
}
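// Usage note: push() above reads Piscina.queueOptionsSymbol from the queued task
// to pick up an optional { priority }; the tasks submitted via pool.run() in
// doTask() below do not set it, so everything runs at the default priority 0.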
/**
* Create a worker thread pool with Piscina
* @type {Piscina}
*/
const pool = new Piscina({
// Path to the worker file
filename: path.resolve(__dirname, "../workers/worker.js"),
// Task queue, built here on shuffled-priority-queue
taskQueue: new PriorityTaskQueue(),
// Idle timeout for a single worker thread: 1500 ms
idleTimeout: 1500
})
/*const hammingPool = new Piscina({
filename: path.resolve(__dirname, "../workers/hamming-worker.js"),
taskQueue: new PriorityTaskQueue(),
idleTimeout: 1500
})*/
const ImgService = {
// Generate image hashes
/**
*
* @param leftList => { url:"http://10.15.1.175:80/img4/99.jpg" }
* @param rightList => { url:"http://10.15.1.175:80/img4/99.jpg"}
* @returns {Promise<{left: unknown[], right: unknown[]}>}
*/
async getImgHash(leftList, rightList) {
// mock data
const leftData = fs.readFileSync(leftMockFile, 'UTF-8').toString();
let leftJson = JSON.parse(leftData);
leftList = leftJson;
const data = fs.readFileSync(configFile, 'UTF-8').toString();
let json = JSON.parse(data);
rightList = json;
// Compute the image hashes
console.time("times")
// Hashes for the left-hand images
const leftArr = await this.doTask(leftList);
// Hashes for the right-hand images
const rightArr = await this.doTask(rightList);
const result = {
left: leftArr,
right: rightArr
}
console.timeEnd("times")
return result
},
/**
* Compute Hamming distances and, for each left-hand image, collect the similar right-hand images
* @param leftArr
* @param rightArr
* @returns {*[]}
*/
getHammingScore(leftArr, rightArr) {
// mock data
const leftData = fs.readFileSync(hammingLeftFile, 'UTF-8').toString();
let leftJson = JSON.parse(leftData);
leftArr = leftJson;
const rightData = fs.readFileSync(hammingRightFile, 'UTF-8').toString();
let rightJson = JSON.parse(rightData);
rightArr = rightJson;
const result = [];
// For each left-hand image, scan the right-hand list for similar images
for (let i = 0; i < leftArr.length; i++) {
const hash1 = leftArr[i].imageHashCode
const rightList = [];
// Iterate over the right-hand list and compute the Hamming distance to each image
for (let j = 0; j < rightArr.length; j++) {
const hash2 = rightArr[j].imageHashCode;
const score = hamming(hash1, hash2);
if (score != null && score <= 5) {
const data = rightArr[j];
data.score = score;
rightList.push(data);
}
}
if (rightList.length > 0) {
// Build a fresh result object for each left-hand image; reusing one shared
// object would make every entry in result reference the same data
const hashData = {
leftObj: leftArr[i],
rightList: rightList
}
result.push(hashData);
}
}
return result;
},
/**
* Submit tasks to the worker pool and collect their results
* @param urlList
* @returns {Promise<unknown[]>}
*/
async doTask(urlList) {
const rightTasks = [];
for (let i = 0; i < urlList.length; i++) {
let url = urlList[i].imageUrl;
console.log("i" + i + "; url =" + url)
const task = pool.run({imgUrl: url, custom: urlList[i]}, {name: 'getImageHash'})
rightTasks.push(task);
}
const rightArr = await Promise.all(rightTasks)
return rightArr;
}
}
module.exports = ImgService
4. workers: worker.js
javascript
const imageHash = require('imghash')
const axios = require('axios')
const exportFun = {
getImageHash({imgUrl, custom}) {
return new Promise(resolve => {
/**
* Fetch the remote image with axios and get its buffer
*
* responseType: arraybuffer
* timeout 1000: wait at most 1000 ms for the response
*/
axios({method: 'get', url: imgUrl, responseType: 'arraybuffer', timeout: 1000})
.then(async (res) => {
/**
* Use 16 bits for the hash. The default of 8 bits produces a hash that is too short, which makes the Hamming distance overstate the difference between images.
* @type {string}
*/
const hash = await imageHash.hash(res.data, 16)
console.log("hash ==", hash)
custom.imageHashCode = hash
resolve(custom);
}).catch(e => {
console.log("error ==", e)
custom.imageHashCode = ""
resolve(custom);
})
})
}
}
module.exports = exportFun;
Performance
11,423 images, 119 MB in total (11 MB after compression).
The number of worker threads Node.js spawns depends on the CPU thread count (12 cores/threads here).
Images | Disk, uncompressed (s) | Disk, compressed (s) | URL, uncompressed (s) | URL, compressed (s) |
---|---|---|---|---|
11423 | 28.630 | 2.330 | 42.127 | 6.152 |
4. Image similarity in Golang
Go offers high performance with a small footprint. As a reference from the production service: 8 goroutines handled 5,600 requests within 3 seconds on 2-core / 2 GB hardware.
Core code
ImgService.go
go
package img_service
import (
"bytes"
"crypto/tls"
"errors"
"fmt"
"github.com/corona10/goimagehash"
"github.com/disintegration/imageorient"
"github.com/gin-gonic/gin"
"github.com/panjf2000/ants/v2"
"github.com/valyala/fasthttp"
"go-img/pkg/app"
"go-img/pkg/e"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"log"
"net/http"
"strconv"
"sync"
"time"
)
const (
routineCountTotal = 8
)
var client *http.Client
var FastClient fasthttp.Client = fastClient()
// Metadata for the hamming endpoint
type ImgMetaData struct {
EcList []EcMetaData `json:"ecList"`
MpList []MpMetaData `json:"mpList"`
}
type HammingDataInterface struct {
EcRecord EcMetaData `json:"ecRecord"`
MpRecord MpMetaData `json:"mpRecord"`
}
type ImgDataReqRes struct {
ImgList []EcMetaData `json:"list"`
}
type EcMetaData struct {
ImageURL string `json:"imageUrl,omitempty"`
ShopNo string `json:"shopNo"`
ImageHashCode string `json:"imageHashCode,omitempty"`
Score string `json:"score,omitempty"`
LeftOrRight string `json:"leftOrRight"`
}
type MpMetaData struct {
ShopNo string `json:"shopNo"`
ImageURL string `json:"imageUrl,omitempty"`
ImageHashCode string `json:"imageHashCode,omitempty"`
}
// Response structure of the hamming endpoint
type HammingResp struct {
List []HammingScore `json:"list"`
}
type HammingScore struct {
ShopNo string `json:"shopNo"`
ImageURL string `json:"imageUrl,omitempty"`
ImageHashCode string `json:"imageHashCode,omitempty"`
MatchEcList []EcMetaData `json:"ecList"`
}
type tmp struct {
metaDataTmp ImgDataReqRes
failRequestNumber int32
successRequestNumber int32
}
// Generate image hashes
func GetImgHash(c *gin.Context) {
var (
appG = app.Gin{C: c}
metaData ImgDataReqRes
)
httpCode, errCode := app.BindAndValid(c, &metaData)
if errCode != e.SUCCESS {
appG.Response(httpCode, errCode, nil)
return
}
t := new(tmp)
goEcHash(metaData.ImgList, t)
fmt.Printf("结束....,失败请求数==%d, 成功请求数==%d", t.failRequestNumber, t.successRequestNumber)
appG.Response(http.StatusOK, e.SUCCESS, t.metaDataTmp)
}
// Compute image similarity scores
func GetHammingScore(c *gin.Context) {
var (
appG = app.Gin{C: c}
metaData ImgMetaData
)
httpCode, errCode := app.BindAndValid(c, &metaData)
if errCode != e.SUCCESS {
appG.Response(httpCode, errCode, nil)
return
}
respDataList := new(HammingResp)
defer ants.Release()
var wg sync.WaitGroup
p, err := ants.NewPoolWithFunc(100, func(v interface{}) {
interfaceHamming(v, respDataList)
// time.Sleep(time.Duration(100) * time.Millisecond)
wg.Done()
})
if err != nil {
return
}
if p == nil {
log.Printf("协程池创建失败==%s", err)
return
}
defer p.Release()
tmpData := HammingDataInterface{}
for _, record := range metaData.MpList {
if len(record.ImageHashCode) == 0 {
continue
}
for _, ecRecord := range metaData.EcList {
if len(ecRecord.ImageHashCode) == 0 {
continue
}
if record.ShopNo != ecRecord.ShopNo {
continue
}
tmpData.MpRecord = record
tmpData.EcRecord = ecRecord
wg.Add(1)
if err = p.Invoke(tmpData); err != nil {
continue
}
}
}
wg.Wait()
appG.Response(http.StatusOK, e.SUCCESS, respDataList)
}
func fastClient() fasthttp.Client {
return fasthttp.Client{
Name: "FunTester",
NoDefaultUserAgentHeader: true,
TLSConfig: &tls.Config{InsecureSkipVerify: true},
MaxConnsPerHost: 2000,
MaxIdleConnDuration: 1 * time.Second,
MaxConnDuration: 1 * time.Second,
ReadTimeout: 1 * time.Second,
WriteTimeout: 1 * time.Second,
MaxConnWaitTimeout: 1 * time.Second,
}
}
// fastGet builds a GET request object; the caller is responsible for releasing it
// @Description:
// @param url
// @return *fasthttp.Request
func fastGet(url string) *fasthttp.Request {
req := fasthttp.AcquireRequest()
req.Header.SetMethod("GET")
req.SetRequestURI(url)
return req
}
// fastResponse executes the request and guarantees that resources are released
// @Description:
// @param request
// @return []byte
// @return error
func fastResponse(request *fasthttp.Request) ([]byte, error) {
response := fasthttp.AcquireResponse()
defer fasthttp.ReleaseRequest(request)
defer fasthttp.ReleaseResponse(response)
if err := FastClient.Do(request, response); err != nil {
log.Printf("响应出错了==%s", err)
return nil, err
}
return response.Body(), nil
}
func getImg(url string) (image.Image, error) {
get := fastGet(url)
byteBody, _ := fastResponse(get)
reader := bytes.NewReader(byteBody)
img, _, err := imageorient.Decode(reader)
return img, err
}
func interfaceHamming(v interface{}, respDataList *HammingResp) {
data := v.(HammingDataInterface)
hash1, _ := goimagehash.ExtImageHashFromString(data.MpRecord.ImageHashCode)
hash2, _ := goimagehash.ExtImageHashFromString(data.EcRecord.ImageHashCode)
distance, _ := hash1.Distance(hash2)
// Keep pairs whose Hamming distance is below 2
if distance < 2 {
res := &HammingScore{}
res.ImageURL = data.MpRecord.ImageURL
res.ImageHashCode = data.MpRecord.ImageHashCode
res.ShopNo = data.MpRecord.ShopNo
data.EcRecord.Score = strconv.Itoa(distance)
res.MatchEcList = append(res.MatchEcList, data.EcRecord)
respDataList.List = append(respDataList.List, *res)
}
}
func doHash(v interface{}, t *tmp) (err error) {
if v == nil || v == "" {
t.failRequestNumber++
fmt.Println("未指定图片URL")
return errors.New("未指定图片URL")
}
url := v.(EcMetaData)
img, err := getImg(url.ImageURL)
if err != nil {
fmt.Println(err.Error())
t.failRequestNumber++
return
}
if img == nil {
fmt.Println(err.Error())
t.failRequestNumber++
return errors.New("获取图片失败")
}
hash1, err := goimagehash.PerceptionHash(img)
if err != nil {
fmt.Println(err.Error())
t.failRequestNumber++
return err // return the error from PerceptionHash explicitly
}
if hash1 == nil {
fmt.Println(err.Error())
t.failRequestNumber++
return errors.New("图片哈希失败")
}
url.ImageHashCode = hash1.ToString()
t.metaDataTmp.ImgList = append(t.metaDataTmp.ImgList, url)
t.successRequestNumber++
return
}
// Builds the final list of hashes
func goEcHash(list []EcMetaData, t *tmp) (err error) {
defer ants.Release()
var wg sync.WaitGroup
beg := time.Now()
p, err := ants.NewPoolWithFunc(routineCountTotal, func(v interface{}) {
doHash(v, t)
wg.Done()
})
if err != nil {
return
}
if p == nil {
return errors.New("协程池创建失败")
}
defer p.Release()
for i := 0; i < len(list); i++ {
wg.Add(1)
if err = p.Invoke(list[i]); err != nil {
continue
}
}
wg.Wait()
fmt.Printf("time consumed: %fs", time.Now().Sub(beg).Seconds())
return
}
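For reference, the request bodies that the two handlers bind to look roughly like the following. The field names come from the struct JSON tags above; the values are placeholders, and the route paths are not part of this excerpt.
javascript
// Request body for GetImgHash (binds ImgDataReqRes)
const hashRequest = {
  list: [
    { imageUrl: "https://example.com/a.jpg", shopNo: "S001", leftOrRight: "left" }
  ]
}

// Request body for GetHammingScore (binds ImgMetaData); imageHashCode values
// are the hash strings returned by GetImgHash above
const hammingRequest = {
  ecList: [
    { shopNo: "S001", imageUrl: "https://example.com/a.jpg", imageHashCode: "<hash from GetImgHash>", leftOrRight: "right" }
  ],
  mpList: [
    { shopNo: "S001", imageUrl: "https://example.com/b.jpg", imageHashCode: "<hash from GetImgHash>" }
  ]
}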