function ImageWrapper(size, data, ArrayType, initialize) {
    if (!data) {
        if (ArrayType) {
            this.data = new ArrayType(size.x * size.y);
            if (ArrayType === Array && initialize) {
                _array_helper2.default.init(this.data, 0);
            }
        } else {
            this.data = new Uint8Array(size.x * size.y);
            // note: `Uint8Array === Array` is never true, so this branch is
            // effectively dead; typed arrays are zero-initialized anyway
            if (Uint8Array === Array && initialize) {
                _array_helper2.default.init(this.data, 0);
            }
        }
    } else {
        this.data = data;
    }
    this.size = size;
}
...
}
var imageWrapper;
self.onmessage = function (e) {
if (e.data.cmd === 'init') {
var config = e.data.config;
config.numOfWorkers = 0;
imageWrapper = new Quagga.ImageWrapper({
x: e.data.size.x,
y: e.data.size.y
}, new Uint8Array(e.data.imageData));
Quagga.init(config, ready, imageWrapper);
Quagga.onProcessed(onProcessed);
} else if (e.data.cmd === 'process') {
imageWrapper.data = new Uint8Array(e.data.imageData);
...
function decodeSingle(config, resultCallback) {
    var _this = this;
    config = (0, _merge3.default)({
        inputStream: {
            type: "ImageStream",
            sequence: false,
            size: 800,
            src: config.src
        },
        numOfWorkers: false ? 0 : 1, // compiled-out env switch; always evaluates to 1
        locator: {
            halfSample: false
        }
    }, config);
    this.init(config, function () {
        _events2.default.once("processed", function (result) {
            _this.stop();
            resultCallback.call(null, result);
        }, true);
        _start();
    });
}
...
### Quagga.onDetected(callback)
Registers a `callback(data)` function that is triggered whenever a barcode
pattern has been located and decoded successfully. The passed `data` object
contains information about the decoding process, including the detected code,
which can be obtained by calling `data.codeResult.code`.
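A minimal sketch of such a listener:

```javascript
// Sketch: log each successfully decoded code.
Quagga.onDetected(function (data) {
    console.log("Detected:", data.codeResult.code);
});
```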
### Quagga.decodeSingle(config, callback)
In contrast to the calls described above, this method does not rely on
`getUserMedia` and operates on a single image instead. The provided callback
is the same as in `onDetected` and contains the result `data` object.
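A minimal sketch of such a call (the image path and reader choice are
placeholders):

```javascript
Quagga.decodeSingle({
    src: "/img/barcode.jpg",     // placeholder path to the input image
    numOfWorkers: 0,             // keep decoding on the calling thread
    decoder: {
        readers: ["ean_reader"]  // readers to try on the image
    }
}, function (data) {
    if (data && data.codeResult) {
        console.log("result", data.codeResult.code);
    } else {
        console.log("not detected");
    }
});
```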
### Quagga.offProcessed(handler)
...
function init(config, cb, imageWrapper) {
    _config = (0, _merge3.default)({}, _config3.default, config);
    if (imageWrapper) {
        _onUIThread = false;
        initializeData(imageWrapper);
        return cb();
    } else {
        initInputStream(cb);
    }
}
...
also creates a `quagga.js` file in the `lib` folder.
## <a name="api">API</a>
You can check out the [examples][github_examples] to get an idea of how to
use QuaggaJS. Basically the library exposes the following API:
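For orientation, a typical live-stream setup might look like the following
minimal sketch (the `#interactive` selector and the reader list are
illustrative assumptions):

```javascript
Quagga.init({
    inputStream: {
        type: "LiveStream",
        target: document.querySelector("#interactive") // viewport element
    },
    decoder: {
        readers: ["code_128_reader"]
    }
}, function (err) {
    if (err) {
        console.log(err);
        return;
    }
    Quagga.start();
});
```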
### <a name="quaggainit">Quagga.init(config, callback)</a>
This method initializes the library for a given configuration `config` (see
below) and invokes the `callback(err)` when Quagga has finished its
bootstrapping phase. The initialization process also requests camera
access if real-time detection is configured. In case of an error, the `err`
parameter is set and contains information about the cause. A potential cause
may be that `inputStream.type` is set to `LiveStream`, but the browser does
...
function offDetected(callback) { _events2.default.unsubscribe("detected", callback); }
...
is the same as in `onDetected` and contains the result `data` object.
### Quagga.offProcessed(handler)
In case the `onProcessed` event is no longer relevant, `offProcessed` removes
the given `handler` from the event-queue.
### Quagga.offDetected(handler)
In case the `onDetected` event is no longer relevant, `offDetected` removes
the given `handler` from the event-queue.
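A common pattern is to register a named handler so it can be removed again,
sketched below (the handler name is illustrative):

```javascript
// Sketch: a one-shot listener that unsubscribes itself.
function onFirstDetection(data) {
    console.log(data.codeResult.code);
    Quagga.offDetected(onFirstDetection);
}
Quagga.onDetected(onFirstDetection);
```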
## <a name="resultobject">The result object</a>
The callbacks passed into `onProcessed`, `onDetected` and `decodeSingle`
...
function offProcessed(callback) { _events2.default.unsubscribe("processed", callback); }
...
### Quagga.decodeSingle(config, callback)
In contrast to the calls described above, this method does not rely on
`getUserMedia` and operates on a single image instead. The provided callback
is the same as in `onDetected` and contains the result `data` object.
### Quagga.offProcessed(handler)
In case the `onProcessed` event is no longer relevant, `offProcessed` removes
the given `handler` from the event-queue.
### Quagga.offDetected(handler)
In case the `onDetected` event is no longer relevant, `offDetected` removes
...
function onDetected(callback) { _events2.default.subscribe("detected", callback); }
...
### Quagga.onProcessed(callback)
This method registers a `callback(data)` function that is called for each frame
after the processing is done. The `data` object contains detailed information
about the success/failure of the operation. The output varies depending on
whether the detection and/or decoding were successful.
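A typical use is visualizing intermediate results on the overlay canvas, as
in this sketch built on the drawing helpers shown in these excerpts:

```javascript
// Sketch: outline every candidate box on the overlay canvas.
Quagga.onProcessed(function (result) {
    var ctx = Quagga.canvas.ctx.overlay;
    if (result && result.boxes) {
        result.boxes.forEach(function (box) {
            Quagga.ImageDebug.drawPath(box, { x: 0, y: 1 }, ctx,
                { color: "green", lineWidth: 2 });
        });
    }
});
```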
### Quagga.onDetected(callback)
Registers a `callback(data)` function that is triggered whenever a barcode
pattern has been located and decoded successfully. The passed `data` object
contains information about the decoding process, including the detected code,
which can be obtained by calling `data.codeResult.code`.
### Quagga.decodeSingle(config, callback)
...
function onProcessed(callback) { _events2.default.subscribe("processed", callback); }
...
### Quagga.stop()
If the decoder is currently running, calling `stop()` prevents it from
processing any further images. Additionally, if a camera stream was requested
upon initialization, this operation also disconnects the camera.
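For instance, `stop()` is typically wired to a UI control (a sketch; the
button id is an assumption):

```javascript
document.querySelector("#stop").addEventListener("click", function () {
    Quagga.stop(); // halts processing and releases the camera
});
```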
### Quagga.onProcessed(callback)
This method registers a `callback(data)` function that is called for each frame
after the processing is done. The `data` object contains detailed information
about the success/failure of the operation. The output varies depending on
whether the detection and/or decoding were successful.
### Quagga.onDetected(callback)
...
function pause() { _stopped = true; }
...
};
that.setAttribute = function (name, value) {
video.setAttribute(name, value);
};
that.pause = function () {
video.pause();
};
that.play = function () {
video.play();
};
that.setCurrentTime = function (time) {
...
function registerResultCollector(resultCollector) {
    if (resultCollector && typeof resultCollector.addResult === 'function') {
        _resultCollector = resultCollector;
    }
}
...
}
});
```
### Using a ``ResultCollector``
After creating a ``ResultCollector`` you have to attach it to Quagga by
calling ``Quagga.registerResultCollector(resultCollector)``.
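Putting both steps together (a sketch, continuing the example above):

```javascript
var resultCollector = Quagga.ResultCollector.create({ capacity: 20 });
Quagga.registerResultCollector(resultCollector);
// ... later, after the session:
console.log(resultCollector.getResults());
```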
### Reading results
After a test/recording session, you can print the collected results that do
not fit into a certain schema. Calling ``getResults`` on the
``resultCollector`` returns an ``Array`` containing objects with:
...
function setReaders(readers) { _setReaders(readers); }
...
}, new Uint8Array(e.data.imageData));
Quagga.init(config, ready, imageWrapper);
Quagga.onProcessed(onProcessed);
} else if (e.data.cmd === 'process') {
imageWrapper.data = new Uint8Array(e.data.imageData);
Quagga.start();
} else if (e.data.cmd === 'setReaders') {
Quagga.setReaders(e.data.readers);
}
};
function onProcessed(result) {
self.postMessage({
'event': 'processed',
imageData: imageWrapper.data,
...
function start() { _start(); }
...
}
}, function(err) {
if (err) {
console.log(err);
return;
}
console.log("Initialization finished. Ready to start");
Quagga.start();
});
```
### Quagga.start()
When the library is initialized, the `start()` method starts the video-stream
and begins locating and decoding the images.
...
function stop() {
    _stopped = true;
    adjustWorkerPool(0);
    if (_config.inputStream.type === "LiveStream") {
        _camera_access2.default.release();
        _inputStream.clearEventHandlers();
    }
}
...
```
### Quagga.start()
When the library is initialized, the `start()` method starts the video-stream
and begins locating and decoding the images.
### Quagga.stop()
If the decoder is currently running, calling `stop()` prevents it from
processing any further images. Additionally, if a camera stream was requested
upon initialization, this operation also disconnects the camera.
### Quagga.onProcessed(callback)
...
function enumerateVideoDevices() {
    return (0, _mediaDevices.enumerateDevices)().then(function (devices) {
        return devices.filter(function (device) {
            return device.kind === 'videoinput';
        });
    });
}
n/a
function getActiveStreamLabel() {
    if (streamRef) {
        var tracks = streamRef.getVideoTracks();
        if (tracks && tracks.length) {
            return tracks[0].label;
        }
    }
}
n/a
function release() {
    var tracks = streamRef && streamRef.getVideoTracks();
    if (tracks && tracks.length) {
        tracks[0].stop();
    }
    streamRef = null;
}
...
start: function start() {
_start();
},
stop: function stop() {
_stopped = true;
adjustWorkerPool(0);
if (_config.inputStream.type === "LiveStream") {
__WEBPACK_IMPORTED_MODULE_7__input_camera_access__["a" /* default */].release();
_inputStream.clearEventHandlers();
}
},
pause: function pause() {
_stopped = true;
},
onDetected: function onDetected(callback) {
...
function request(video, videoConstraints) { return pickConstraints(videoConstraints).then(initCamera.bind(null, video)); }
...
video = $viewport.querySelector("video");
if (!video) {
video = document.createElement("video");
$viewport.appendChild(video);
}
}
_inputStream = __WEBPACK_IMPORTED_MODULE_11_input_stream__["a" /* default */].createLiveStream(video);
__WEBPACK_IMPORTED_MODULE_7__input_camera_access__["a" /* default */].request(video, _config.inputStream.constraints).then(function () {
_inputStream.trigger("canrecord");
}).catch(function (err) {
return cb(err);
});
}
_inputStream.setAttribute("preload", "auto");
...
function drawImage(imageData, size, ctx) {
    var canvasData = ctx.getImageData(0, 0, size.x, size.y),
        data = canvasData.data,
        imageDataPos = imageData.length,
        canvasDataPos = data.length,
        value;

    if (canvasDataPos / imageDataPos !== 4) {
        return false;
    }
    while (imageDataPos--) {
        value = imageData[imageDataPos];
        data[--canvasDataPos] = 255;
        data[--canvasDataPos] = value;
        data[--canvasDataPos] = value;
        data[--canvasDataPos] = value;
    }
    ctx.putImageData(canvasData, 0, 0);
    return true;
}
...
}
}
return queue;
};
function grayArrayFromImage(htmlImage, offsetX, ctx, array) {
ctx.drawImage(htmlImage, offsetX, 0, htmlImage.width, htmlImage.height);
var ctxData = ctx.getImageData(offsetX, 0, htmlImage.width, htmlImage.height).data;
computeGray(ctxData, array);
};
function grayArrayFromContext(ctx, size, offset, array) {
var ctxData = ctx.getImageData(offset.x, offset.y, size.x, size.y).data;
computeGray(ctxData, array);
...
function drawPath(path, def, ctx, style) {
    ctx.strokeStyle = style.color;
    ctx.fillStyle = style.color;
    ctx.lineWidth = style.lineWidth;
    ctx.beginPath();
    ctx.moveTo(path[0][def.x], path[0][def.y]);
    for (var j = 1; j < path.length; j++) {
        ctx.lineTo(path[j][def.x], path[j][def.y]);
    }
    ctx.closePath();
    ctx.stroke();
}
...
function tryDecode(line) {
var result = null,
i,
    barcodeLine = __WEBPACK_IMPORTED_MODULE_0__bresenham__["a" /* default */].getBarcodeLine(inputImageWrapper, line[0], line[1]);
if (true && config.debug.showFrequency) {
    __WEBPACK_IMPORTED_MODULE_1__common_image_debug__["a" /* default */].drawPath(line, { x: 'x', y: 'y' }, _canvas.ctx.overlay, { color: 'red', lineWidth: 3 });
    __WEBPACK_IMPORTED_MODULE_0__bresenham__["a" /* default */].debug.printFrequency(barcodeLine.line, _canvas.dom.frequency);
}
__WEBPACK_IMPORTED_MODULE_0__bresenham__["a" /* default */].toBinaryLine(barcodeLine);
if (true && config.debug.showPattern) {
    __WEBPACK_IMPORTED_MODULE_0__bresenham__["a" /* default */].debug.printPattern(barcodeLine.line, _canvas.dom.pattern);
...
function drawRect(pos, size, ctx, style) {
    ctx.strokeStyle = style.color;
    ctx.fillStyle = style.color;
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.strokeRect(pos.x, pos.y, size.x, size.y);
}
...
// draw all patches which are to be taken into consideration
overAvg = 0;
for (i = 0; i < patches.length; i++) {
patch = patches[i];
overAvg += patch.rad;
if (true && _config.debug.showPatches) {
__WEBPACK_IMPORTED_MODULE_3__common_image_debug__["a" /* default */].drawRect(patch.pos, _subImageWrapper.size, _canvasContainer.ctx.binary, { color: "red" });
}
}
overAvg /= patches.length;
overAvg = (overAvg * 180 / Math.PI + 90) % 180 - 90;
if (overAvg < 0) {
overAvg += 180;
...
clearArray = function (array) {
    var l = array.length;
    while (l--) {
        array[l] = 0;
    }
};
n/a
// Bilinear sampling of a grayscale image at a fractional position (x, y).
sample = function (inImg, x, y) {
    var lx = Math.floor(x);
    var ly = Math.floor(y);
    var w = inImg.size.x;
    var base = ly * w + lx;
    // the four neighbouring pixels
    var a = inImg.data[base + 0];
    var b = inImg.data[base + 1];
    var c = inImg.data[base + w];
    var d = inImg.data[base + w + 1];
    var e = a - b;

    x -= lx;
    y -= ly;
    return Math.floor(x * (y * (e - c + d) - e) + y * (c - a) + a);
};
n/a
convolve = function (kernel) {
    var x, y, kx, ky,
        kSize = kernel.length / 2 | 0,
        accu = 0;
    for (y = 0; y < this.size.y; y++) {
        for (x = 0; x < this.size.x; x++) {
            accu = 0;
            for (ky = -kSize; ky <= kSize; ky++) {
                for (kx = -kSize; kx <= kSize; kx++) {
                    accu += kernel[ky + kSize][kx + kSize] * this.getSafe(x + kx, y + ky);
                }
            }
            this.data[y * this.size.x + x] = accu;
        }
    }
};
n/a
copyTo = function (imageWrapper) {
    var length = this.data.length,
        srcData = this.data,
        dstData = imageWrapper.data;
    while (length--) {
        dstData[length] = srcData[length];
    }
};
n/a
get = function (x, y) { return this.data[y * this.size.x + x]; }
...
canvas.height = this.size.y;
frame = ctx.getImageData(0, 0, canvas.width, canvas.height);
data = frame.data;
current = 0;
for (y = 0; y < this.size.y; y++) {
for (x = 0; x < this.size.x; x++) {
pixel = y * this.size.x + x;
current = this.get(x, y) * scale;
data[pixel * 4 + 0] = current;
data[pixel * 4 + 1] = current;
data[pixel * 4 + 2] = current;
data[pixel * 4 + 3] = 255;
}
}
//frame.data = data;
...
// Returns the pixel at (x, y), wrapping out-of-range coordinates via a
// lazily built index mapping.
getSafe = function (x, y) {
    var i;
    if (!this.indexMapping) {
        this.indexMapping = { x: [], y: [] };
        for (i = 0; i < this.size.x; i++) {
            this.indexMapping.x[i] = i;
            this.indexMapping.x[i + this.size.x] = i;
        }
        for (i = 0; i < this.size.y; i++) {
            this.indexMapping.y[i] = i;
            this.indexMapping.y[i + this.size.y] = i;
        }
    }
    return this.data[this.indexMapping.y[y + this.size.y] * this.size.x + this.indexMapping.x[x + this.size.x]];
};
...
kSize = kernel.length / 2 | 0,
accu = 0;
for (y = 0; y < this.size.y; y++) {
for (x = 0; x < this.size.x; x++) {
accu = 0;
for (ky = -kSize; ky <= kSize; ky++) {
for (kx = -kSize; kx <= kSize; kx++) {
accu += kernel[ky + kSize][kx + kSize] * this.getSafe(x + kx, y + ky);
}
}
this.data[y * this.size.x + x] = accu;
}
}
};
...
inImageWithBorder = function (imgRef, border) { return imgRef.x >= border && imgRef.y >= border && imgRef.x < this.size.x - border && imgRef.y < this.size.y - border; }
...
line[0].x -= extension.x;
line[1].y += extension.y;
line[1].x += extension.x;
}
// check if inside image
extendLine(ext);
while (ext > 1 && (!inputImageWrapper.inImageWithBorder(line[0], 0) || !inputImageWrapper.inImageWithBorder(line[1], 0))) {
ext -= Math.ceil(ext / 2);
extendLine(-ext);
}
return line;
}
function getLine(box) {
...
invert = function () { var data = this.data, length = data.length; while (length--) { data[length] = data[length] ? 0 : 1; } }
...
if (true && _config.debug.boxFromPatches.showTransformedBox) {
__WEBPACK_IMPORTED_MODULE_3__common_image_debug__["a" /* default */].drawPath(box, { x: 0, y: 1 }, _canvasContainer.ctx.binary, { color: '#ff0000', lineWidth: 2 });
}
scale = _config.halfSample ? 2 : 1;
// reverse rotation;
transMat = mat2.invert(transMat, transMat);
for (j = 0; j < 4; j++) {
vec2.transformMat2(box[j], box[j], transMat);
}
if (true && _config.debug.boxFromPatches.showBB) {
__WEBPACK_IMPORTED_MODULE_3__common_image_debug__["a" /* default */].drawPath(box, { x: 0, y: 1 }, _canvasContainer.ctx.binary, { color: '#ff0000', lineWidth: 2 });
}
...
moments = function (labelcount) {
    var data = this.data,
        x, y,
        height = this.size.y,
        width = this.size.x,
        val, ysq,
        labelsum = [],
        i, label,
        mu11, mu02, mu20,
        x_, y_, tmp,
        result = [],
        PI = Math.PI,
        PI_4 = PI / 4;

    if (labelcount <= 0) {
        return result;
    }
    for (i = 0; i < labelcount; i++) {
        labelsum[i] = { m00: 0, m01: 0, m10: 0, m11: 0, m02: 0, m20: 0, theta: 0, rad: 0 };
    }
    // accumulate raw moments per label
    for (y = 0; y < height; y++) {
        ysq = y * y;
        for (x = 0; x < width; x++) {
            val = data[y * width + x];
            if (val > 0) {
                label = labelsum[val - 1];
                label.m00 += 1;
                label.m01 += y;
                label.m10 += x;
                label.m11 += x * y;
                label.m02 += ysq;
                label.m20 += x * x;
            }
        }
    }
    // derive central moments and the orientation of each label
    for (i = 0; i < labelcount; i++) {
        label = labelsum[i];
        if (!isNaN(label.m00) && label.m00 !== 0) {
            x_ = label.m10 / label.m00;
            y_ = label.m01 / label.m00;
            mu11 = label.m11 / label.m00 - x_ * y_;
            mu02 = label.m02 / label.m00 - y_ * y_;
            mu20 = label.m20 / label.m00 - x_ * x_;
            tmp = (mu02 - mu20) / (2 * mu11);
            tmp = 0.5 * Math.atan(tmp) + (mu11 >= 0 ? PI_4 : -PI_4) + PI;
            label.theta = (tmp * 180 / PI + 90) % 180 - 90;
            if (label.theta < 0) {
                label.theta += 180;
            }
            label.rad = tmp > PI ? tmp - PI : tmp;
            label.vec = vec2.clone([Math.cos(tmp), Math.sin(tmp)]);
            result.push(label);
        }
    }
    return result;
};
...
rasterResult = rasterizer.rasterize(0);
if (true && _config.debug.showLabels) {
_labelImageWrapper.overlay(_canvasContainer.dom.binary, Math.floor(360 / rasterResult.count), { x: x, y: y });
}
// calculate moments from the skeletonized patch
moments = _labelImageWrapper.moments(rasterResult.count);
// extract eligible patches
patchesFound = patchesFound.concat(describePatch(moments, [i, j], x, y));
}
}
if (true && _config.debug.showFoundPatches) {
...
overlay = function (canvas, scale, from) {
    if (!scale || scale < 0 || scale > 360) {
        scale = 360;
    }
    var hsv = [0, 1, 1];
    var rgb = [0, 0, 0];
    var whiteRgb = [255, 255, 255];
    var blackRgb = [0, 0, 0];
    var result = [];
    var ctx = canvas.getContext('2d');
    var frame = ctx.getImageData(from.x, from.y, this.size.x, this.size.y);
    var data = frame.data;
    var length = this.data.length;
    while (length--) {
        hsv[0] = this.data[length] * scale;
        result = hsv[0] <= 0 ? whiteRgb : hsv[0] >= 360 ? blackRgb : (0, _cv_utils.hsv2rgb)(hsv, rgb);
        data[length * 4 + 0] = result[0];
        data[length * 4 + 1] = result[1];
        data[length * 4 + 2] = result[2];
        data[length * 4 + 3] = 255;
    }
    ctx.putImageData(frame, from.x, from.y);
};
...
// Rasterize, find individual bars
_skelImageWrapper.zeroBorder();
__WEBPACK_IMPORTED_MODULE_2__common_array_helper__["a" /* default */].init(_labelImageWrapper.data, 0);
rasterizer = __WEBPACK_IMPORTED_MODULE_4__rasterizer__["a" /* default */].create(_skelImageWrapper, _labelImageWrapper);
rasterResult = rasterizer.rasterize(0);
if (true && _config.debug.showLabels) {
_labelImageWrapper.overlay(_canvasContainer.dom.binary, Math.floor(360 / rasterResult.count), { x: x, y: y });
}
// calculate moments from the skeletonized patch
moments = _labelImageWrapper.moments(rasterResult.count);
// extract eligible patches
patchesFound = patchesFound.concat(describePatch(moments, [i, j], x, y));
...
set = function (x, y, value) { this.data[y * this.size.x + x] = value; return this; }
...
var path = require('path');
var webpack = require('webpack');
module.exports = function(config) {
config.set({
basePath: '',
frameworks: ['source-map-support', 'mocha', 'chai', 'sinon', 'sinon-chai'],
files: [
'test/test-main.js',
{pattern: 'test/spec/**/*.js', included: false}
],
preprocessors: {
...
show = function (canvas, scale) {
    var ctx, frame, data, current, pixel, x, y;
    if (!scale) {
        scale = 1.0;
    }
    ctx = canvas.getContext('2d');
    canvas.width = this.size.x;
    canvas.height = this.size.y;
    frame = ctx.getImageData(0, 0, canvas.width, canvas.height);
    data = frame.data;
    current = 0;
    for (y = 0; y < this.size.y; y++) {
        for (x = 0; x < this.size.x; x++) {
            pixel = y * this.size.x + x;
            current = this.get(x, y) * scale;
            data[pixel * 4 + 0] = current;
            data[pixel * 4 + 1] = current;
            data[pixel * 4 + 2] = current;
            data[pixel * 4 + 3] = 255;
        }
    }
    ctx.putImageData(frame, 0, 0);
};
...
/**
* Creates a binary image of the current image
*/
function binarizeImage() {
__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__common_cv_utils__["f" /* otsuThreshold */])(_currentImageWrapper, _binaryImageWrapper);
_binaryImageWrapper.zeroBorder();
if (true && _config.debug.showCanvas) {
_binaryImageWrapper.show(_canvasContainer.dom.binary, 255);
}
}
/**
* Iterate over the entire image
* extract patches
*/
...
subImage = function (from, size) { return new _subImage2.default(from, size, this); }
n/a
subImageAsCopy = function (imageWrapper, from) {
    var sizeY = imageWrapper.size.y,
        sizeX = imageWrapper.size.x;
    var x, y;
    for (x = 0; x < sizeX; x++) {
        for (y = 0; y < sizeY; y++) {
            imageWrapper.data[y * sizeX + x] = this.data[(from.y + y) * this.size.x + from.x + x];
        }
    }
};
...
result.push(points[i].point);
}
}
return result;
}
function skeletonize(x, y) {
_binaryImageWrapper.subImageAsCopy(_subImageWrapper, __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__common_cv_utils__["b" /* imageRef */])(x, y));
_skeletonizer.skeletonize();
// Show skeleton if requested
if (true && _config.debug.showSkeleton) {
_skelImageWrapper.overlay(_canvasContainer.dom.binary, 360, __webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__common_cv_utils__["b" /* imageRef */])(x, y));
}
}
...
zeroBorder = function () {
    var i,
        width = this.size.x,
        height = this.size.y,
        data = this.data;
    // clear top and bottom rows
    for (i = 0; i < width; i++) {
        data[i] = data[(height - 1) * width + i] = 0;
    }
    // clear left and right columns
    for (i = 1; i < height - 1; i++) {
        data[i * width] = data[i * width + (width - 1)] = 0;
    }
};
...
}
/**
* Creates a binary image of the current image
*/
function binarizeImage() {
__webpack_require__.i(__WEBPACK_IMPORTED_MODULE_1__common_cv_utils__["f" /* otsuThreshold */])(_currentImageWrapper, _binaryImageWrapper);
_binaryImageWrapper.zeroBorder();
if (true && _config.debug.showCanvas) {
_binaryImageWrapper.show(_canvasContainer.dom.binary, 255);
}
}
/**
* Iterate over the entire image
...
function create(config) {
    var canvas = document.createElement("canvas"),
        ctx = canvas.getContext("2d"),
        results = [],
        capacity = config.capacity || 20,
        capture = config.capture === true;

    function matchesConstraints(codeResult) {
        return capacity && codeResult && !contains(codeResult, config.blacklist) && passesFilter(codeResult, config.filter);
    }

    return {
        addResult: function addResult(data, imageSize, codeResult) {
            var result = {};
            if (matchesConstraints(codeResult)) {
                capacity--;
                result.codeResult = codeResult;
                if (capture) {
                    canvas.width = imageSize.x;
                    canvas.height = imageSize.y;
                    _image_debug2.default.drawImage(data, imageSize, ctx);
                    result.frame = canvas.toDataURL();
                }
                results.push(result);
            }
        },
        getResults: function getResults() {
            return results;
        }
    };
}
...
### Creating a ``ResultCollector``
You can easily create a new ``ResultCollector`` by calling its ``create``
method with a configuration.
```javascript
var resultCollector = Quagga.ResultCollector.create({
capture: true, // keep track of the image producing this result
capacity: 20, // maximum number of results to store
blacklist: [ // list containing codes which should not be recorded
{code: "3574660239843", format: "ean_13"}],
filter: function(codeResult) {
// only store results which match this constraint
// returns true/false
...
createImageStream = function () {
    var that = {};
    var _config = null;
    var width = 0,
        height = 0,
        frameIdx = 0,
        paused = true,
        loaded = false,
        frame = null,
        baseUrl,
        ended = false,
        size,
        calculatedWidth,
        calculatedHeight,
        _eventNames = ['canrecord', 'ended'],
        _eventHandlers = {},
        _topRight = {x: 0, y: 0},
        _canvasSize = {x: 0, y: 0};

    function loadImages() {
        loaded = false;
        GetPixels(baseUrl, function (err, pixels) {
            if (err) {
                console.log(err);
                process.exit(1); // originally a bare `exit(1)`, which is undefined in node
            }
            loaded = true;
            console.log(pixels.shape);
            frame = pixels;
            width = pixels.shape[0];
            height = pixels.shape[1];
            // scale to the configured size while preserving aspect ratio
            calculatedWidth = _config.size ? (width / height > 1 ? _config.size : Math.floor((width / height) * _config.size)) : width;
            calculatedHeight = _config.size ? (width / height > 1 ? Math.floor((height / width) * _config.size) : _config.size) : height;
            _canvasSize.x = calculatedWidth;
            _canvasSize.y = calculatedHeight;
            setTimeout(function () {
                publishEvent("canrecord", []);
            }, 0);
        });
    }

    function publishEvent(eventName, args) {
        var j,
            handlers = _eventHandlers[eventName];
        if (handlers && handlers.length > 0) {
            for (j = 0; j < handlers.length; j++) {
                handlers[j].apply(that, args);
            }
        }
    }

    that.trigger = publishEvent;
    that.getWidth = function () { return calculatedWidth; };
    that.getHeight = function () { return calculatedHeight; };
    that.setWidth = function (width) { calculatedWidth = width; };
    that.setHeight = function (height) { calculatedHeight = height; };
    that.getRealWidth = function () { return width; };
    that.getRealHeight = function () { return height; };
    that.setInputStream = function (stream) {
        _config = stream;
        baseUrl = _config.src;
        size = 1;
        loadImages();
    };
    that.ended = function () { return ended; };
    that.setAttribute = function () {};
    that.getConfig = function () { return _config; };
    that.pause = function () { paused = true; };
    that.play = function () { paused = false; };
    that.setCurrentTime = function (time) { frameIdx = time; };
    that.addEventListener = function (event, f) {
        if (_eventNames.indexOf(event) !== -1) {
            if (!_eventHandlers[event]) {
                _eventHandlers[event] = [];
            }
            _eventHandlers[event].push(f);
        }
    };
    that.setTopRight = function (topRight) {
        _topRight.x = topRight.x;
        _topRight.y = topRight.y;
    };
    that.getTopRight = function () { return _topRight; };
    that.setCanvasSize = function (size) {
        _canvasSize.x = size.x;
        _canvasSize.y = size.y;
    };
    that.getCanvasSize = function () { return _canvasSize; };
    that.getFrame = function () {
        if (!loaded) {
            return null;
        }
        return frame;
    };
    return that;
};
...
function initInputStream(cb) {
var video;
if (_config.inputStream.type === "VideoStream") {
video = document.createElement("video");
_inputStream = __WEBPACK_IMPORTED_MODULE_11_input_stream__["a" /* default */].createVideoStream(video);
} else if (_config.inputStream.type === "ImageStream") {
_inputStream = __WEBPACK_IMPORTED_MODULE_11_input_stream__["a" /* default */].createImageStream();
} else if (_config.inputStream.type === "LiveStream") {
var $viewport = getViewPort();
if ($viewport) {
video = $viewport.querySelector("video");
if (!video) {
video = document.createElement("video");
$viewport.appendChild(video);
...