wasm-camera-qr
v0.1.5
A high-performance QR code scanner for camera streams powered by WebAssembly.
import init, { CameraProcessor } from "wasm-camera-qr";
export class QrScanner {
constructor(canvasId) {
// Display (High Res, 60fps)
this.displayCanvas = document.getElementById(canvasId);
this.displayCtx = this.displayCanvas.getContext('2d', { alpha: false });
// Analysis (Medium Res, 10fps)
this.analysisCanvas = document.createElement('canvas');
this.analysisCtx = this.analysisCanvas.getContext('2d', { willReadFrequently: true });
this.video = document.createElement('video');
this.video.setAttribute("playsinline", "true");
this.video.setAttribute("webkit-playsinline", "true");
this.video.setAttribute("muted", "true");
this.video.autoplay = true;
this.processor = null;
this.memory = null;
this.stream = null;
this.isRunning = false;
// Loop State
this.lastScanTime = 0;
this.scanInterval = 100; // 100ms = 10 scans/sec (Balanced)
this.lastResult = null;
this.lastResultTime = 0;
this.onResult = null;
}
async init() {
if (!this.processor) {
const wasm = await init();
this.memory = wasm.memory;
this.processor = CameraProcessor.new(640, 480);
}
}
async start(deviceId = null) {
if (this.isRunning) this.stop();
const constraints = {
audio: false,
video: {
deviceId: deviceId ? { exact: deviceId } : undefined,
facingMode: deviceId ? undefined : "environment",
// Request 1080p; the browser falls back to the closest supported resolution.
width: { ideal: 1920 },
height: { ideal: 1080 }
}
};
try {
this.stream = await navigator.mediaDevices.getUserMedia(constraints);
this.video.srcObject = this.stream;
this.applyFocusMode();
// Wait for video metadata
await new Promise((resolve) => {
if (this.video.readyState >= 1) resolve();
else this.video.onloadedmetadata = () => resolve();
});
await this.video.play();
this.setupResolution();
this.isRunning = true;
this.loop();
return true;
} catch (err) {
console.error("Camera Start Error:", err);
if (!deviceId) return this.startBasic();
throw err;
}
}
async startBasic() {
try {
this.stream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: "environment" } });
this.video.srcObject = this.stream;
await this.video.play();
this.setupResolution();
this.isRunning = true;
this.loop();
return true;
} catch (e) {
console.error("Basic start failed", e);
return false;
}
}
setupResolution() {
const vW = this.video.videoWidth;
const vH = this.video.videoHeight;
if (vW === 0 || vH === 0) return;
// 1. Display: Native Video Resolution
this.displayCanvas.width = vW;
this.displayCanvas.height = vH;
// 2. Analysis: Cap at 1280px width (Increased from 640px)
// This is critical for detecting small codes on paper.
const MAX_ANALYSIS_WIDTH = 1280;
const scale = Math.min(1, MAX_ANALYSIS_WIDTH / vW);
this.analysisCanvas.width = vW * scale;
this.analysisCanvas.height = vH * scale;
this.processor.resize(this.analysisCanvas.width, this.analysisCanvas.height);
}
async applyFocusMode() {
if (!this.stream) return;
const track = this.stream.getVideoTracks()[0];
// getCapabilities() is not implemented in every browser; bail out if missing
if (!track || typeof track.getCapabilities !== "function") return;
const caps = track.getCapabilities();
if (caps.focusMode && caps.focusMode.includes('continuous')) {
try { await track.applyConstraints({ advanced: [{ focusMode: 'continuous' }] }); }
catch (e) { /* ignore */ }
}
}
stop() {
this.isRunning = false;
if (this.stream) {
this.stream.getTracks().forEach(t => t.stop());
this.stream = null;
}
this.video.pause();
this.video.srcObject = null;
}
toggleFlashlight(active) {
if (!this.stream) return;
const track = this.stream.getVideoTracks()[0];
track.applyConstraints({ advanced: [{ torch: active }] }).catch(() => {});
}
loop() {
if (!this.isRunning) return;
requestAnimationFrame(() => this.loop());
if (this.video.videoWidth === 0) return;
// 1. Fluid Render (60 FPS)
this.displayCtx.drawImage(this.video, 0, 0, this.displayCanvas.width, this.displayCanvas.height);
// Draw persistent result overlay
if (this.lastResult && (performance.now() - this.lastResultTime < 500)) {
this.drawBounds(this.lastResult);
}
// 2. Throttled Analysis (10 FPS)
const now = performance.now();
if (now - this.lastScanTime > this.scanInterval) {
this.lastScanTime = now;
this.scanFrame();
}
}
scanFrame() {
const w = this.analysisCanvas.width;
const h = this.analysisCanvas.height;
// Downscale video to analysis canvas
this.analysisCtx.drawImage(this.video, 0, 0, w, h);
const imageData = this.analysisCtx.getImageData(0, 0, w, h);
const ptr = this.processor.get_buffer_ptr();
const wasmSlice = new Uint8Array(this.memory.buffer, ptr, w * h * 4);
wasmSlice.set(imageData.data);
const resultJson = this.processor.scan_stream();
const result = JSON.parse(resultJson);
if (result.found) {
// Coordinate Mapping: Scale up from Analysis -> Display
const scaleX = this.displayCanvas.width / this.analysisCanvas.width;
const scaleY = this.displayCanvas.height / this.analysisCanvas.height;
const scaledCorners = result.corners.map(p => ({
x: p.x * scaleX,
y: p.y * scaleY
}));
this.lastResult = scaledCorners;
this.lastResultTime = performance.now();
if (this.onResult && result.content) {
this.onResult(result.content);
}
}
}
drawBounds(points) {
if (!points || points.length < 4) return;
this.displayCtx.strokeStyle = "#FFFFFF";
this.displayCtx.lineWidth = 5;
this.displayCtx.lineJoin = "round";
this.displayCtx.beginPath();
this.displayCtx.moveTo(points[0].x, points[0].y);
for (let i = 1; i < points.length; i++) {
this.displayCtx.lineTo(points[i].x, points[i].y);
}
this.displayCtx.closePath();
this.displayCtx.stroke();
}
}
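A minimal usage sketch of the class above. It assumes the QrScanner class is saved locally as ./qr-scanner.js (a hypothetical path) and that the page contains a <canvas id="preview"></canvas> element for the live preview; the actual file layout is up to your app.

import { QrScanner } from "./qr-scanner.js"; // hypothetical local module containing the class above

const scanner = new QrScanner("preview");

// Receive the decoded text whenever a QR code is found in the stream
scanner.onResult = (content) => {
  console.log("QR content:", content);
};

await scanner.init();   // load the WASM module and create the CameraProcessor
await scanner.start();  // rear ("environment") camera by default; pass a deviceId to pick a camera

// Later, when scanning is done:
// scanner.stop();

init() is awaited before start() so the WASM memory and CameraProcessor already exist when setupResolution() and scanFrame() run.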