Web scraping is an ongoing battle. One moment a bot runs smoothly, and the next a new anti-bot system silently blocks its requests. WebGL fingerprinting is one of the strongest defenses in that toolkit, because it goes beyond the browser and identifies the underlying hardware. However, WebGL fingerprinting isn’t just a “browser check.” It’s a hardware-level signature. And that makes it tough to fake. But it’s not impossible. Below, you’ll learn what WebGL fingerprinting is, how it works, and four proven ways to reduce your risk while scraping.

WebGL is the web's graphics engine, allowing browsers to display 2D and 3D visuals without plugins. The important point is that WebGL relies on the GPU.
Each GPU renders images in its own way, so even two GPUs from the same manufacturer can produce slightly different results. Factors like drivers, motherboard design, and browser implementation influence the output, and that is what WebGL fingerprinting uses.
It is similar to seeing the same ad on multiple TVs in a store but noticing subtle differences in color and clarity. WebGL fingerprinting looks for these tiny, hardware-driven inconsistencies. It only works when JavaScript is enabled, so a static HTML page cannot generate a WebGL fingerprint.
A website typically creates a hidden <canvas> element, initializes WebGL, reads GPU details with gl.getParameter(), renders a test image, and grabs pixel data via gl.readPixels(). It then hashes the results into a fingerprint ID. You can check this live with BrowserLeaks' WebGL report to see how unique your fingerprint is.
Below is a complete example of a WebGL fingerprinting script. You can use it for testing, diagnostics, or debugging. Only run it on devices you own or when you have permission. Don't use it to track or deanonymize users. Treat results as sensitive data.
// 32-bit FNV-1a hash, dependency-free and synchronous.
// The multiply by the FNV prime (16777619 = 0x01000193) is decomposed into
// shift-adds so every intermediate stays in fast 32-bit integer arithmetic.
function fnv1aHashFromBytes(bytes) {
  let hash = 0x811c9dc5 >>> 0; // FNV-1a offset basis
  for (const byte of bytes) {
    hash ^= byte;
    // hash *= 16777619, expressed as (1 + 2 + 16 + 128 + 256 + 2^24) * hash
    hash = (hash + ((hash << 1) + (hash << 4) + (hash << 7) + (hash << 8) + (hash << 24))) >>> 0;
  }
  // Fixed-width 8-character lowercase hex string.
  return ('00000000' + hash.toString(16)).slice(-8);
}
function fnv1aHashFromString(s) {
  // Truncate each UTF-16 code unit to its low byte, then hash the bytes.
  const codes = s.split('').map((ch) => ch.charCodeAt(0) & 0xff);
  return fnv1aHashFromBytes(Uint8Array.from(codes));
}
// Create an (optionally hidden) canvas and return WebGLRenderingContext / WebGL2RenderingContext
// Create a hidden 256x256 canvas and return {gl, canvas} for it.
// Prefers WebGL2 and falls back to WebGL1 / experimental-webgl.
// Fix: options are destructured with per-key defaults, so a partial options
// object (e.g. {preserveDrawingBuffer: true}) no longer silently drops the
// webgl2Preferred default — the original replaced the whole defaults object
// whenever any argument was supplied.
function createWebGLContext({webgl2Preferred = true, preserveDrawingBuffer = false} = {}) {
  const canvas = document.createElement('canvas');
  canvas.width = 256;
  canvas.height = 256;
  canvas.style.display = 'none'; // invisible to the user
  // Attach to the page so rendering behaves like an in-page canvas. The
  // canvas is NOT auto-removed; the caller is responsible for cleanup.
  if (document.body) document.body.appendChild(canvas);
  const attrs = {preserveDrawingBuffer};
  let gl = null;
  if (webgl2Preferred) {
    try {
      gl = canvas.getContext('webgl2', attrs);
    } catch (e) { /* fall through to WebGL1 */ }
  }
  if (!gl) {
    try {
      gl = canvas.getContext('webgl', attrs) || canvas.getContext('experimental-webgl');
    } catch (e) { /* no WebGL available; gl stays null */ }
  }
  return {gl, canvas};
}
This creates an invisible canvas and attempts WebGL2 first. If WebGL2 isn't supported, it falls back to WebGL1.
// Collect GPU capability limits, shader precision, supported extensions, and
// (when the browser exposes it) unmasked vendor/renderer strings.
// Fix: maxAnisotropy is now set to null when the anisotropic-filtering
// extension is absent, matching the null convention used for
// unmaskedVendor/unmaskedRenderer (the original left the key undefined).
function getWebGLHardwareInfo(gl) {
  const info = {};
  // Hardware limits vary by GPU/driver and feed directly into the fingerprint.
  info.maxTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE);
  info.maxCombinedTextureImageUnits = gl.getParameter(gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS);
  info.maxVertexAttribs = gl.getParameter(gl.MAX_VERTEX_ATTRIBS);
  info.maxVertexUniformVectors = gl.getParameter(gl.MAX_VERTEX_UNIFORM_VECTORS);
  info.maxFragmentUniformVectors = gl.getParameter(gl.MAX_FRAGMENT_UNIFORM_VECTORS);
  info.shaderPrecision = {
    vertexHighFloat: gl.getShaderPrecisionFormat(gl.VERTEX_SHADER, gl.HIGH_FLOAT),
    fragmentHighFloat: gl.getShaderPrecisionFormat(gl.FRAGMENT_SHADER, gl.HIGH_FLOAT)
  };
  // WEBGL_debug_renderer_info is blocked by some browsers/privacy settings.
  const dbg = gl.getExtension('WEBGL_debug_renderer_info');
  info.unmaskedVendor = dbg ? gl.getParameter(dbg.UNMASKED_VENDOR_WEBGL) : null;
  info.unmaskedRenderer = dbg ? gl.getParameter(dbg.UNMASKED_RENDERER_WEBGL) : null;
  info.extensions = gl.getSupportedExtensions() || [];
  // Vendor-prefixed names cover older Firefox/WebKit builds.
  const extAniso = gl.getExtension('EXT_texture_filter_anisotropic') ||
    gl.getExtension('MOZ_EXT_texture_filter_anisotropic') ||
    gl.getExtension('WEBKIT_EXT_texture_filter_anisotropic');
  info.maxAnisotropy = extAniso
    ? gl.getParameter(extAniso.MAX_TEXTURE_MAX_ANISOTROPY_EXT)
    : null;
  return info;
}
This captures GPU limits, shader precision, extensions, and vendor info.
// Probe low/medium/high float precision for both shader stages. These
// ranges vary by GPU and driver, which is what fingerprinting exploits.
// Fix: getShaderPrecisionFormat returns null when an error occurs (e.g. a
// lost context); the original then threw on pf.rangeMin. A null probe is
// now recorded as null instead of crashing the whole collection.
function probeShaderPrecisions(gl) {
  const precisions = {};
  const shaderTypes = [
    {type: gl.VERTEX_SHADER, name: 'vertex'},
    {type: gl.FRAGMENT_SHADER, name: 'fragment'}
  ];
  const levels = [
    {p: gl.LOW_FLOAT, name: 'low'},
    {p: gl.MEDIUM_FLOAT, name: 'medium'},
    {p: gl.HIGH_FLOAT, name: 'high'}
  ];
  for (const sh of shaderTypes) {
    precisions[sh.name] = {};
    for (const lv of levels) {
      const pf = gl.getShaderPrecisionFormat(sh.type, lv.p);
      precisions[sh.name][lv.name] = pf
        ? {rangeMin: pf.rangeMin, rangeMax: pf.rangeMax, precision: pf.precision}
        : null;
    }
  }
  return precisions;
}
WebGL fingerprinting reads these precision ranges because they vary by GPU and driver.
// GLSL ES 3.00 shader pair for the deterministic test render.
// Fix: the template literals were written with escaped backticks (\`),
// which is a syntax error at top level; plain backticks are required.
const VERT = `#version 300 es
in vec2 a_pos;
out vec2 v_uv;
void main() {
v_uv = a_pos * 0.5 + 0.5;
gl_Position = vec4(a_pos, 0.0, 1.0);
}`;
// High-frequency trig/fract terms amplify tiny float differences between
// GPUs and drivers, so the rendered pixels differ across hardware.
const FRAG = `#version 300 es
precision highp float;
in vec2 v_uv;
out vec4 outColor;
void main() {
float r = sin(v_uv.x * 123.456) * 0.5 + 0.5;
float g = cos(v_uv.y * 78.9) * 0.5 + 0.5;
float b = fract((v_uv.x + v_uv.y) * 9876.54321);
outColor = vec4(r, g, b, 1.0);
}`;
// Compile and link a WebGL program from vertex/fragment GLSL sources.
// Throws Error with the driver's info log on compile or link failure.
// Fix: failures no longer leak GPU resources — the original leaked the
// vertex shader when the fragment shader failed to compile, and leaked both
// shaders and the program object on a link error.
function createProgramWebGL2(gl, vertSrc, fragSrc) {
  function compile(src, type) {
    const s = gl.createShader(type);
    gl.shaderSource(s, src);
    gl.compileShader(s);
    if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
      const err = gl.getShaderInfoLog(s);
      gl.deleteShader(s);
      throw new Error('Shader compile error: ' + err);
    }
    return s;
  }
  const vs = compile(vertSrc, gl.VERTEX_SHADER);
  let fs;
  try {
    fs = compile(fragSrc, gl.FRAGMENT_SHADER);
  } catch (e) {
    gl.deleteShader(vs); // don't leak the already-compiled vertex shader
    throw e;
  }
  const program = gl.createProgram();
  gl.attachShader(program, vs);
  gl.attachShader(program, fs);
  gl.linkProgram(program);
  // Attached shaders are only flagged for deletion; the driver frees them
  // when the program itself is released, so this is safe before the status check.
  gl.deleteShader(vs);
  gl.deleteShader(fs);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    const err = gl.getProgramInfoLog(program);
    gl.deleteProgram(program); // don't leak the failed program object
    throw new Error('Program link error: ' + err);
  }
  return program;
}
// Draw the deterministic full-screen quad, then reduce the framebuffer to a
// short pixel hash plus a coarse render-time measurement.
function renderTestAndHash(gl, canvas) {
  // The shader pair targets GLSL ES 3.00, so a WebGL2 context is expected.
  const isWebGL2 =
    typeof WebGL2RenderingContext !== 'undefined' &&
    gl instanceof WebGL2RenderingContext;
  if (!isWebGL2) {
    console.warn('Example uses WebGL2 for deterministic shader pipeline; fallback may require shader edits.');
  }
  const prog = createProgramWebGL2(gl, VERT, FRAG);
  gl.useProgram(prog);
  // Full-screen quad in clip space, drawn as a 4-vertex triangle strip.
  const quad = new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]);
  const posLoc = gl.getAttribLocation(prog, 'a_pos');
  const vb = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, vb);
  gl.bufferData(gl.ARRAY_BUFFER, quad, gl.STATIC_DRAW);
  gl.enableVertexAttribArray(posLoc);
  gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0);
  gl.viewport(0, 0, canvas.width, canvas.height);
  gl.clearColor(0, 0, 0, 1);
  gl.clear(gl.COLOR_BUFFER_BIT);
  // Time the draw; gl.finish() blocks until the GPU has completed the work,
  // so the measurement covers actual rendering, not just command submission.
  const started = performance.now();
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
  gl.finish();
  const renderTime = performance.now() - started;
  // Read every pixel back as RGBA bytes and hash the raw buffer.
  const px = new Uint8Array(canvas.width * canvas.height * 4);
  gl.readPixels(0, 0, canvas.width, canvas.height, gl.RGBA, gl.UNSIGNED_BYTE, px);
  const pixelHash = fnv1aHashFromBytes(px);
  // Release the GPU resources created by this probe.
  gl.deleteBuffer(vb);
  gl.deleteProgram(prog);
  return {pixelHash, renderTime};
}
The script renders a deterministic scene, reads pixels, and hashes them. The result is unique to your hardware.
// Gather all WebGL signals (hardware limits, precisions, extensions, the
// rendered-pixel hash) and fold them into a single fingerprint ID.
// Fix: the hidden canvas appended by createWebGLContext is now removed from
// the DOM when the probe finishes (even on error), so repeated calls no
// longer accumulate orphaned canvas elements.
function collectWebGLFingerprint(opts = {}) {
  const {gl, canvas} = createWebGLContext({webgl2Preferred: true, preserveDrawingBuffer: true});
  if (!gl) throw new Error('No WebGL available');
  try {
    const hw = getWebGLHardwareInfo(gl);
    const precisions = probeShaderPrecisions(gl);
    const supportsFloatTextures =
      !!gl.getExtension('OES_texture_float') || !!gl.getExtension('EXT_color_buffer_float');
    const {pixelHash, renderTime} = renderTestAndHash(gl, canvas);
    // Every component is concatenated; any single difference changes the hash.
    const parts = [
      navigator.userAgent || '',
      hw.unmaskedVendor || '',
      hw.unmaskedRenderer || '',
      hw.maxTextureSize,
      hw.maxCombinedTextureImageUnits,
      hw.maxVertexAttribs,
      supportsFloatTextures ? 'floatTex' : '',
      hw.extensions ? hw.extensions.join(',') : '',
      JSON.stringify(precisions),
      pixelHash,
      // NOTE(review): render timing is noisy across runs; rounding to whole
      // milliseconds reduces but does not eliminate fingerprint instability.
      String(Math.round(renderTime))
    ];
    const raw = parts.join('|');
    const fingerprint = fnv1aHashFromString(raw);
    return {
      fingerprint,
      raw,
      components: {
        ua: navigator.userAgent,
        hardware: hw,
        precisions,
        pixelHash,
        renderTime
      }
    };
  } finally {
    // Clean up the hidden probe canvas even if any step above throws.
    if (canvas && canvas.parentNode) canvas.parentNode.removeChild(canvas);
  }
}
// Demo entry point: run the collector and dump the result to the console.
try {
  const result = collectWebGLFingerprint();
  console.log('WebGL fingerprint:', result.fingerprint);
  console.log('Components:', result.components);
} catch (err) {
  console.error('Could not collect WebGL fingerprint:', err);
}
This step combines everything into one compact ID. The whole process runs in milliseconds and is invisible to the user.
WebGL fingerprinting is hard to bypass because it uses real hardware data. You can clear cookies, rotate proxies, and change user agents, but the GPU fingerprint stays consistent. That consistency is what makes it reliable for anti-bot systems.
It also makes spoofing risky. If your GPU claims to be Intel but renders like NVIDIA, the mismatch flags you instantly. Many modern detection systems catch spoofing attempts quickly. But there are still practical options.
Tools like Playwright and Selenium can tweak parameters like screen size and user agent. Add stealth plugins and you can alter WebGL-related signals too.
This method works for small to mid-sized projects. It fails when the target uses advanced anti-bot detection. And browser extensions often break rendering consistency.
Hardware- or driver-level spoofing is powerful. You replace the physical GPU with a software renderer or virtual GPU. It can change driver-level outputs and the GPU signature. But it's expensive and complex. Scaling it is even harder. If you're not technically strong, it's not the first tool you should reach for.
If the site offers a public or private API, use it. APIs return structured data and are usually less protected than rendered pages. It's not always available, but when it is, it's the easiest and safest option.
Disabling WebGL in the browser is the simplest way to remove WebGL signals entirely. It also breaks many modern sites.
Chrome (Flag method)
Type chrome://flags → find "Disable WebGL" → set to Disabled → relaunch.
Chrome (Command line method)
Add --disable-webgl to the Chrome shortcut target.
Firefox
Go to about:config → set webgl.disable to true.
Safari
Disable WebGL via Preferences → Advanced → Experimental Features.
WebGL fingerprinting is a hardware-level defense, making it much harder to bypass than traditional signals. For most projects, the best move is to rely on APIs or reduce dependence on rendered pages. If spoofing becomes necessary, treat it as a last resort and focus on stability and consistency rather than a one-off workaround.