Mirror: A frag-canvas custom element to apply Shadertoy fragment shaders to a canvas or image/video element
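For orientation, here is a minimal usage sketch (not part of the element code below). A script child supplies the Shadertoy-style fragment shader, and the first child that is neither a canvas nor a script (here an img) becomes the shader's iChannel0 input. The children must already exist when the element is upgraded, which is why the markup is inserted after the defining module has loaded; the module path and image URL are placeholders.

// Usage sketch: register the element, then insert markup whose children the
// constructor will pick up (the <script> as shader source, the <img> as iChannel0).
import './frag-canvas'; // placeholder path to the module below

document.body.insertAdjacentHTML(
  'beforeend',
  `<frag-canvas autoresize>
    <img src="photo.jpg">
    <script type="x-shader/x-fragment">
      void mainImage(out vec4 fragColor, in vec2 fragCoord) {
        vec2 uv = fragCoord / iResolution;
        fragColor = texture2D(iChannel0, uv);
      }
    </script>
  </frag-canvas>`
);

The implementation follows.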
const VERSION_300 = '#version 300 es';

const VS_SOURCE_100 =
  'attribute vec2 vPos;\n' +
  'void main() {\n' +
  '  gl_Position = vec4(vPos, 0.0, 1.0);\n' +
  '}';
const VS_SOURCE_300 =
  `${VERSION_300}\n` +
  'in vec4 vPos;\n' +
  'void main() {\n' +
  '  gl_Position = vPos;\n' +
  '}';

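// Build the value for Shadertoy's iDate uniform:
// (year, month, day, seconds since local midnight).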
const makeDateVector = () => {
  const DATE = new Date();
  const year = DATE.getFullYear();
  const month = DATE.getMonth() + 1;
  const day = DATE.getDate();
  const time =
    DATE.getHours() * 60 * 60 +
    DATE.getMinutes() * 60 +
    DATE.getSeconds() +
    DATE.getMilliseconds() * 0.001;
  return [year, month, day, time] as const;
};

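// Narrow a TexImageSource to an HTMLImageElement so `complete` and `currentSrc`
// can be checked before re-uploading it as a texture.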
const isImageElement = (tex: TexImageSource): tex is HTMLImageElement =>
  (tex as Element).tagName === 'IMG';

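// Massage a Shadertoy-style fragment shader into a complete GLSL source:
// add a default precision, rewrite mainImage(out vec4, in vec2) into main(),
// declare only the Shadertoy uniforms the source actually references, and
// bridge GLSL ES 1.00 / 3.00 differences (gl_FragColor vs. an explicit out
// variable, texture2D vs. texture).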
const preprocessShader = (source: string) => {
  let header = '';
  let output = source.trim();
  let isES300 = false;
  if (output.startsWith(VERSION_300)) {
    isES300 = true;
    output = output.slice(VERSION_300.length + 1);
    header += `${VERSION_300}\n`;
  }

  if (!/^\s*precision /.test(output)) header += 'precision highp float;\n';

  if (!/main\s*\(/.test(output)) {
    const ioRe = /\(\s*out\s+vec4\s+(\S+)\s*,\s*in\s+vec2\s+(\S+)\s*\)/g;
    const io = ioRe.exec(source);
    output = output.replace(/mainImage\s*\(/, 'main(').replace(ioRe, '()');
    if (isES300 && io) {
      header += `out vec4 ${io[1]};\n`;
      if (io[2] !== 'gl_FragCoord')
        header += `#define ${io[2]} gl_FragCoord.xy\n`;
    } else if (io) {
      if (io[1] !== 'gl_FragColor') header += `#define ${io[1]} gl_FragColor\n`;
      if (io[2] !== 'gl_FragCoord')
        header += `#define ${io[2]} gl_FragCoord.xy\n`;
    }
  }

  if (isES300 && output.includes('gl_FragColor')) {
    header += 'out vec4 aFragColor;\n';
    header += '#define gl_FragColor aFragColor\n';
  }

  if (output.includes('iChannel0')) header += 'uniform sampler2D iChannel0;\n';
  if (output.includes('iResolution')) header += 'uniform vec2 iResolution;\n';
  if (output.includes('iChannelResolution'))
    header += 'uniform vec3 iChannelResolution[1];\n';
  if (output.includes('iTime')) header += 'uniform float iTime;\n';
  if (output.includes('iTimeDelta')) header += 'uniform float iTimeDelta;\n';
  if (output.includes('iFrame')) header += 'uniform float iFrame;\n';
  if (output.includes('iDate')) header += 'uniform vec4 iDate;\n';

  if (isES300) output = output.replace(/texture2D\s*\(/g, 'texture(');

  return {
    source: `${header}\n${output}`,
    isES300,
  };
};

interface InitState {
  width: number;
  height: number;
  fragSource: string;
}

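// Compile the vertex shaders, set up a full-screen triangle strip and a single
// texture unit, and return a small API for driving the canvas: render a frame,
// resize the viewport, or swap in a new fragment shader.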
function createState(gl: WebGL2RenderingContext, init: InitState) {
  const program = gl.createProgram();

  const vertShader300 = gl.createShader(gl.VERTEX_SHADER);
  const vertShader100 = gl.createShader(gl.VERTEX_SHADER);

  const fragShader = gl.createShader(gl.FRAGMENT_SHADER);
  if (!vertShader100 || !vertShader300 || !fragShader) {
    return null;
  }

  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

  gl.shaderSource(vertShader100, VS_SOURCE_100);
  gl.compileShader(vertShader100);
  gl.shaderSource(vertShader300, VS_SOURCE_300);
  gl.compileShader(vertShader300);

  const screenVertex = new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]);
  const vertexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, screenVertex, gl.STATIC_DRAW);

  const texture = gl.createTexture();
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  let width = init.width;
  let height = init.height;

  let vertexPos: GLint = 0;
  let iResolution: WebGLUniformLocation | null = null;
  let iChannelResolution: WebGLUniformLocation | null = null;
  let iTime: WebGLUniformLocation | null = null;
  let iTimeDelta: WebGLUniformLocation | null = null;
  let iFrame: WebGLUniformLocation | null = null;
  let iChannel: WebGLUniformLocation | null = null;
  let iDate: WebGLUniformLocation | null = null;

  let frameCount = 0;
  let prevTimestamp: DOMHighResTimeStamp | undefined;
  let prevSource: string | null = null;

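  // The API handed back to the element: draw() uploads the source element as
  // iChannel0 and renders a frame, updateViewport() resizes the backing canvas,
  // updateFragShader() recompiles and relinks the program, and drawImmediate()
  // redraws with the last uploaded texture (used right after a resize).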
  const state = {
    draw(source: TexImageSource, timestamp: DOMHighResTimeStamp) {
      // Delta since the previous frame; zero on the very first frame.
      const timeDelta = timestamp - (prevTimestamp ?? timestamp);
      prevTimestamp = timestamp;

      gl.useProgram(program);

      if (isImageElement(source)) {
        if (source.complete) {
          const { currentSrc } = source;
          if (prevSource !== currentSrc) {
            prevSource = currentSrc;
            gl.texImage2D(
              gl.TEXTURE_2D,
              0,
              gl.RGBA,
              gl.RGBA,
              gl.UNSIGNED_BYTE,
              source
            );
          }
        }
      } else if (source) {
        prevSource = null;
        gl.texImage2D(
          gl.TEXTURE_2D,
          0,
          gl.RGBA,
          gl.RGBA,
          gl.UNSIGNED_BYTE,
          source
        );
        if (iChannelResolution)
          gl.uniform3fv(iChannelResolution, [width, height, 0]);
      } else {
        prevSource = null;
        if (iChannelResolution) gl.uniform3fv(iChannelResolution, [0, 0, 0]);
      }

      if (iResolution) gl.uniform2f(iResolution, width, height);
      if (iTime) gl.uniform1f(iTime, timestamp / 1000);
      if (iTimeDelta) gl.uniform1f(iTimeDelta, timeDelta / 1000);
      if (iFrame) gl.uniform1f(iFrame, frameCount++);
      if (iChannel) gl.uniform1i(iChannel, 0);
      if (iDate) gl.uniform4f(iDate, ...makeDateVector());

      gl.enableVertexAttribArray(vertexPos);
      gl.vertexAttribPointer(vertexPos, 2, gl.FLOAT, false, 0, 0);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    },

    updateViewport(newWidth: number, newHeight: number) {
      gl.canvas.width = width = newWidth;
      gl.canvas.height = height = newHeight;
      gl.viewport(0, 0, width, height);
    },

    updateFragShader(fragSource: string) {
      const preprocessed = preprocessShader(fragSource);
      gl.shaderSource(fragShader, preprocessed.source);
      gl.compileShader(fragShader);
      const vertShader = preprocessed.isES300 ? vertShader300 : vertShader100;
      // Detach whatever a previous call attached so the program never ends up
      // with two vertex shaders (or the same shader attached twice).
      for (const attached of gl.getAttachedShaders(program) ?? []) {
        gl.detachShader(program, attached);
      }
      gl.attachShader(program, vertShader);
      gl.attachShader(program, fragShader);

      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        console.error(
          gl.getShaderInfoLog(fragShader) || gl.getProgramInfoLog(program)
        );
      }

      vertexPos = gl.getAttribLocation(program, 'vPos');
      iResolution = gl.getUniformLocation(program, 'iResolution');
      iChannelResolution = gl.getUniformLocation(program, 'iChannelResolution');
      iTime = gl.getUniformLocation(program, 'iTime');
      iTimeDelta = gl.getUniformLocation(program, 'iTimeDelta');
      iFrame = gl.getUniformLocation(program, 'iFrame');
      // The iChannel0 sampler (declared by preprocessShader) is bound to
      // texture unit 0 in draw().
      iChannel = gl.getUniformLocation(program, 'iChannel0');
      iDate = gl.getUniformLocation(program, 'iDate');
    },

    drawImmediate() {
      gl.useProgram(program);
      gl.enableVertexAttribArray(vertexPos);
      gl.vertexAttribPointer(vertexPos, 2, gl.FLOAT, false, 0, 0);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    },
  };

  state.updateViewport(width, height);
  state.updateFragShader(init.fragSource);
  return state;
}

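// The <frag-canvas> element. It mirrors the HTMLCanvasElement API: getContext()
// is proxied to an internal input canvas (used when no <img>/<video> child
// supplies the texture), while toBlob(), toDataURL() and captureStream() read
// from the output canvas that the shader renders into.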
class FragCanvas extends HTMLElement implements HTMLCanvasElement {
  static observedAttributes = [];

  private state: ReturnType<typeof createState> | null = null;
  private input: HTMLCanvasElement | HTMLImageElement | HTMLVideoElement;
  private output: HTMLCanvasElement;

  #mutationObserver = new MutationObserver(() => {
    if (this.state) {
      this.state.updateFragShader(this.source);
    }
  });

  #resizeObserver = new ResizeObserver(entries => {
    const entry = entries[0];
    if (this.state && entry) {
      const width = entry.devicePixelContentBoxSize[0].inlineSize;
      const height = entry.devicePixelContentBoxSize[0].blockSize;
      if (this.autoresize) {
        this.input.width = width;
        this.input.height = height;
      }
      this.state.updateViewport(width, height);
      this.state.drawImmediate();
      this.#rescheduleDraw();
    }
  });

  constructor() {
    super();

    const sheet = new CSSStyleSheet();
    sheet.insertRule(':host([hidden]) { display: none; }');
    sheet.insertRule(':host { display: block; position: relative; }');
    sheet.insertRule(
      ':host * { position: absolute; width: 100%; height: 100%; }'
    );
    sheet.insertRule(':host *:not(:last-child) { visibility: hidden; }');

    const shadow = this.attachShadow({ mode: 'closed' });
    const output = (this.output = document.createElement('canvas'));
    const input = (this.input =
      this.querySelector<HTMLImageElement | HTMLVideoElement>(
        ':not(canvas, script)'
      ) || document.createElement('canvas'));

    shadow.adoptedStyleSheets = [sheet];
    shadow.appendChild(input);
    shadow.appendChild(output);
  }

  getContext(
    contextId: '2d',
    options?: CanvasRenderingContext2DSettings
  ): CanvasRenderingContext2D | null;
  getContext(
    contextId: 'bitmaprenderer',
    options?: ImageBitmapRenderingContextSettings
  ): ImageBitmapRenderingContext | null;
  getContext(
    contextId: 'webgl',
    options?: WebGLContextAttributes
  ): WebGLRenderingContext | null;
  getContext(
    contextId: 'webgl2',
    options?: WebGLContextAttributes
  ): WebGL2RenderingContext | null;

  getContext(contextId: string, options?: any) {
    if (!(this.input instanceof HTMLCanvasElement)) {
      return null;
    }
    this.input.width = this.width;
    this.input.height = this.height;
    return this.input.getContext(contextId, {
      alpha: true,
      desynchronized: true,
      preserveDrawingBuffer: true,
      ...options,
    });
  }

  toBlob(callback: BlobCallback, type?: string, quality?: any): void {
    return this.output.toBlob(callback, type, quality);
  }

  toDataURL(type?: string, quality?: any): string {
    return this.output.toDataURL(type, quality);
  }

  captureStream(frameRequestRate?: number): MediaStream {
    return this.output.captureStream(frameRequestRate);
  }

  transferControlToOffscreen(): OffscreenCanvas {
    return (
      this.input instanceof HTMLCanvasElement ? this.input : this.output
    ).transferControlToOffscreen();
  }

  get autoresize() {
    return this.hasAttribute('autoresize');
  }

  set autoresize(autoresize: boolean) {
    if (autoresize) {
      this.setAttribute('autoresize', '');
    } else {
      this.removeAttribute('autoresize');
    }
  }

  get source() {
    let text = '';
    for (const child of this.childNodes) {
      if (child.nodeType === Node.TEXT_NODE) {
        text += child.textContent || '';
      } else if (child instanceof HTMLScriptElement) {
        text = child.textContent || '';
        break;
      }
    }
    return text.trim();
  }

  get width() {
    if (this.state) {
      return this.output.width;
    } else {
      return this.clientWidth * devicePixelRatio;
    }
  }

  set width(width) {
    this.input.width = width;
  }

  get height() {
    if (this.state) {
      return this.output.height;
    } else {
      return this.clientHeight * devicePixelRatio;
    }
  }

  set height(height) {
    this.input.height = height;
  }

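  // Render loop: cancel any pending frame and start a fresh
  // requestAnimationFrame chain that feeds the input element through the
  // shader on every frame.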
  #frameID: number | undefined;
  #rescheduleDraw() {
    const self = this;
    if (this.#frameID !== undefined) {
      cancelAnimationFrame(this.#frameID);
      this.#frameID = undefined;
    }
    this.#frameID = requestAnimationFrame(function draw(
      timestamp: DOMHighResTimeStamp
    ) {
      if (self.state) {
        self.state.draw(self.input, timestamp);
        self.#frameID = requestAnimationFrame(draw);
      }
    });
  }

  connectedCallback() {
    // Build the GL state on first connection only; re-use it if the element is
    // disconnected and re-inserted into the document.
    if (!this.state) {
      const gl = this.output.getContext('webgl2', {
        alpha: true,
        desynchronized: true,
        preserveDrawingBuffer: true,
      });

      const init = {
        fragSource: this.source,
        width: this.clientWidth * devicePixelRatio,
        height: this.clientHeight * devicePixelRatio,
      };

      this.state = gl && createState(gl, init);
    }

    if (this.state) {
      this.#mutationObserver.observe(this, {
        subtree: true,
        characterData: true,
      });
      this.#resizeObserver.observe(this, { box: 'device-pixel-content-box' });
      this.#rescheduleDraw();
    }
  }

  disconnectedCallback() {
    this.#mutationObserver.disconnect();
    this.#resizeObserver.disconnect();
    if (this.#frameID !== undefined) {
      cancelAnimationFrame(this.#frameID);
      this.#frameID = undefined;
    }
  }
}

customElements.define('frag-canvas', FragCanvas);
export { FragCanvas };
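A second sketch covers the other mode of use: when there is no image or video child, the internal input is a plain canvas, and getContext() (which the element proxies to that canvas) lets ordinary canvas drawing be post-processed by the fragment shader every frame. The selector and the drawing code are illustrative.

// Post-processing sketch: draw with the 2D canvas API; the shader filters the
// result each animation frame. Assumes a <frag-canvas> with only a <script>
// shader child is already in the page.
const fc = document.querySelector<FragCanvas>('frag-canvas');
const ctx = fc?.getContext('2d'); // proxied to the hidden input canvas
if (fc && ctx) {
  ctx.fillStyle = 'rebeccapurple';
  ctx.fillRect(0, 0, fc.width, fc.height);
}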