This is a quick example of how to use postprocessing on HTML-like 3D objects, as a follow-up to the first article here.
The effect is a water-like trail/ripple across the screen. And since we now have an HTML-like structure to our 3D objects in terms of widths, heights and positions, we can render them to a render target and apply postprocessing to it.
Below is a quick video of it:
As you can see it looks pretty good, but the downside is that resizing or rotating produces quite bad artefacts. These are yet to be ironed out, but you still have a working example to build on and swap effects in and out of.
Why Replicate the Layout of HTML in THREE?
The aim is to mimic the HTML in a THREE setting using calculated positions, widths and heights. That way, when postprocessing renders the canvas to the screen, we can effectively apply effects to the HTML via the display of the THREE components.
If we didn't do this, we would be drastically limited in how we could apply postprocessing effects to the rendered three.js scene.
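As a concrete illustration, here is a rough sketch of the idea (the helper name and the 1-world-unit-per-CSS-pixel camera mapping are my assumptions, not code from the project):

```js
// Hypothetical helper: copy a DOM element's layout onto a plane mesh.
// Assumes a camera configured so that 1 world unit == 1 CSS pixel,
// with the origin at the centre of the viewport.
function syncPlaneToElement(plane, element) {
  const rect = element.getBoundingClientRect();

  // Match the element's size (plane geometry assumed to be 1x1).
  plane.scale.set(rect.width, rect.height, 1);

  // Match the element's centre, converting from top-left DOM coordinates
  // to centre-origin three.js coordinates (y is flipped).
  plane.position.x = rect.left + rect.width / 2 - window.innerWidth / 2;
  plane.position.y = -(rect.top + rect.height / 2) + window.innerHeight / 2;
}
```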
Side note on details
As there is a live example of this, I'm only going to briefly describe the process rather than go into detail and copy and paste every code snippet.
Effect Composer Setup
The effect composer is built to accommodate a feedback loop between renders, i.e. the render result is fed back into the start of the effect/render loop, and the maths in the shaders then propagates the effect.
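Stripped of everything else, the loop is just two render targets being "ping-ponged" each frame. A minimal sketch (assuming fboA and fboB are two same-sized WebGLRenderTargets); the full component follows below:

```js
// Read from one target while writing to the other, then swap, so this
// frame's output becomes next frame's input.
let read = fboA;
let write = fboB;

function renderFeedbackFrame(gl, scene, camera) {
  // (the shaders sample read.texture via a uniform)
  gl.setRenderTarget(write); // draw the simulation into the write target
  gl.render(scene, camera);

  [read, write] = [write, read]; // swap the buffers

  gl.setRenderTarget(null); // back to the default framebuffer
}
```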
I am not the creator of the ripple effect, so please give credit where it's due: “A simple water effect by Tom@2016 (shadertoy) - based on PolyCube version: http://polycu.be/edit/?h=W2L7zN”
I have modified it to work with the postprocessing library and created a custom effect.
So how do we do the feedback loop? See below for a code dump:
```jsx
import { useRef, forwardRef, useState, useMemo, useEffect } from "react";
import { useFrame, useThree, createPortal } from "@react-three/fiber";
import { OrthographicCamera, useFBO, Stats } from "@react-three/drei";
import { EffectComposer } from "@react-three/postprocessing";
import * as THREE from "three";

import { RippleEffect } from "./Ripple";
import OffScreenScene from "./OffScreenScene";

const cameraZ = 1;
let lastMousePosition = new THREE.Vector2();
// Scratch objects, reused to avoid per-frame allocations.
const vec3 = new THREE.Vector3();
const vec3b = new THREE.Vector3();
const zoomCalcBox3 = new THREE.Box3();
const mouseCenter = new THREE.Vector3(0.5, 0.5, 1.1);
const res = new THREE.Vector2();

function calculateOrthographicZoom(camera, object, distance) {
  const boundingBox = zoomCalcBox3.setFromObject(object);
  const objectSize = boundingBox.getSize(vec3);

  const screenWidth = camera.right - camera.left;
  const screenHeight = camera.top - camera.bottom;

  const zoomX = screenWidth / objectSize.x;
  const zoomY = screenHeight / objectSize.y;

  // Choose the larger of the two zoom values to ensure the object fills the screen
  return Math.max(zoomX, zoomY);
}

function calculateDistanceToCamera(camera, object) {
  // Two separate scratch vectors here: if both positions were written into
  // the same vector, the distance would always come out as zero.
  camera.getWorldPosition(vec3);
  object.getWorldPosition(vec3b);

  return vec3.distanceTo(vec3b);
}

export default forwardRef(function Plane({ ...props }, crocVectors) {
  const { size, gl, set, scene } = useThree();
  const offScreen = useRef();
  const composerRef = useRef(null);
  const rippleShaderPassRef = useRef(null);

  const [orthoZoom, setOrthoZoom] = useState(1);

  // Two FBOs, ping-ponged each frame to create the feedback loop.
  const offScreenFBOTexture = useFBO(size.width, size.height);
  const onScreenFBOTexture = useFBO(size.width, size.height);

  const [offScreenScene] = useState(() => new THREE.Scene());
  const offScreenCameraRef = useRef(null);

  let textureA = offScreenFBOTexture;
  let textureB = onScreenFBOTexture;
  res.x = size.width;
  res.y = size.height;

  useFrame((state) => {
    const { gl, clock, pointer } = state;

    // Render the offscreen (simulation) scene into the write target.
    gl.setRenderTarget(textureB);
    gl.render(offScreenScene, offScreenCameraRef.current);

    // Swap textureA and textureB: this frame's output feeds the next frame.
    const t = textureA;
    textureA = textureB;
    textureB = t;

    rippleShaderPassRef.current.uniforms.get("bufferTexture").value =
      textureB.texture;
    offScreen.current.material.uniforms.bufferTexture.value = textureA.texture;

    if (rippleShaderPassRef.current) {
      // Roughly every 2.4 s, poke the centre of the buffer: mouse.z > 1
      // is what triggers a new ripple in the shaders.
      if (Math.round(clock.elapsedTime / 0.1) % 24.0 === 0) {
        offScreen.current.material.uniforms.mouse.value.x = 0.5;
        offScreen.current.material.uniforms.mouse.value.y = 0.5;
        offScreen.current.material.uniforms.mouse.value.z = 1.01;
        rippleShaderPassRef.current.uniforms.get("mouse").value.x = 0.5;
        rippleShaderPassRef.current.uniforms.get("mouse").value.y = 0.5;
        rippleShaderPassRef.current.uniforms.get("mouse").value.z = 1.01;
      } else {
        offScreen.current.material.uniforms.mouse.value.x = 0.5;
        offScreen.current.material.uniforms.mouse.value.y = 0.5;
        offScreen.current.material.uniforms.mouse.value.z = 0.0;
        rippleShaderPassRef.current.uniforms.get("mouse").value.x = 0.5;
        rippleShaderPassRef.current.uniforms.get("mouse").value.y = 0.5;
        rippleShaderPassRef.current.uniforms.get("mouse").value.z = 1.01;
      }
    }

    offScreen.current.material.uniforms.time.value = clock.elapsedTime;
    offScreen.current.material.uniforms.res.value = res;

    // Back to the default framebuffer, then let the composer draw the result.
    gl.setRenderTarget(null);
    composerRef.current.render();
  });

  useEffect(() => {
    const zoom = calculateOrthographicZoom(
      offScreenCameraRef.current,
      offScreen.current,
      calculateDistanceToCamera(offScreenCameraRef.current, offScreen.current)
    );

    setOrthoZoom(zoom);

    const camera = new THREE.PerspectiveCamera(
      50,
      window.innerWidth / window.innerHeight,
      0.1,
      1000
    );
    camera.position.set(0, 0, cameraZ);
    set({ camera: camera });
  }, [set]);

  return (
    <>
      <Stats />
      <EffectComposer ref={composerRef} multisampling={8} depthBuffer={true}>
        <RippleEffect
          ref={rippleShaderPassRef}
          bufferTexture={onScreenFBOTexture.texture}
          res={new THREE.Vector2(size.width, size.height)}
          mouse={mouseCenter}
        />
      </EffectComposer>
      {createPortal(
        <>
          <OffScreenScene
            ref={offScreen}
            bufferTexture={offScreenFBOTexture.texture}
          />

          <OrthographicCamera
            makeDefault
            position={[0, 0, 2]}
            args={[-1, 1, 1, -1, 1, 10]}
            aspect={size.width / size.height}
            ref={offScreenCameraRef}
            zoom={orthoZoom}
          />
        </>,
        offScreenScene
      )}
    </>
  );
});
```
What we actually see is the output of the onscreen EffectComposer:
```jsx
<EffectComposer ref={composerRef} multisampling={8} depthBuffer={true}>
  <RippleEffect
    ref={rippleShaderPassRef}
    bufferTexture={onScreenFBOTexture.texture}
    res={new THREE.Vector2(size.width, size.height)}
    mouse={mouseCenter}
  />
</EffectComposer>
```
Pretty basic here, and don't forget this EffectComposer comes from the @react-three/postprocessing wrapper around the postprocessing library.
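To avoid confusion between the two packages involved:

```js
// The React component comes from the wrapper library...
import { EffectComposer } from "@react-three/postprocessing";
// ...while the Effect base class used for custom effects comes from the
// underlying vanilla postprocessing library.
import { Effect } from "postprocessing";
```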
And the propagation of the ripple occurs in this part, the offscreen scene:
```jsx
{createPortal(
  <>
    <OffScreenScene
      ref={offScreen}
      bufferTexture={offScreenFBOTexture.texture}
    />

    <OrthographicCamera
      makeDefault
      position={[0, 0, 2]}
      args={[-1, 1, 1, -1, 1, 10]}
      aspect={size.width / size.height}
      ref={offScreenCameraRef}
      zoom={orthoZoom}
    />
  </>,
  offScreenScene
)}
```
createPortal allows you to insert another scene into your current scene tree, with its own camera. This lets you grab a ref to that scene (offScreenScene here) and render it yourself in the render loop. Voilà: an offscreen render, independent of the main scene. Perfect for feedback loops and for porting multi-buffer shaders from ShaderToy.
Vertex Shader
The vertex shader is just a bog-standard one with the matrices and the position attribute.
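For reference, it looks roughly like this (a sketch of the standard three.js ShaderMaterial boilerplate; the export name offScreenVertexShader is my assumption, not taken from the repo):

```js
// Assumed name for illustration; the standard pass-through vertex shader.
export const offScreenVertexShader = /* glsl */ `
  varying vec2 vUv;

  void main() {
    // Hand the UVs to the fragment shader...
    vUv = uv;
    // ...and transform the position attribute with the built-in matrices.
    gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
  }
`;
```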
Fragment Shader
There are two fragment shaders: the onscreen one and the offscreen one.
onScreen:
This is where we add the color and the ripple-like effect:
```glsl
uniform vec2 res;
uniform sampler2D bufferTexture;
uniform sampler2D colorTexture;
uniform sampler2D crocTexture;
uniform vec3 mouse;

// A simple water effect by Tom@2016
//
// based on PolyCube version:
// http://polycu.be/edit/?h=W2L7zN
//
// As people give me too much credit for this one,
// it's based on: http://freespace.virgin.net/hugo.elias/graphics/x_water.htm
// A very old Hugo Elias water tutorial :)
//
// Note:
// I could use one buffer only as in https://www.shadertoy.com/view/4sd3WB
// with a clever trick to utilize two channels
// and keep buffer A in x/r and buffer B in y/g.
// However, now I render every second simulation step,
// so the animation is more dynamic.
//
// Here is 1-buffer version for comparison:
// https://www.shadertoy.com/view/4dK3Ww
//

const float kernel = 10.0f;
const float weight = 1.0f;

void mainImage(const in vec4 inputColor, const in vec2 uv, out vec4 outputColor) {

  vec2 center = mouse.xy;

  //creates a shorthand for sin(iTime)
  float sSin = 2.1f;

  //changes the vUv to zoom in on the screen
  vec2 q = uv;

  vec3 e = vec3(vec2(1.f) / res.xy, 0.f);
  float p10 = texture(bufferTexture, q - e.zy).x;
  float p01 = texture(bufferTexture, q - e.xz).x;
  float p21 = texture(bufferTexture, q + e.xz).x;
  float p12 = texture(bufferTexture, q + e.zy).x;

  // Totally fake displacement and shading:
  vec3 grad = normalize(vec3(p21 - p01, p12 - p10, .1f)) * 1.25f;

  vec4 c = texture(inputBuffer, uv + grad.xy * 0.01f);
  vec3 light = normalize(vec3(24.0f, 2.5f, 10.0f));
  float diffuse = 16.0f - dot(grad, light);
  float spec = pow(max(0.f, reflect(light, grad).z), 10.f);

  vec4 backgroundColor = texture(inputBuffer, uv);

  outputColor = vec4(mix(c.rgb * max(diffuse, 1.f) + spec * uv.x, vec3(0.0f), 0.3f) * 0.1, 1.0f);
}
```
offScreen:
And this is where the propagation happens:
```js
export const offScreenFragmentShader = `
uniform sampler2D bufferTexture;
uniform vec3 mouse;
uniform float time;
uniform vec2 res;

varying vec2 vUv;

// Make this a smaller number for a smaller timestep.
// Don't make it bigger than 1.4 or the universe will explode.
const float delta = 1.8f;

void main() {
  vec3 e = vec3(vec2(1.f) / res.xy / 2.0f, 0.f);
  vec2 q = vUv;

  vec4 c = texture(bufferTexture, q);

  float p11 = c.y;

  float p10 = texture(bufferTexture, q - e.zy).x;
  float p01 = texture(bufferTexture, q - e.xz).x;
  float p21 = texture(bufferTexture, q + e.xz).x;
  float p12 = texture(bufferTexture, q + e.zy).x;

  float d = 0.f;

  if (mouse.z > 1.f) {
    // Mouse interaction:
    d = smoothstep(4.0f, 0.0f, length(mouse.xy * res.xy - vUv.xy * res.xy));
  }

  // The actual propagation:
  d += -(p11 - .5f) * 2.f + (p10 + p01 + p21 + p12 - 2.f);
  d *= .995f; // dampening

  d *= float(time >= 2.0f); // clear the buffer at iFrame < 2
  d = d * .5f + .5f;

  gl_FragColor = vec4(d, c.x, 0, 0);
}
`;
```
Quick rundown: we store the current and previous ripple values in the x and y channels of the output; this is how we propagate the ripple, or wave-like, effect. And because the output feeds back into the input, it can continue perpetually. This is where that comes into play:
```glsl
d *= .995f; // dampening
```
which dissipates the ripples over time: each frame only 99.5% of the ripple's amplitude is carried over to the next, so 0.5% is lost per step and the waves gradually die away.
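If you want a feel for how fast that decays, a quick back-of-the-envelope check:

```js
// Amplitude after n frames is 0.995^n, so the ripple halves roughly
// every log(0.5) / log(0.995) frames.
const framesToHalve = Math.log(0.5) / Math.log(0.995);
console.log(framesToHalve); // ~138 frames, i.e. about 2.3 s at 60 fps
```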
Postprocessing Effect
This is the custom effect, which basically follows the custom effect pattern from the @react-three/postprocessing library:
```jsx
/* eslint-disable react/display-name */
import React, { forwardRef, useMemo } from "react";
import { Uniform } from "three";
import { Effect } from "postprocessing";
import { onScreenFragmentShader } from "./shaders/onScreenFragmentShader";

// Note: module-level storage means only one instance of this effect
// can exist at a time.
let _bufferTexture;
let _res;
let _mouse;
let _colorTexture;
let _crocTexture;

// Effect implementation
class MyCustomEffectImpl extends Effect {
  constructor({
    bufferTexture = null,
    res = null,
    mouse = null,
    colorTexture = null,
    crocTexture = null,
  } = {}) {
    super("RippleEffect", onScreenFragmentShader, {
      uniforms: new Map([
        ["bufferTexture", new Uniform(bufferTexture)],
        ["res", new Uniform(res)],
        ["mouse", new Uniform(mouse)],
        ["colorTexture", new Uniform(colorTexture)],
        ["crocTexture", new Uniform(crocTexture)],
      ]),
      // blendFunction: BlendFunction.NORMAL
    });

    _bufferTexture = bufferTexture;
    _res = res;
    _mouse = mouse;
    _colorTexture = colorTexture;
    _crocTexture = crocTexture;
  }

  // Runs every frame: push the latest values into the shader uniforms.
  update(renderer, inputBuffer, deltaTime) {
    this.uniforms.get("bufferTexture").value = _bufferTexture;
    this.uniforms.get("res").value = _res;
    this.uniforms.get("mouse").value = _mouse;
    this.uniforms.get("colorTexture").value = _colorTexture;
    this.uniforms.get("crocTexture").value = _crocTexture;
  }
}

// Effect component
const RippleEffect = forwardRef(
  ({ bufferTexture, res, mouse, colorTexture, crocTexture }, ref) => {
    const effect = useMemo(() => {
      return new MyCustomEffectImpl({
        bufferTexture,
        res,
        mouse,
        colorTexture,
        crocTexture,
      });
    }, [bufferTexture, res, mouse, colorTexture, crocTexture]);
    return <primitive ref={ref} object={effect} />;
  }
);

export { RippleEffect };
```
Go explore the sandbox and play around with the code a bit.
We now have a basic setup for doing complex postprocessing on top of HTML-like structured three.js objects, complete with offscreen and onscreen renders.