//--------------- POST EFFECT DEFINITION ------------------------//
pc.extend(pc, function () {
    // Constructor - creates an instance of our post effect
    var RefractionPostEffect = function (graphicsDevice, vs, fs) {
        var fragmentShader = "precision " + graphicsDevice.precision + " float;\n";
        fragmentShader = fragmentShader + fs;

        // This is the shader definition for our effect
        this.shader = new pc.Shader(graphicsDevice, {
            attributes: {
                aPosition: pc.SEMANTIC_POSITION
            },
            vshader: vs,
            fshader: fragmentShader
        });
    };

    // Our effect must derive from pc.PostEffect
    RefractionPostEffect = pc.inherits(RefractionPostEffect, pc.PostEffect);

    RefractionPostEffect.prototype = pc.extend(RefractionPostEffect.prototype, {
        // Every post effect must implement the render method,
        // which sets any parameters the shader needs
        // and draws the effect on the screen
        render: function (inputTarget, outputTarget, rect) {
            var device = this.device;
            var scope = device.scope;

            // Set the input render target for the shader. This is the image rendered by our camera
            scope.resolve("uColorBuffer").setValue(inputTarget.colorBuffer);

            // Draw a fullscreen quad on the output target. Here the output target is the screen.
            // Drawing the fullscreen quad runs the shader defined above
            pc.drawFullscreenQuad(device, outputTarget, this.vertexBuffer, this.shader, rect);
        }
    });

    return {
        RefractionPostEffect: RefractionPostEffect
    };
}());

//--------------- SCRIPT DEFINITION ------------------------//
var Refraction = pc.createScript('refraction');

Refraction.attributes.add('vs', {
    type: 'asset',
    assetType: 'shader',
    title: 'Vertex Shader'
});

Refraction.attributes.add('fs', {
    type: 'asset',
    assetType: 'shader',
    title: 'Fragment Shader'
});

// initialize code called once per entity
Refraction.prototype.initialize = function() {
    var effect = new pc.RefractionPostEffect(this.app.graphicsDevice, this.vs.resource, this.fs.resource);

    // Add the effect to the camera's postEffects queue
    var queue = this.entity.camera.postEffects;
    queue.addEffect(effect);

    this.effect = effect;

    // Save the current shader code so update can detect live edits
    this.savedVS = this.vs.resource;
    this.savedFS = this.fs.resource;
};

Refraction.prototype.update = function(){
    // If either shader asset changed, rebuild the effect
    if(this.savedFS != this.fs.resource || this.savedVS != this.vs.resource){
        this.swap(this);
    }
};

Refraction.prototype.swap = function(old){
    this.entity.camera.postEffects.removeEffect(old.effect);
    this.initialize();
};
This defines the RefractionPostEffect class, which can be applied to the camera. For rendering, it needs vertex and fragment shaders. The attributes are already configured, so let's create a Refraction.frag with the following contents:

precision highp float;
uniform sampler2D uColorBuffer;
varying vec2 vUv0;

void main() {
    vec4 color = texture2D(uColorBuffer, vUv0);
    gl_FragColor = color;
}
Then create a Refraction.vert with the following contents:

attribute vec2 aPosition;
varying vec2 vUv0;

void main(void) {
    gl_Position = vec4(aPosition, 0.0, 1.0);
    // Map clip-space positions [-1, 1] to UV coordinates [0, 1]
    vUv0 = (aPosition.xy + 1.0) * 0.5;
}
Back in Refraction.js, update the constructor so the effect keeps track of time:

var RefractionPostEffect = function (graphicsDevice, vs, fs) {
    var fragmentShader = "precision " + graphicsDevice.precision + " float;\n";
    fragmentShader = fragmentShader + fs;

    // This is the shader definition for our effect
    this.shader = new pc.Shader(graphicsDevice, {
        attributes: {
            aPosition: pc.SEMANTIC_POSITION
        },
        vshader: vs,
        fshader: fragmentShader
    });

    // >>>>>>>>>>>>> New: initialize the time counter
    this.time = 0;
};
Then set the uniform every frame in the render method:

RefractionPostEffect.prototype = pc.extend(RefractionPostEffect.prototype, {
    // Every post effect must implement the render method,
    // which sets any parameters the shader needs
    // and draws the effect on the screen
    render: function (inputTarget, outputTarget, rect) {
        var device = this.device;
        var scope = device.scope;

        // Set the input render target for the shader. This is the image rendered by our camera
        scope.resolve("uColorBuffer").setValue(inputTarget.colorBuffer);

        // >>>>>>>>>>>>>>>>>> New: pass the uniform and advance the time counter
        scope.resolve("uTime").setValue(this.time);
        this.time += 0.1;

        // Draw a fullscreen quad on the output target. Here the output target is the screen.
        // Drawing the fullscreen quad runs the shader defined above
        pc.drawFullscreenQuad(device, outputTarget, this.vertexBuffer, this.shader, rect);
    }
});
Now we can use uTime in Refraction.frag:

precision highp float;
uniform sampler2D uColorBuffer;
uniform float uTime;
varying vec2 vUv0;

void main() {
    vec2 pos = vUv0;
    float X = pos.x*15.+uTime*0.5;
    float Y = pos.y*15.+uTime*0.5;
    pos.y += cos(X+Y)*0.01*cos(Y);
    pos.x += sin(X-Y)*0.01*sin(Y);

    vec4 color = texture2D(uColorBuffer, pos);
    gl_FragColor = color;
}
Task 1: make the distortion apply only to the bottom of the screen.
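One possible solution, as a minimal sketch (an assumption, not the article's own answer; it presumes vUv0.y is 0 at the bottom edge of the screen): scale the offsets by a factor that is 1.0 in the lower half and 0.0 in the upper half.

// Hypothetical variant of the two offset lines above
float lower = step(vUv0.y, 0.5); // 1.0 when vUv0.y <= 0.5, i.e. the bottom half
pos.y += cos(X+Y)*0.01*cos(Y) * lower;
pos.x += sin(X-Y)*0.01*sin(Y) * lower;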
Add a boolean attribute to Water.js:

Water.attributes.add('isMask', { type: 'boolean', title: "Is Mask?" });

and pass it to the shader with:

material.setParameter('isMask', this.isMask);

Then declare the new uniform in Water.frag and paint the pixel white when it is true:

// Declare the new uniform
uniform bool isMask;

// Later, in main(), override the color when rendering the mask
if(isMask){
    color = vec4(1.0);
}
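For orientation, here is roughly where those two lines sit in Water.js (a sketch; the surrounding code and the name this.material come from the earlier part of the tutorial and are assumptions here):

// Water.js (sketch, assumed structure)
var Water = pc.createScript('water');

Water.attributes.add('isMask', { type: 'boolean', title: "Is Mask?" });

Water.prototype.initialize = function () {
    // ... existing mesh and material setup from earlier in the series ...
    // 'this.material' is assumed to be the water material created above
    this.material.setParameter('isMask', this.isMask);
};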
var CameraMask = pc.createScript('cameraMask');

// initialize code called once per entity
CameraMask.prototype.initialize = function() {
    // Create a 512x512, 24-bit color texture for the render target
    var colorBuffer = new pc.Texture(this.app.graphicsDevice, {
        width: 512,
        height: 512,
        format: pc.PIXELFORMAT_R8_G8_B8,
        autoMipmap: true
    });
    colorBuffer.minFilter = pc.FILTER_LINEAR;
    colorBuffer.magFilter = pc.FILTER_LINEAR;

    var renderTarget = new pc.RenderTarget(this.app.graphicsDevice, colorBuffer, {
        depth: true
    });

    this.entity.camera.renderTarget = renderTarget;
};
Now grab that texture in Refraction.js and pass it to the post effect:

Refraction.prototype.initialize = function() {
    var cameraMask = this.app.root.findByName('CameraMask');
    var maskBuffer = cameraMask.camera.renderTarget.colorBuffer;

    var effect = new pc.RefractionPostEffect(this.app.graphicsDevice, this.vs.resource, this.fs.resource, maskBuffer);

    // ... (the rest of this method is unchanged)
};
Then update the post-effect constructor to accept and store it:

var RefractionPostEffect = function (graphicsDevice, vs, fs, buffer) {
    var fragmentShader = "precision " + graphicsDevice.precision + " float;\n";
    fragmentShader = fragmentShader + fs;

    // This is the shader definition for our effect
    this.shader = new pc.Shader(graphicsDevice, {
        attributes: {
            aPosition: pc.SEMANTIC_POSITION
        },
        vshader: vs,
        fshader: fragmentShader
    });

    this.time = 0;
    //// <<<<<<<<<<<<< New: store the mask buffer
    this.buffer = buffer;
};
scope.resolve("uMaskBuffer").setValue(this.buffer);
Task 2: render uMaskBuffer to the screen to make sure that it is the output of the second camera.
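A quick way to do that, as a sketch: temporarily replace the last line of Refraction.frag so the mask is drawn instead of the scene.

// Temporary debug output; revert after checking
gl_FragColor = texture2D(uMaskBuffer, vUv0);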
CameraMask.prototype.update = function(dt) {
    var pos = this.CameraToFollow.getPosition();
    var rot = this.CameraToFollow.getRotation();
    this.entity.setPosition(pos.x, pos.y, pos.z);
    this.entity.setRotation(rot);
};
This keeps the mask camera aligned with the main camera. The update method refers to CameraToFollow, so define it in initialize:

this.CameraToFollow = this.app.root.findByName('Camera');

Next, set up culling masks. A camera renders an object only when the bitwise AND between the object's mesh-instance mask and the camera's culling mask is nonzero. To clear bit N of a mask, AND it with ~(1 << N) >>> 0:
// This camera (the mask camera) should not render objects on layer 2
this.entity.camera.camera.cullingMask &= ~(1 << 2) >>> 0;

// The followed camera should not render objects on layer 3
this.CameraToFollow.camera.camera.cullingMask &= ~(1 << 3) >>> 0;

// If you want to inspect the resulting mask in binary:
// console.log((this.CameraToFollow.camera.camera.cullingMask >>> 0).toString(2));
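To make the bit arithmetic concrete, here is a small standalone illustration (plain JavaScript, values chosen just for the example):

var mask = 0xFFFFFFFF;                  // all 32 layers enabled by default
mask = (mask & ~(1 << 2)) >>> 0;        // clear bit 2, keep the result unsigned
console.log(mask.toString(2));          // 11111111111111111111111111111011

// An object whose meshInstance.mask has only bit 2 set now fails the AND test:
console.log((mask & (1 << 2)) !== 0);   // false -> this camera skips the object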
Then, in Water.js's initialize, set the mesh instance's mask based on the attribute:

// Put the water on layer 2 normally, or on layer 3 when it acts as the mask
var bit = this.isMask ? 3 : 2;
meshInstance.mask = 0;
meshInstance.mask |= (1 << bit);
Back in Refraction.frag, we sample the mask at both the distorted and the original position, and cancel the distortion for pixels that fall outside the mask:

// Sample the mask at the new distorted position and at the original one
vec4 maskColor = texture2D(uMaskBuffer, pos);
vec4 maskColor2 = texture2D(uMaskBuffer, vUv0);

// Is either sample not fully white (i.e. outside the water mask)?
if(maskColor != vec4(1.0) || maskColor2 != vec4(1.0)){
    // If so, undo the distortion
    pos = vUv0;
}
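Putting the pieces together, the full Refraction.frag now looks roughly like this (a consolidation of the snippets above, nothing new added):

precision highp float;
uniform sampler2D uColorBuffer;
uniform sampler2D uMaskBuffer;
uniform float uTime;
varying vec2 vUv0;

void main() {
    vec2 pos = vUv0;
    float X = pos.x*15.+uTime*0.5;
    float Y = pos.y*15.+uTime*0.5;
    pos.y += cos(X+Y)*0.01*cos(Y);
    pos.x += sin(X-Y)*0.01*sin(Y);

    // Cancel the distortion outside the water mask
    vec4 maskColor = texture2D(uMaskBuffer, pos);
    vec4 maskColor2 = texture2D(uMaskBuffer, vUv0);
    if(maskColor != vec4(1.0) || maskColor2 != vec4(1.0)){
        pos = vUv0;
    }

    vec4 color = texture2D(uColorBuffer, pos);
    gl_FragColor = color;
}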
Source: https://habr.com/ru/post/417091/