Rendering camera video in real time as a colorful sand-dust filter effect
Code language: html
Category: Visual effects
Code description: Renders the camera's video feed in real time as a colorful sand-dust filter effect, using a WebGL fragment shader that domain-warps the image with layered noise and feeds each frame back through an offscreen buffer.
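The effect is a two-pass feedback setup: a buffer pass samples the camera texture (u_video), recolors it with a domain-warped palette, and blends it with the previous frame (u_buffer), while a second pass simply copies the buffer to the screen, switched by the u_bufferpass uniform. The sketch below shows that ping-pong pattern in plain WebGL purely for orientation; it is not the original driver code (which uses the WTCGL helper shown in the preview), and names such as gl, canvas, and u_bufferpassLoc are assumed to already exist.

// Illustrative ping-pong feedback loop in plain WebGL (assumed names: gl, canvas,
// u_bufferpassLoc; the shader program and a full-screen quad are already set up).
function createTarget(gl, w, h) {
  // One texture + framebuffer pair; two of these alternate as read/write targets.
  const tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  return { tex, fb };
}

let read = createTarget(gl, canvas.width, canvas.height);
let write = createTarget(gl, canvas.width, canvas.height);

function frame() {
  // Buffer pass: run the effect into `write`, reading last frame as u_buffer.
  gl.bindFramebuffer(gl.FRAMEBUFFER, write.fb);
  gl.uniform1i(u_bufferpassLoc, 1);          // u_bufferpass = true
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, read.tex);   // texture unit bound to u_buffer
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

  // Screen pass: copy the freshly rendered buffer to the default framebuffer.
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  gl.uniform1i(u_bufferpassLoc, 0);          // u_bufferpass = false
  gl.bindTexture(gl.TEXTURE_2D, write.tex);
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

  [read, write] = [write, read];             // swap roles for the next frame
  requestAnimationFrame(frame);
}
requestAnimationFrame(frame);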
Below is a partial code preview. For the full code, click the download link or open it in the bfwstudio webide.
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <style>
    body { margin:0; }
    canvas { position: fixed; }
  </style>
</head>
<body translate="no">
<canvas id="webgl" width="500" height="1758"></canvas>

<script id="vertexShader" type="x-shader/x-vertex">
  attribute vec4 a_position;
  uniform mat4 u_modelViewMatrix;
  uniform mat4 u_projectionMatrix;

  void main() {
    gl_Position = a_position;
  }
</script>

<script id="fragmentShader" type="x-shader/x-fragment">
  precision highp float;

  uniform vec2 u_resolution;
  uniform vec2 u_mouse;
  uniform float u_time;
  uniform sampler2D u_noise;
  uniform int u_frame;
  uniform sampler2D u_buffer;
  uniform bool u_bufferpass;
  uniform vec2 u_video_size;
  uniform float u_video_ratio;
  uniform sampler2D u_video;

  vec2 getScreenSpace() {
    vec2 uv = (gl_FragCoord.xy - 0.5 * u_resolution.xy) / min(u_resolution.y, u_resolution.x);
    return uv;
  }

  float rand(vec2 co) {
    return fract(sin(dot(co.xy, vec2(12.9898, 78.233))) * 43758.5453);
  }

  vec2 rotate(float a, vec2 v) {
    float c = cos(a);
    float s = sin(a);
    return mat2(c, -s, s, c) * v;
  }

  vec3 rgb2hsv(vec3 rgb) {
    float Cmax = max(rgb.r, max(rgb.g, rgb.b));
    float Cmin = min(rgb.r, min(rgb.g, rgb.b));
    float delta = Cmax - Cmin;

    vec3 hsv = vec3(0., 0., Cmax);

    if (Cmax > Cmin) {
      hsv.y = delta / Cmax;

      if (rgb.r == Cmax) {
        hsv.x = (rgb.g - rgb.b) / delta;
      } else {
        if (rgb.g == Cmax) hsv.x = 2. + (rgb.b - rgb.r) / delta;
        else hsv.x = 4. + (rgb.r - rgb.g) / delta;
      }
      hsv.x = fract(hsv.x / 6.);
    }
    return hsv;
  }

  vec4 render_effect() {
    if (u_frame < 1) {
      return vec4(0.);
    }

    vec2 uv = getScreenSpace();
    vec2 s = gl_FragCoord.xy / u_resolution.xy;

    // Fit the video into the canvas while preserving its aspect ratio
    float t = u_resolution.x / u_resolution.y;
    float video_ratio = u_video_size.x / u_video_size.y;
    float diff = t / video_ratio;

    vec2 sample = (uv) * vec2(1., video_ratio) + .5;
    if (t < video_ratio) {
      sample = (s) * vec2(diff, 1.);
      sample.x += (video_ratio - t) * .5;
    } else {
      sample = (s) / vec2(1., diff);
    }

    vec3 old_col = texture2D(u_video, sample).rgb;

    #define PI 3.141592653589793
    #define TAU 6.283185307179586
    #define pow2(x) (x * x)
    #define OCTAVES 8

    const vec3 blue = vec3(0./255., 230./255., 300./255.);
    const vec3 green = vec3(0./255., 200./255., 0./255.);
    const vec3 pink = vec3(243./255., 98./255., 121./255.);
    const vec3 yellow = vec3(249./255., 234./255., 53./255.);

    const float aMult = 2.293;
    const float bMult = 4.15;
    const float cMult = 2.2;

    vec2 _uv = uv;
    _uv *= 8.;
    _uv.x += u_time;
    _uv.y += sin(u_time) * 5.;

    float a = 1.0;
    float b = 1.0;
    float c = 1.0;
    float d = 0.0;

    // Domain-warp the UVs over several octaves to create the drifting "sand" pattern
    for (int s = 0; s < OCTAVES; s++) {
      vec2 r;
      r = vec2(cos(_uv.y * a - d + u_time / b), sin(_uv.x * a - d + u_time / b)) / c;
      r += vec2(-r.y, r.x) * 0.3;
      _uv.xy += r;
      _uv = rotate(sin(length(r)) * .05, _uv);

      a *= aMult;
      b *= bMult;
      c *= cMult;
      d += 0.05 + 0.1 * u_time * b;
    }

    vec3 oldcamcol = old_col;

    old_col = mix(
      old_col,
      blue * blue * .5,
      smoothstep(0.5, 1., sin(_uv.x + _uv.y * 2.) * (1. - old_col))
    );
    old_col *= mix(
      pink * pink * 2.,
      blue * blue * .9,
      smoothstep(-.8, .8, sin(_uv.x))
    );
    float mixer = sin((_uv.y + u_time)) * 0.5 + 0.5;
    old_col = mix(
      old_col,
      mix(green, yellow, smoothstep(1.1, .9, mixer)),
      mixer * oldcamcol
    );
    // old_col = blur(u_video, sample);

    vec2 dsp = rgb2hsv(old_col).xy;
    float e = 1e-4;
    vec2 sc = s * 2.;

    dsp = normalize(_uv);

    // Offset the previous-frame sample slightly and blend it in for the feedback trail
    vec2 buffer_sample = s + (rotate(atan(old_col.x, old_col.y), dsp)) / u_resolution.y;

    vec3 new_col = texture2D(u_buffer, buffer_sample).rgb;

    vec3 col = mix(old_col, new_col, float(rand(uv + u_time) < .98));
    // vec3 col = (old_col + new_col * .95) * .5;

    return vec4(col, 1.);
  }

  void main() {
    if (u_bufferpass) {
      gl_FragColor = render_effect();
    } else {
      vec4 tex = texture2D(u_buffer, gl_FragCoord.xy / u_resolution.xy);
      gl_FragColor = vec4(tex.rgb, 1.);
    }
  }
</script>

<script>
/**
 * A basic Web GL class. This provides a very basic setup for GLSL shader code.
 * Currently it doesn't support anything except for clip-space 3d, but this was
 * done so that we could start writing fragments right out of the gate. My
 * intention is to update it with particle and polygonal 3d support later on.
 *
 * @class WTCGL
 * @author Liam Egan <liam@wethecollective.com>
 * @version 0.0.8
 * @created Jan 16, 2019
 */
class WTCGL {
  /**
   * The WTCGL Class constructor. If construction of the webGL context fails
   * for any reason this will return null.
   *
   * @TODO make the dimension properties properly optional
   * @TODO provide the ability to allow for programmable buffers
   *
   * @constructor
   * @param {HTMLElement} el The canvas element to use as the root
   * @param {string} vertexShaderSource The vertex shader source
   * @param {string} fragmentShaderSource The fragment shader source
   * @param {number} [width] The width of the webGL context. This will default to the canvas dimensions
   * @param {number} [height] The height of the webGL context. This will default to the canvas dimensions
   * @param {number} [pxratio=1] The pixel aspect ratio of the canvas
   * @param {boolean} [styleElement] A boolean indicating whether to apply a style property to the canvas (resizing the canvas by the inverse of the pixel ratio)
   * @param {boolean} [webgl2] A boolean indicating whether to try to create a webgl2 context instead of a regular context
   */
  constructor(el, vertexShaderSource, fragmentShaderSource, width, height, pxratio, styleElement, webgl2) {
    this.run = this.run.bind(this);

    this._onRun = () => {};

    // Destructure if an object is provided instead of a series of parameters
    if (el instanceof Object && el.el) {
      ({ el, vertexShaderSource, fragmentShaderSource, width, height, pxratio, webgl2, styleElement } = el);
    }

    // If the HTML element isn't a canvas, return null
    if (!(el instanceof HTMLElement) || el.nodeName.toLowerCase() !== 'canvas') {
      console.log('Provided element should be a canvas element');
      return null;
    }

    this._el = el;

    // The context should be either webgl2, webgl or experimental-webgl
    if (webgl2 === true) {
      this.isWebgl2 = true;
      this._ctx = this._el.getContext("webgl2", this.webgl_params) ||
        this._el.getContext("webgl", this.webgl_params) ||
        this._el.getContext("experimental-webgl", this.webgl_params);
    } else {
      this.isWebgl2 = false;
      this._ctx = this._el.getContext("webgl", this.webgl_params) ||
        this._el.getContext("experimental-webgl", this.webgl_params);
    }

    // Set up the extensions
    this._ctx.getExtension('OES_standard_derivatives');
    this._ctx.getExtension('EXT_shader_texture_lod');
    this._ctx.getExtension('OES_texture_float');
    this._ctx.getExtension('WEBGL_color_buffer_float');
    this._ctx.getExtension('OES_texture_float_linear');
    this._ctx.getExtension('EXT_color_buffer_float');

    // We can't make the context so return an error
    if (!this._ctx) {
      console.log('Browser doesn\'t support WebGL ');
      return null;
    }

    // Create the shaders
    this._vertexShader = WTCGL.createShaderOfType(this._ctx, this._ctx.VERTEX_SHADER, vertexShaderSource);
    this._fragmentShader = WTCGL.createShaderOfType(this._ctx, this._ctx.FRAGMENT_SHADER, fragmentShaderSource);

    // Create the program and link the shaders
    this._program = this._ctx.createProgram();
    this._ctx.attachShader(this._program, this._vertexShader);
    this._ctx.attachShader(this._program, this._fragmentShader);
    this._ctx.linkProgram(this._program);

    // If we can't set up the params, this means the shaders have failed for some reason
    if (!this._ctx.getProgramParameter(this._program, this._ctx.LINK_STATUS)) {
      console.log('Unable to initialize the shader program: ' + this._ctx.getProgramInfoLog(this._program));
      return null;
    }

    // Initialise the vertex buffers
    this.initBuffers([
      -1.0,  1.0, -1.,
       1.0,  1.0, -1.,
      -1.0, -1.0, -1.,
       1.0, -1.0, -1.
    ]);

    // Initialise the frame buffers
    this.frameBuffers = [];

    // The program information object. This is essentially a state machine for the webGL instance
    this._programInfo = {
      attribs: {
        vertexPosition: this._ctx.getAttribLocation(this._program, 'a_position')
      },
      uniforms: {
        projectionMatrix: this._ctx.getUniformLocation(this._program, 'u_projectionMatrix'),
        modelViewMatrix: this._ctx.getUniformLocation(this._program, 'u_modelViewMatrix'),
        resolution: this._ctx.getUniformLocation(this._program, 'u_resolution'),
        time: this._ctx.getUniformLocation(this._program, 'u_time')
      }
    };

    // Tell WebGL to use our program when drawing
    this._ctx.useProgram(this._program);

    this.pxratio = pxratio;
    this.styleElement = styleElement !== true;

    this.resize(width, height);
  }

  /**
   * Public methods
   */

  addFrameBuffer(w, h, tiling = 0, buffertype = 0) {
    // create to render to
    const gl = this._ctx;
    const targetTextureWidth = w * this.pxratio;
    const targetTextureHeight = h * this.pxratio;
    const targetTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, targetTexture);

    {
      // define size and format of level 0
      const level = 0;
      let internalFormat = gl.RGBA;
      const border = 0;
      let format = gl.RGBA;
      let t;
      if (buffertype & WTCGL.TEXTYPE_FLOAT) {
        const e = gl.getExtension('OES_texture_float');
        window.extension = e;
        t = e.FLOAT;
        // internalFormat = gl.RGBA32F;
      } else if (buffertype & WTCGL.TEXTYPE_HALF_FLOAT_OES) {
        // t = gl.renderer.isWebgl2 ? e.HALF_FLOAT : e.HALF_FLOAT_OES;
        // gl.renderer.extensions['OES_texture_half_float'] ? gl.renderer.extensions['OES_texture_half_float'].HALF_FLOAT_OES :
        // gl.UNSIGNED_BYTE;
        const e = gl.getExtension('OES_texture_half_float');
        t = this.isWebgl2 ? gl.HALF_FLOAT : e.HALF_FLOAT_OES;
        // format = gl.RGBA;
        // internalFormat = gl.RGB32F;
        // format = gl.RGB32F;
        // window.gl = gl
        // t = e.HALF_FLOAT_OES;
      } else {
        t = gl.UNSIGNED_BYTE;
      }
      const type = t;
      const data = null;

      gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, targetTextureWidth, targetTextureHeight, border, .........

The preview is truncated here. For the full code, please log in and click the download button above to download and view it.
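The preview cuts off before the part that actually connects the camera to the shader. Judging from the uniforms declared above (u_video, u_video_size), the full source presumably opens the webcam with getUserMedia and re-uploads the current video frame to a texture on every frame. The following is a rough, standalone sketch of that step, not the original code; video, videoTexture, and the bare WebGL calls here stand in for whatever the WTCGL-based code actually does.

// Illustrative webcam capture and per-frame texture upload (not the original source).
// In the real demo the context and uniform handling go through the WTCGL class above.
const gl = document.getElementById('webgl').getContext('webgl');

const video = document.createElement('video');
video.autoplay = true;
video.muted = true;
video.playsInline = true;

navigator.mediaDevices.getUserMedia({ video: true, audio: false }).then((stream) => {
  video.srcObject = stream;
  return video.play();
});

const videoTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTexture);
// Camera frames are rarely power-of-two sized, so clamp and avoid mipmaps.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);

function uploadFrame() {
  if (video.readyState >= 2 /* HAVE_CURRENT_DATA */) {
    gl.bindTexture(gl.TEXTURE_2D, videoTexture);
    // An HTMLVideoElement can be passed directly to texImage2D in WebGL.
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
    // u_video_size would be updated here from video.videoWidth / video.videoHeight
    // so the shader can correct the aspect ratio.
  }
  requestAnimationFrame(uploadFrame);
}
requestAnimationFrame(uploadFrame);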