說明
【跟月影學可視化】學習筆記。
邊緣模糊
在周遊像素點的時候,同時計算目前像素點到圖檔中心點的距離,然後根據距離設定透明度,就可以實作邊緣模糊的效果。
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>圖檔邊緣模糊的效果</title>
</head>
<body>
  <img
    src="https://images.unsplash.com/photo-1666552982368-dd0e2bb96993?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=2070&q=80"
    alt=""
  />
  <canvas id="paper" width="0" height="0"></canvas>
  <script type="module">
    import {
      loadImage,
      getImageData,
      traverse,
    } from "./common/lib/util.js";

    const canvas = document.getElementById("paper");
    const context = canvas.getContext("2d");

    // Load the photo, fade each pixel out according to its distance from
    // the image center, then paint the result onto the canvas.
    async function render() {
      // Load the image asynchronously.
      const img = await loadImage(
        "https://images.unsplash.com/photo-1666552982368-dd0e2bb96993?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=2070&q=80"
      );
      // Per-pixel data of the image.
      const imageData = getImageData(img);
      console.log("imageData---->", imageData);
      // traverse visits every pixel and hands the callback its rgba
      // channels plus x/y coordinates. The (x - 0.5, y - 0.5) math
      // assumes x and y are normalized to 0..1 (center at 0.5, 0.5) —
      // presumably done by util.js; confirm there.
      traverse(imageData, ({ r, g, b, a, x, y }) => {
        // Distance of this pixel from the image center.
        const d = Math.hypot(x - 0.5, y - 0.5);
        // Farther from the center => more transparent, giving a soft edge.
        a *= 1.0 - 2 * d;
        return [r, g, b, a];
      });
      // Size the canvas to the image and blit the processed pixels.
      canvas.width = imageData.width;
      canvas.height = imageData.height;
      context.putImageData(imageData, 0, 0);
    }

    render();
  </script>
</body>
</html>
圖檔融合
給一張照片加上陽光照耀的效果。具體操作就是,把下面的透明的圖檔疊加到一張照片上。這種能疊加到其他照片上的圖檔,通常被稱為紋理(Texture),疊加後的效果也叫做紋理效果。
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>紋理與圖檔疊加</title>
</head>
<body>
  <img
    src="./assets/img/flower.jpg"
    alt=""
  />
  <canvas id="paper" width="0" height="0"></canvas>
  <script type="module">
    import {
      loadImage,
      getImageData,
      traverse,
      getPixel,
    } from "./common/lib/util.js";
    import {
      transformColor,
      brightness,
      saturate,
    } from "./common/lib/color-matrix.js";

    const canvas = document.getElementById("paper");
    const context = canvas.getContext("2d");

    // Blend a translucent "sunlight" texture over the photo: where the
    // texture is more opaque, brightness goes up and saturation goes down.
    async function render() {
      // Load the photo, then the sunlight texture.
      const img = await loadImage("./assets/img/flower.jpg");
      const sunlight = await loadImage("./assets/img/sunlight-texture.png");
      // Per-pixel data for both images.
      const imageData = getImageData(img);
      console.log("imageData---->", imageData);
      const texture = getImageData(sunlight);
      console.log("texture---->", texture);
      // Visit every photo pixel; getPixel reads the texture pixel at the
      // same index. texColor[3] is its alpha channel — the 1 + 0.7 * alpha
      // math assumes getPixel normalizes it to 0..1 (verify in util.js).
      traverse(imageData, ({ r, g, b, a, index }) => {
        const texColor = getPixel(texture, index);
        return transformColor(
          [r, g, b, a],
          brightness(1 + 0.7 * texColor[3]),
          saturate(2 - texColor[3])
        );
      });
      // Size the canvas to the photo and draw the blended result.
      canvas.width = imageData.width;
      canvas.height = imageData.height;
      context.putImageData(imageData, 0, 0);
    }

    render();
  </script>
</body>
</html>
弊端:必須循環周遊圖檔上的每個像素點,圖檔一大計算量很大。
片元着色器是怎麼處理像素的?
在 WebGL 中,先通過圖檔或者 Canvas 對象來建立紋理對象,紋理對象包括了整張圖檔的所有像素點的顔色資訊,然後通過 uniform 傳遞給着色器,再通過紋理坐标 vUv 來讀取對應的具體坐标處像素的顔色資訊。
紋理坐标是一個變量,類型是二維向量,x、y 的值從 0 到 1。
webgl 實作濾鏡
建立紋理對象
// Create a WebGL texture object from an image and upload its pixels.
// Returns the texture, left unbound so later state changes can't
// accidentally touch it.
function createTexture(gl, img) {
  // Allocate a texture handle.
  const tex = gl.createTexture();
  // Image coordinates and WebGL texture coordinates run opposite ways on
  // the Y axis, so ask WebGL to flip the image vertically on upload.
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
  // Work in texture unit 0. WebGL exposes multiple units, so a shader can
  // sample several textures at once.
  gl.activeTexture(gl.TEXTURE0);
  // Bind the texture to the current context before configuring it.
  gl.bindTexture(gl.TEXTURE_2D, tex);
  // Upload the image pixels as RGBA bytes.
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
  // Clamp sampling at the edges; use linear filtering when minifying.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  // Unbind before returning.
  gl.bindTexture(gl.TEXTURE_2D, null);
  return tex;
}
設定紋理
// Bind a texture to texture unit `idx` and point the shader's `tMap`
// sampler uniform at that unit.
//
// NOTE(review): the original body read `texture` and `program` as free
// variables from the enclosing scope. They are now explicit parameters
// with defaults that fall back to those same outer-scope names, so
// existing two-argument call sites behave exactly as before, while new
// callers can pass the texture and program directly.
function setTexture(gl, idx, tex = texture, prog = program) {
  // Select the texture unit to operate on.
  gl.activeTexture(gl.TEXTURE0 + idx);
  // Attach the texture to that unit.
  gl.bindTexture(gl.TEXTURE_2D, tex);
  // Look up the sampler uniform in the shader program...
  const loc = gl.getUniformLocation(prog, 'tMap');
  // ...and tell it which texture unit to read from.
  gl.uniform1i(loc, idx);
  // Unbind to avoid leaking bind state into later calls.
  gl.bindTexture(gl.TEXTURE_2D, null);
}
在 Shader 中使用紋理對象
uniform sampler2D tMap;
...
// Sample the texture at texture coordinate vUv. texture2D returns a
// vec4 (RGBA), so the receiving variable must be vec4 — the original
// `vec3 color = ...` would not compile in GLSL.
vec4 color = texture2D(tMap, vUv);
這裡直接使用 gl-renderer 庫。
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>webgl 實作濾鏡</title>
</head>
<body>
  <canvas width="1920" height="1080"></canvas>
  <script src="./common/lib/gl-renderer.js"></script>
  <script>
    // Vertex shader: pass the position through and forward the texture
    // coordinate to the fragment shader via the varying vUv.
    const vertex = `
      attribute vec2 a_vertexPosition;
      attribute vec2 uv;
      varying vec2 vUv;
      void main() {
        gl_PointSize = 1.0;
        vUv = uv;
        gl_Position = vec4(a_vertexPosition, 1, 1);
      }
    `;
    // Fragment shader: sample the texture, transform the color with a
    // 4x4 color matrix, and keep the sampled alpha unchanged.
    const fragment = `
      #ifdef GL_ES
      precision highp float;
      #endif
      uniform sampler2D tMap;
      uniform mat4 colorMatrix;
      varying vec2 vUv;
      void main() {
        vec4 color = texture2D(tMap, vUv);
        gl_FragColor = colorMatrix * vec4(color.rgb, 1.0);
        gl_FragColor.a = color.a;
      }
    `;
    const canvas = document.querySelector('canvas');
    const renderer = new GlRenderer(canvas);
    // Compile both shaders into a program and install it.
    const program = renderer.compileSync(fragment, vertex);
    renderer.useProgram(program);
    (async function () {
      const texture = await renderer.loadTexture('./assets/img/flower.jpg');
      renderer.uniforms.tMap = texture;
      // Rec. 709 luma weights; identical R/G/B rows make every output
      // channel the same weighted sum, i.e. a grayscale filter.
      const r = 0.2126;
      const g = 0.7152;
      const b = 0.0722;
      renderer.uniforms.colorMatrix = [
        r, r, r, 0,
        g, g, g, 0,
        b, b, b, 0,
        0, 0, 0, 1,
      ];
      // Two triangles covering the full clip-space quad, with matching UVs.
      renderer.setMeshData([{
        positions: [
          [-1, -1],
          [-1, 1],
          [1, 1],
          [1, -1],
        ],
        attributes: {
          uv: [
            [0, 0],
            [0, 1],
            [1, 1],
            [1, 0],
          ],
        },
        cells: [[0, 1, 2], [2, 0, 3]],
      }]);
      renderer.render();
    }());
  </script>
</body>
</html>
webgl 實作圖檔的粒子化
将圖形網格化,因為原始圖像的圖檔像素寬高是
1920px 和 1080px
,是以我們用
vec2 st = vUv * vec2(192, 108)
就可以得到
10px X 10px
大小的網格。
為了讓取出來的顏色是一個亂序的色值,可以使用偽隨機函數 random 根據網格隨機一個偏移量。因為這個偏移量是 0~1 之間的值,我們將它乘以 2 再用 1 減去它,就能得到一個範圍在 -1~1 之間的隨機偏移。
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>webgl 實作圖檔的粒子化</title>
</head>
<body>
  <canvas width="1920" height="1080"></canvas>
  <script src="./common/lib/gl-renderer.js"></script>
  <script>
    // Vertex shader: pass-through position plus the vUv varying.
    const vertex = `
      attribute vec2 a_vertexPosition;
      attribute vec2 uv;
      varying vec2 vUv;
      void main() {
        gl_PointSize = 1.0;
        vUv = uv;
        gl_Position = vec4(a_vertexPosition, 1, 1);
      }
    `;
    // Fragment shader: divide the 1920x1080 image into a 192x108 grid
    // (10px cells), shift each cell by a hash-based pseudo-random offset
    // mapped from (0,1] to [-1,1), and use uTime to interpolate from the
    // scattered coordinates back to the true ones while fading alpha in
    // (values above 1 are clamped by the pipeline).
    const fragment = `
      #ifdef GL_ES
      precision highp float;
      #endif
      uniform sampler2D tMap;
      uniform float uTime;
      varying vec2 vUv;
      float random (vec2 st) {
        return fract(sin(dot(st.xy, vec2(12.9898,78.233)))*43758.5453123);
      }
      void main() {
        vec2 st = vUv * vec2(192, 108);
        vec2 uv = vUv + 1.0 - 2.0 * random(floor(st));
        vec4 color = texture2D(tMap, mix(uv, vUv, min(uTime, 1.0)));
        gl_FragColor.rgb = color.rgb;
        gl_FragColor.a = color.a * uTime;
      }
    `;
    const canvas = document.querySelector('canvas');
    const renderer = new GlRenderer(canvas);
    // Compile both shaders into a program and install it.
    const program = renderer.compileSync(fragment, vertex);
    renderer.useProgram(program);
    (async function () {
      const texture = await renderer.loadTexture('./assets/img/flower.jpg');
      renderer.uniforms.tMap = texture;
      renderer.uniforms.uTime = 0;
      // Two triangles covering the full clip-space quad, with matching UVs.
      renderer.setMeshData([{
        positions: [
          [-1, -1],
          [-1, 1],
          [1, 1],
          [1, -1],
        ],
        attributes: {
          uv: [
            [0, 0],
            [0, 1],
            [1, 1],
            [1, 0],
          ],
        },
        cells: [[0, 1, 2], [2, 0, 3]],
      }]);
      renderer.render();
      // Drive uTime from 0 upward; the assembly animates over ~5 seconds.
      function update(t) {
        renderer.uniforms.uTime = t / 5000;
        requestAnimationFrame(update);
      }
      update(0);
    }());
  </script>
</body>
</html>
實作效果如下:
webgl 實作圖像合成
使用 shader 技術可以把綠幕圖檔合成到其他圖檔上
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>webgl 實作圖像合成</title>
</head>
<body>
  <canvas width="1920" height="1080"></canvas>
  <script src="./common/lib/gl-renderer.js"></script>
  <script>
    // Vertex shader: pass-through position plus the vUv varying.
    const vertex = `
      attribute vec2 a_vertexPosition;
      attribute vec2 uv;
      varying vec2 vUv;
      void main() {
        gl_PointSize = 1.0;
        vUv = uv;
        gl_Position = vec4(a_vertexPosition, 1, 1);
      }
    `;
    // Fragment shader: greenscreen composite. Sample the cat texture at
    // scaled/offset coordinates; where the cat pixel looks green
    // (low red, high green) show the background photo instead.
    const fragment = `
      #ifdef GL_ES
      precision highp float;
      #endif
      uniform sampler2D tMap;
      uniform sampler2D tCat;
      varying vec2 vUv;
      void main() {
        vec4 color = texture2D(tMap, vUv);
        vec2 st = vUv * 3.0 - vec2(1.2, 0.5);
        vec4 cat = texture2D(tCat, st);
        gl_FragColor.rgb = cat.rgb;
        if(cat.r < 0.5 && cat.g > 0.6) {
          gl_FragColor.rgb = color.rgb;
        }
        gl_FragColor.a = color.a;
      }
    `;
    const canvas = document.querySelector('canvas');
    const renderer = new GlRenderer(canvas);
    // Compile both shaders into a program and install it.
    const program = renderer.compileSync(fragment, vertex);
    renderer.useProgram(program);
    (async function () {
      // Load the background photo and the greenscreen cat in parallel.
      const [picture, cat] = await Promise.all([
        renderer.loadTexture('./assets/img/flower.jpg'),
        renderer.loadTexture('./assets/img/cat.png'),
      ]);
      renderer.uniforms.tMap = picture;
      renderer.uniforms.tCat = cat;
      // Two triangles covering the full clip-space quad, with matching UVs.
      renderer.setMeshData([{
        positions: [
          [-1, -1],
          [-1, 1],
          [1, 1],
          [1, -1],
        ],
        attributes: {
          uv: [
            [0, 0],
            [0, 1],
            [1, 1],
            [1, 0],
          ],
        },
        cells: [[0, 1, 2], [2, 0, 3]],
      }]);
      renderer.render();
    }());
  </script>
</body>
</html>