/**
* A class creating buffers for a textured box to render it with WebGL
*/
class RasterTextureBox {
/**
* Creates all WebGL buffers for the textured box
* 6 ------- 7
* / | / |
* 3 ------- 2 |
* | | | |
* | 5 -----|- 4
* | / | /
* 0 ------- 1
* looking in negative z axis direction
* @param {WebGLRenderingContext} gl - The canvas' context
* @param {Vector} minPoint - The minimal x,y,z of the box
* @param {Vector} maxPoint - The maximal x,y,z of the box
* @param {string} texture - Path of the image to use as the box texture
*/
constructor(gl, minPoint, maxPoint, texture) {
this.gl = gl;
const mi = minPoint;
const ma = maxPoint;
let vertices = [
// front
mi.x, mi.y, ma.z, ma.x, mi.y, ma.z, ma.x, ma.y, ma.z,
ma.x, ma.y, ma.z, mi.x, ma.y, ma.z, mi.x, mi.y, ma.z,
// back
ma.x, mi.y, mi.z, mi.x, mi.y, mi.z, mi.x, ma.y, mi.z,
mi.x, ma.y, mi.z, ma.x, ma.y, mi.z, ma.x, mi.y, mi.z,
// right
ma.x, mi.y, ma.z, ma.x, mi.y, mi.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, ma.x, ma.y, ma.z, ma.x, mi.y, ma.z,
// top
mi.x, ma.y, ma.z, ma.x, ma.y, ma.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, mi.x, ma.y, mi.z, mi.x, ma.y, ma.z,
// left
mi.x, mi.y, mi.z, mi.x, mi.y, ma.z, mi.x, ma.y, ma.z,
mi.x, ma.y, ma.z, mi.x, ma.y, mi.z, mi.x, mi.y, mi.z,
// bottom
mi.x, mi.y, mi.z, ma.x, mi.y, mi.z, ma.x, mi.y, ma.z,
ma.x, mi.y, ma.z, mi.x, mi.y, ma.z, mi.x, mi.y, mi.z
];
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
this.vertexBuffer = vertexBuffer;
this.elements = vertices.length / 3;
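// this.elements = 36 vertices: 6 faces x 2 triangles x 3 vertices each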
let cubeTexture = gl.createTexture();
let cubeImage = new Image();
cubeImage.onload = function () {
gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, cubeImage);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
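// no mipmaps are generated here; LINEAR minification together with
// CLAMP_TO_EDGE also works for non-power-of-two textures in WebGL 1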
gl.bindTexture(gl.TEXTURE_2D, null);
}
cubeImage.src = texture;
this.texBuffer = cubeTexture;
let uv = [
// front
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// back
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// right
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// top
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// left
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// bottom
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
];
let uvBuffer = this.gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer);
gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(uv),
gl.STATIC_DRAW);
this.texCoords = uvBuffer;
}
render(shader) {
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
const positionLocation = shader.getAttributeLocation("a_position");
this.gl.enableVertexAttribArray(positionLocation);
this.gl.vertexAttribPointer(positionLocation, 3, this.gl.FLOAT, false, 0, 0);
// Bind the texture coordinates in this.texCoords
// to their attribute in the shader
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.texCoords);
const texCoordLocation = shader.getAttributeLocation("a_texCoord");
this.gl.enableVertexAttribArray(texCoordLocation);
this.gl.vertexAttribPointer(texCoordLocation, 2, this.gl.FLOAT, false, 0, 0);
this.gl.activeTexture(this.gl.TEXTURE0);
this.gl.bindTexture(this.gl.TEXTURE_2D, this.texBuffer);
shader.getUniformInt("sampler").set(0);
this.gl.drawArrays(this.gl.TRIANGLES, 0, this.elements);
this.gl.disableVertexAttribArray(positionLocation);
//disable texture vertex attrib array
this.gl.disableVertexAttribArray(texCoordLocation);
}
}
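// Sketch (not part of the original file): how a second texture, e.g. a normal
// map, could be bound on its own texture unit next to the diffuse texture in
// render(). "u_normal_map" is a hypothetical sampler uniform name, and the
// Shader wrapper's getUniformInt is assumed to work exactly like for "sampler" above.
function bindNormalMap(gl, shader, normalMapTexture) {
    gl.activeTexture(gl.TEXTURE1);
    gl.bindTexture(gl.TEXTURE_2D, normalMapTexture);
    shader.getUniformInt("u_normal_map").set(1);
}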
/**
* Class representing a 4x4 Matrix
*/
class Matrix {
constructor(mat) {
this.data = new Float32Array(16);
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
this.data[row * 4 + col] = mat[col * 4 + row];
}
}
}
getVal(row, col) {
return this.data[col * 4 + row];
}
setVal(row, col, val) {
this.data[col * 4 + row] = val;
}
static translation(translation) {
let m = Matrix.identity();
m.setVal(0, 3, translation.x);
m.setVal(1, 3, translation.y);
m.setVal(2, 3, translation.z);
return m;
}
static rotation(axis, angle) {
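// note: only rotation about a single coordinate axis is supported here;
// the axis parameter selects which one (x, y or z component non-zero)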
let m = Matrix.identity()
let sin = Math.sin(angle);
let cos = Math.cos(angle);
if (axis.x != 0) {
m.setVal(1, 1, cos);
m.setVal(1, 2, -sin);
m.setVal(2, 1, sin);
m.setVal(2, 2, cos);
} else if (axis.y != 0) {
m.setVal(0, 0, cos);
m.setVal(0, 2, sin);
m.setVal(2, 0, -sin);
m.setVal(2, 2, cos);
} else {
m.setVal(0, 0, cos);
m.setVal(0, 1, -sin);
m.setVal(1, 0, sin);
m.setVal(1, 1, cos);
}
return m;
}
static scaling(scale) {
let m = Matrix.identity();
m.setVal(0, 0, scale.x);
m.setVal(1, 1, scale.y);
m.setVal(2, 2, scale.z);
return m;
}
/**
* Constructs a lookat matrix
* @param {Vector} eye - The position of the viewer
* @param {Vector} center - The position to look at
* @param {Vector} up - The up direction
* @return {Matrix} The resulting lookat matrix
*/
static lookat(eye, center, up) {
// direction from the eye to the center point
let f = center.sub(eye).normalised();
// normalised up vector
let upNorm = up.normalised();
// cross products give the remaining camera axes
let s = f.cross(upNorm).normalised();
let u = s.cross(f);
// s, u and f are the axes of the camera coordinate system
// lookat matrix; the 3x3 part covers rotation and scaling
let mat = new Matrix([
s.x, s.y, s.z, 0,
u.x, u.y, u.z, 0,
-f.x, -f.y, -f.z, 0,
0, 0, 0, 1
]);
// finally apply the translation to the eye position
let trans = Matrix.translation(eye.mul(-1));
mat = mat.mul(trans);
return mat;
}
static frustum(left, right, bottom, top, near, far) {
// TODO [exercise 9]
const n2 = 2 * near;
const rpl = right + left;
const rml = right - left;
const tpb = top + bottom;
const tmb = top - bottom;
const fpn = far + near;
const fmn = far - near;
const n2f = n2 * far;
return new Matrix([
n2 / rml, 0, rpl / rml, 0,
0, n2 / tmb, tpb / tmb, 0,
0, 0, -fpn / fmn, -n2f / fmn,
0, 0, -1, 0
]);
}
static perspective(fovy, aspect, near, far) {
// delegate to the frustum method (see lecture slides, set 10)
const top = near * Math.tan((Math.PI / 180) * (fovy / 2));
const bottom = -top;
const right = top * aspect;
const left = -right;
return Matrix.frustum(left, right, bottom, top, near, far);
}
/**
* Returns the identity matrix
*/
static identity() {
return new Matrix([
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
]);
}
mul(other) {
if (other instanceof Matrix) {
// [exercise 7]
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
let sum = 0;
for (let i = 0; i < 4; i++) {
sum += this.getVal(row, i) * other.getVal(i, col);
}
m.setVal(row, col, sum);
}
}
return m;
} else {
let v = [0, 0, 0, 0];
for (let row = 0; row < 4; row++) {
for (let i = 0; i < 4; i++) {
v[row] += this.getVal(row, i) * other.valueOf()[i];
}
}
return new Vector(v[0], v[1], v[2], v[3]);
}
}
transpose() {
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
m.setVal(row, col, this.getVal(col, row));
}
}
return m;
}
invert() {
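// computes the inverse via the adjugate (cofactor) matrix divided by the
// determinant (Cramer's rule), working on a transposed copy of the matrix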
let mat = this.data;
let dst = new Float32Array(16); //ret.getValues();
let tmp = new Float32Array(12);
/* temparray for pairs */
let src = new Float32Array(16); //new float[16];
/* array of transpose source matrix */
let det;
for (let i = 0; i < 4; i++) {
src[i] = mat[i * 4];
src[i + 4] = mat[i * 4 + 1];
src[i + 8] = mat[i * 4 + 2];
src[i + 12] = mat[i * 4 + 3];
}
tmp[0] = src[10] * src[15];
tmp[1] = src[11] * src[14];
tmp[2] = src[9] * src[15];
tmp[3] = src[11] * src[13];
tmp[4] = src[9] * src[14];
tmp[5] = src[10] * src[13];
tmp[6] = src[8] * src[15];
tmp[7] = src[11] * src[12];
tmp[8] = src[8] * src[14];
tmp[9] = src[10] * src[12];
tmp[10] = src[8] * src[13];
tmp[11] = src[9] * src[12];
dst[0] = tmp[0] * src[5] + tmp[3] * src[6] + tmp[4] * src[7];
dst[0] -= tmp[1] * src[5] + tmp[2] * src[6] + tmp[5] * src[7];
dst[1] = tmp[1] * src[4] + tmp[6] * src[6] + tmp[9] * src[7];
dst[1] -= tmp[0] * src[4] + tmp[7] * src[6] + tmp[8] * src[7];
dst[2] = tmp[2] * src[4] + tmp[7] * src[5] + tmp[10] * src[7];
dst[2] -= tmp[3] * src[4] + tmp[6] * src[5] + tmp[11] * src[7];
dst[3] = tmp[5] * src[4] + tmp[8] * src[5] + tmp[11] * src[6];
dst[3] -= tmp[4] * src[4] + tmp[9] * src[5] + tmp[10] * src[6];
dst[4] = tmp[1] * src[1] + tmp[2] * src[2] + tmp[5] * src[3];
dst[4] -= tmp[0] * src[1] + tmp[3] * src[2] + tmp[4] * src[3];
dst[5] = tmp[0] * src[0] + tmp[7] * src[2] + tmp[8] * src[3];
dst[5] -= tmp[1] * src[0] + tmp[6] * src[2] + tmp[9] * src[3];
dst[6] = tmp[3] * src[0] + tmp[6] * src[1] + tmp[11] * src[3];
dst[6] -= tmp[2] * src[0] + tmp[7] * src[1] + tmp[10] * src[3];
dst[7] = tmp[4] * src[0] + tmp[9] * src[1] + tmp[10] * src[2];
dst[7] -= tmp[5] * src[0] + tmp[8] * src[1] + tmp[11] * src[2];
tmp[0] = src[2] * src[7];
tmp[1] = src[3] * src[6];
tmp[2] = src[1] * src[7];
tmp[3] = src[3] * src[5];
tmp[4] = src[1] * src[6];
tmp[5] = src[2] * src[5];
tmp[6] = src[0] * src[7];
tmp[7] = src[3] * src[4];
tmp[8] = src[0] * src[6];
tmp[9] = src[2] * src[4];
tmp[10] = src[0] * src[5];
tmp[11] = src[1] * src[4];
dst[8] = tmp[0] * src[13] + tmp[3] * src[14] + tmp[4] * src[15];
dst[8] -= tmp[1] * src[13] + tmp[2] * src[14] + tmp[5] * src[15];
dst[9] = tmp[1] * src[12] + tmp[6] * src[14] + tmp[9] * src[15];
dst[9] -= tmp[0] * src[12] + tmp[7] * src[14] + tmp[8] * src[15];
dst[10] = tmp[2] * src[12] + tmp[7] * src[13] + tmp[10] * src[15];
dst[10] -= tmp[3] * src[12] + tmp[6] * src[13] + tmp[11] * src[15];
dst[11] = tmp[5] * src[12] + tmp[8] * src[13] + tmp[11] * src[14];
dst[11] -= tmp[4] * src[12] + tmp[9] * src[13] + tmp[10] * src[14];
dst[12] = tmp[2] * src[10] + tmp[5] * src[11] + tmp[1] * src[9];
dst[12] -= tmp[4] * src[11] + tmp[0] * src[9] + tmp[3] * src[10];
dst[13] = tmp[8] * src[11] + tmp[0] * src[8] + tmp[7] * src[10];
dst[13] -= tmp[6] * src[10] + tmp[9] * src[11] + tmp[1] * src[8];
dst[14] = tmp[6] * src[9] + tmp[11] * src[11] + tmp[3] * src[8];
dst[14] -= tmp[10] * src[11] + tmp[2] * src[8] + tmp[7] * src[9];
dst[15] = tmp[10] * src[10] + tmp[4] * src[8] + tmp[9] * src[9];
dst[15] -= tmp[8] * src[9] + tmp[11] * src[10] + tmp[5] * src[8];
det = src[0] * dst[0] + src[1] * dst[1] + src[2] * dst[2] + src[3] * dst[3];
if (det == 0.0) {
throw new Error("singular matrix is not invertible");
}
/* calculate matrix inverse */
det = 1 / det;
for (let j = 0; j < 16; j++) {
dst[j] *= det;
}
let ret = Matrix.identity();
ret.data = dst;
return ret;
}
}
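// Sketch (not part of the original matrix.js): the normal matrix N expected by
// the Phong vertex shader further below is usually the inverse transpose of the
// model-view matrix, so that normals stay perpendicular to surfaces even under
// non-uniform scaling. With the Matrix class above this could look like:
function normalMatrix(viewMatrix, modelMatrix) {
    return viewMatrix.mul(modelMatrix).invert().transpose();
}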
/**
* Class representing a vector in 4D space
*/
class Vector {
/**
* Create a vector
* @param {number} x - The x component
* @param {number} y - The y component
* @param {number} z - The z component
* @param {number} w - The w component
* @return {number} The resulting vector
*/
constructor(x, y, z, w) {
this.data = [x, y, z, w];
}
// the x, y, z and w getters and setters exist in the original file but are omitted here
add(other) {
return new Vector(
this.x + other.x,
this.y + other.y,
this.z + other.z,
this.w + other.w
);
}
sub(other) {
return new Vector(
this.x - other.x,
this.y - other.y,
this.z - other.z,
this.w - other.w
);
}
mul(other) {
return new Vector(
this.x * other,
this.y * other,
this.z * other,
this.w
);
}
div(other) {
return new Vector(
this.x / other,
this.y / other,
this.z / other,
this.w
);
}
dot(other) {
if (other instanceof Vector) {
return this.x * other.x + this.y * other.y + this.z * other.z;
} else {
throw new Error("Dot product only works with vectors!");
}
}
cross(other) {
if (other instanceof Vector) {
return new Vector(
this.y * other.z - this.z * other.y,
this.z * other.x - this.x * other.z,
this.x * other.y - this.y * other.x,
0
);
} else {
throw new Error("Dot product only works with vectors!");
}
}
valueOf() {
return this.data;
}
normalised() {
const l = this.length;
return this.div(l);
}
equals(other) {
return (
Math.abs(this.x - other.x) <= Number.EPSILON &&
Math.abs(this.y - other.y) <= Number.EPSILON &&
Math.abs(this.z - other.z) <= Number.EPSILON &&
((!this.w && !other.w) || Math.abs(this.w - other.w) <= Number.EPSILON)
);
}
get length() {
return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z);
}
}
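// Sketch (not part of the original vector.js): a face normal for a triangle
// (p0, p1, p2) can be derived from two edge vectors with the Vector class above.
// For a box, each face normal is shared by all vertices of that face.
function faceNormal(p0, p1, p2) {
    const edge1 = p1.sub(p0);
    const edge2 = p2.sub(p0);
    // cross() sets w to 0, marking the result as a direction
    return edge1.cross(edge2).normalised();
}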
/**
* Class representing a Node in a Scenegraph
*/
class Node {
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) { }
}
/**
* Class representing a GroupNode in the Scenegraph.
* A GroupNode holds a transformation and is able
* to have child nodes attached to it.
* @extends Node
*/
class GroupNode extends Node {
/**
* Constructor
* @param {Matrix} mat - A matrix describing the node's transformation
*/
constructor(mat) {
super();
this.matrix = mat;
// TODO [exercise 8]
this.children = [];
}
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) {
// TODO [exercise 8]
visitor.visitGroupNode(this);
}
/**
* Adds a child node
* @param {Node} childNode - The child node to add
*/
add(childNode) {
// TODO [exercise 8]
this.children.push(childNode);
}
}
/**
* Class representing a Textured Axis Aligned Box in the Scenegraph
* @extends Node
*/
class TextureBoxNode extends Node {
constructor(minPoint, maxPoint, texture) {
super();
this.minPoint = minPoint;
this.maxPoint = maxPoint;
this.texture = texture;
}
accept(visitor) {
// TODO [exercise 8]
visitor.visitTextureBoxNode(this);
}
}
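// Sketch (the real visitors live in rastervisitor.js, which is not shown here):
// a visitor traverses the scene graph depth-first and keeps a matrix stack so
// that the transformations of nested GroupNodes accumulate along each path.
class LoggingVisitor {
    constructor() {
        this.matrixStack = [Matrix.identity()];
    }
    run(rootNode) {
        rootNode.accept(this);
    }
    visitGroupNode(node) {
        const top = this.matrixStack[this.matrixStack.length - 1];
        this.matrixStack.push(top.mul(node.matrix));
        for (const child of node.children) {
            child.accept(this);
        }
        this.matrixStack.pop();
    }
    visitTextureBoxNode(node) {
        // leaf node: the top of the stack is this node's model matrix
        console.log("texture box", node.texture, this.matrixStack[this.matrixStack.length - 1]);
    }
}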
//Texture Fragment Shader
precision mediump float;
uniform sampler2D sampler;
varying vec2 v_texCoord;
void main( void ) {
// read the fragment colour from the texture at the interpolated texture coordinate
gl_FragColor = texture2D(sampler, vec2(v_texCoord.s, v_texCoord.t));
}
//Texture Vertex Shader
attribute vec3 a_position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
v_texCoord = a_texCoord;
}
// Phong Vertex Shader
attribute vec3 a_position;
attribute vec3 a_normal;
// Pass color as attribute and forward it
// to the fragment shader
attribute vec4 a_color;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
uniform mat4 N; // normal matrix
varying vec3 v_normal;
// Pass the vertex position in view space
// to the fragment shader
// TODO [exercise 9]
varying vec4 v_position;
varying vec4 v_color;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
// Pass the color and transformed vertex position through
v_position = V * M * vec4( a_position, 1.0 ); // view-space position, as the varying's comment states
v_color = a_color;
v_normal = (N * vec4(a_normal, 0)).xyz;
}
//Phong Fragment Shader
// executed at least once per pixel (fragment)
precision mediump float;
varying vec4 v_color;
varying vec4 v_position;
varying vec3 v_normal;
const vec3 lightPos = vec3(0.2,-1.0,-1.0);
const float shininess = 16.0;
const float k_a = 1.0;
const float k_d = 0.6;
const float k_s = 0.3;
// the colour from the vertex shader is passed through and interpolated per fragment
void main( void ) {
// Phong lighting: ambient, diffuse and specular terms
vec3 vertPos = vec3(v_position) / v_position.w;
vec3 N = normalize(v_normal);
vec3 L = normalize(lightPos - vertPos);
vec4 L_j = vec4(1,1,1,1);
vec4 diffuse = L_j * max(dot(N, L), 0.0);
vec3 R = reflect(-L, N);
vec3 V = normalize(-vertPos);
float specAngle = max(dot(R, V), 0.0);
vec4 specular = L_j * pow(specAngle, shininess);
vec4 color = vec4(k_a * v_color + k_d * diffuse + k_s * specular);
gl_FragColor = color;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>ICG-11 Animation</title>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u"
crossorigin="anonymous">
</head>
<body>
<div class="container text-center">
<h1>ICG Animation</h1>
<hr>
<p>Implement a Rasteriser with WebGL using a Scenegraph.</p>
<canvas id="rasteriser" width="500" height="500"></canvas>
<script src="vector.js"></script>
<script src="raster-texture-box.js"></script>
<script src="matrix.js"></script>
<script src="nodes.js"></script>
<script src="rastervisitor.js"></script>
<script src="shader.js"></script>
<script src="animation-nodes.js"></script>
<script>
const canvas = document.getElementById("rasteriser");
const gl = canvas.getContext("webgl");
// construct scene graph
const sg = new GroupNode(Matrix.scaling(new Vector(0.2, 0.2, 0.2)));
const gn1 = new GroupNode(Matrix.translation(new Vector(1, 1, 0)));
sg.add(gn1);
let gn2 = new GroupNode(Matrix.translation(new Vector(-.7, -0.4, .1)));
sg.add(gn2);
const cube = new TextureBoxNode(
new Vector(-1, -1, -1, 1),
new Vector(1, 1, 1, 1),
'diffuse.png'
);
gn2.add(cube);
// setup for rendering
const setupVisitor = new RasterSetupVisitor(gl);
setupVisitor.setup(sg);
const visitor = new RasterVisitor(gl);
let camera = {
eye: new Vector(-.5, .5, -1, 1),
center: new Vector(0, 0, 0, 1),
up: new Vector(0, 1, 0, 0),
fovy: 60,
aspect: canvas.width / canvas.height,
near: 0.1,
far: 100
};
const phongShader = new Shader(gl,
"phong-vertex-perspective-shader.glsl",
"phong-fragment-shader.glsl"
);
visitor.shader = phongShader;
const textureShader = new Shader(gl,
"texture-vertex-perspective-shader.glsl",
"texture-fragment-shader.glsl"
);
visitor.textureshader = textureShader;
let animationNodes = [
new RotationNode(gn2, new Vector(0, 0, 1))
];
function simulate(deltaT) {
for (const animationNode of animationNodes) {
animationNode.simulate(deltaT);
}
}
let lastTimestamp = performance.now();
function animate(timestamp) {
simulate(timestamp - lastTimestamp);
visitor.render(sg, camera);
lastTimestamp = timestamp;
window.requestAnimationFrame(animate);
}
Promise.all(
[textureShader.load(), phongShader.load()]
).then(x =>
window.requestAnimationFrame(animate)
);
</script>
</div>
</body>
</html>
Hey, I am now trying to add a second texture to my cube and do some bump mapping. But I am a programming beginner, so this is a bit hard for me. All of my matrix and vector math is in the same js files shown above. I also have to use different shaders, a texture shader and a Phong shader. Now everyone says I have to calculate normals, but how do I do that, and where?
Looking forward to your help!
Best answer
With the normal map from the question, bump mapping can be performed. In bump mapping the normal vector of a fragment is read from a normal map and used in the light calculations.
In general, the incident light vector is transformed into texture space, which is the "orientation" of the normal map on the object (fragment). To set up the 3x3 orientation matrix that describes the orientation of the map, the tangent vector, the bitangent vector and the normal vector have to be known. If no tangent and bitangent vectors are given, they can be approximated from the partial derivatives of the vertex positions and the texture coordinates in the fragment shader.
So at the very least, texture coordinate and normal vector attributes are required. In the fragment shader the calculations are done in world space and in texture space respectively. The vertex shader is straightforward:
precision highp float;
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform mat4 P;
uniform mat4 V;
uniform mat4 M;
void main()
{
o_uv = a_texCoord;
w_nv = normalize(mat3(M) * a_normal);
vec4 worldPos = M * vec4(a_position, 1.0);
w_pos = worldPos.xyz;
gl_Position = P * V * worldPos;
}
In the fragment shader, the normal vector is read from the normal map:
vec3 mapN = normalize(texture2D(u_normal_map, o_uv.st).xyz * 2.0 - 1.0);
The light vector is transformed into texture space:
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
With this vector the light calculation can be performed:
float kd = max(0.0, dot(mapN, L));
To compute the matrix that transforms from world space to texture space, the partial derivative functions (dFdx, dFdy) are required. This means the "OES_standard_derivatives" extension has to be enabled (or a "webgl2" context used):
gl = canvas.getContext( "experimental-webgl" );
var standard_derivatives = gl.getExtension("OES_standard_derivatives");
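As a rough sketch (an assumption, not part of the original answer), the context creation can also try WebGL 2 first, where the derivative functions are built in, and only request the extension when falling back to WebGL 1:
var gl = canvas.getContext("webgl2") || canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
if (gl instanceof WebGLRenderingContext) {
    // WebGL 1 context: the extension is needed for dFdx/dFdy in the fragment shader
    var standard_derivatives = gl.getExtension("OES_standard_derivatives");
}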
The algorithm to compute the tangent and binormal vectors is explained in another answer - How to calculate Tangent and Binormal?
Final fragment shader:
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform vec3 u_light_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, o_uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, o_uv.st).rgb;
vec3 light_col = (0.0 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
and will produce a bump mapping effect like this:
If the tangent vector is known, the calculation of the tbn_inv matrix can be simplified considerably:
mat3 tm = mat3(normalize(w_tv), normalize(cross(w_nv, w_tv)), normalize(w_nv));
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
If you want an effect like parallax mapping, a displacement (height) map is needed as well.
The white areas of this map are "pushed into" the object. The algorithm is described in detail at LearnOpengl - Parallax Mapping.
The idea is that each fragment is associated with a height of the displacement map. You can imagine it as a rectangular column standing on the fragment. The view ray is traced until it hits a displaced fragment.
For a high-performance algorithm, only a limited number of samples of the displacement texture are taken. Once the fragment is identified, the corresponding texels of the normal map and the diffuse texture can be read. This gives the geometry a three-dimensional appearance. Note that this algorithm cannot handle the silhouette of the geometry.
Final fragment shader with steep parallax mapping:
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform float u_height_scale;
uniform vec3 u_light_pos;
uniform vec3 u_view_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
uniform sampler2D u_displacement_map;
vec2 ParallaxMapping (vec2 texCoord, vec3 viewDir)
{
float numLayers = 32.0 - 31.0 * abs(dot(vec3(0.0, 0.0, 1.0), viewDir));
float layerDepth = 1.0 / numLayers;
vec2 P = viewDir.xy / viewDir.z * u_height_scale;
vec2 deltaTexCoords = P / numLayers;
vec2 currentTexCoords = texCoord;
float currentLayerDepth = 0.0;
float currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
for (int i=0; i<32; ++ i)
{
if (currentLayerDepth >= currentDepthMapValue)
break;
currentTexCoords -= deltaTexCoords;
currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
currentLayerDepth += layerDepth;
}
vec2 prevTexCoords = currentTexCoords + deltaTexCoords;
float afterDepth = currentDepthMapValue - currentLayerDepth;
float beforeDepth = texture2D(u_displacement_map, prevTexCoords).r - currentLayerDepth + layerDepth;
float weight = afterDepth / (afterDepth - beforeDepth);
return prevTexCoords * weight + currentTexCoords * (1.0 - weight);
}
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 view_dir = tbn_inv * normalize(w_pos - u_view_pos);
vec2 uv = ParallaxMapping(o_uv, view_dir);
if (uv.x > 1.0 || uv.y > 1.0 || uv.x < 0.0 || uv.y < 0.0)
discard;
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, uv.st).rgb;
vec3 light_col = (0.1 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
The result is even more impressive:
(function loadscene() {
var gl, progDraw, vp_size;
var bufCube = {};
var diffuse_tex = 1;
var height_tex = 2;
var normal_tex = 3;
function render(deltaMS){
var height_scale = 0.3 * document.getElementById("height").value / 100.0;
// setup view projection and model
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
var prjMat = camera.Perspective();
var viewMat = camera.LookAt();
var modelMat = camera.AutoModelMatrix();
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
gl.frontFace(gl.CCW)
gl.cullFace(gl.BACK)
gl.enable(gl.CULL_FACE)
// set up draw shader
ShProg.Use( progDraw );
ShProg.SetF3( progDraw, "u_view_pos", camera.pos )
ShProg.SetF3( progDraw, "u_light_pos", [0.0, 5.0, 5.0] )
ShProg.SetF1( progDraw, "u_height_scale", height_scale );
ShProg.SetI1( progDraw, "u_diffuse", diffuse_tex );
ShProg.SetI1( progDraw, "u_displacement_map", height_tex );
ShProg.SetI1( progDraw, "u_normal_map", normal_tex );
ShProg.SetM44( progDraw, "P", prjMat );
ShProg.SetM44( progDraw, "V", viewMat );
ShProg.SetM44( progDraw, "M", modelMat );
// draw scene
VertexBuffer.Draw( bufCube );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
var standard_derivatives = gl.getExtension("OES_standard_derivatives"); // dFdx, dFdy
if (!standard_derivatives)
alert('no standard derivatives support (no dFdx, dFdy)');
//gl = canvas.getContext( "webgl2" );
if ( !gl )
return null;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.progObj )
return null;
progDraw.inPos = ShProg.AttrI( progDraw, "a_position" );
progDraw.inNV = ShProg.AttrI( progDraw, "a_normal" );
progDraw.inUV = ShProg.AttrI( progDraw, "a_texCoord" );
// create cube
let Pos = [ -1,-1,1, 1,-1,1, 1,1,1, -1,1,1, -1,-1,-1, 1,-1,-1, 1,1,-1, -1,1,-1 ];
let Col = [ 1,0,0, 1,0.5,0, 1,0,1, 1,1,0, 0,1,0, 0, 0, 1 ];
let NV = [ 0,0,1, 1,0,0, 0,0,-1, -1,0,0, 0,1,0, 0,-1,0 ];
let TV = [ 1,0,0, 0,0,-1, -1,0,0, 0,0,1, 1,0,0, -1,0,0 ];
var cubeHlpInx = [ 0,1,2,3, 1,5,6,2, 5,4,7,6, 4,0,3,7, 3,2,6,7, 1,0,4,5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) cubePosData.push(Pos[cubeHlpInx[i]*3], Pos[cubeHlpInx[i]*3+1], Pos[cubeHlpInx[i]*3+2] );
var cubeNVData = [];
for ( var i1 = 0; i1 < 6; ++ i1 ) {
for ( i2 = 0; i2 < 4; ++ i2 ) cubeNVData.push(NV[i1*3], NV[i1*3+1], NV[i1*3+2]);
}
var cubeTVData = [];
for ( var i1 = 0; i1 < 6; ++ i1 ) {
for ( i2 = 0; i2 < 4; ++ i2 ) cubeTVData.push(TV[i1*3], TV[i1*3+1], TV[i1*3+2]);
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) cubeColData.push(Col[is*3], Col[is*3+1], Col[is*3+2]);
}
var cubeTexData = []
for ( var i = 0; i < 6; ++ i ) cubeTexData.push( 0, 0, 1, 0, 1, 1, 0, 1 );
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
//{ data : cubeTVData, attrSize : 3, attrLoc : progDraw.inTV },
{ data : cubeTexData, attrSize : 2, attrLoc : progDraw.inUV },
//{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol },
],
cubeInxData, gl.TRIANGLES );
Texture.LoadTexture2D( diffuse_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/woodtiles.jpg" );
Texture.LoadTexture2D( height_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/toy_box_disp.png" );
Texture.LoadTexture2D( normal_tex, "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/toy_box_normal.png" );
camera = new Camera( [0, 3, 0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight];
//vp_size = [256, 256];
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, interval ) {
return Fract( deltaTime / (1000*interval) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, interval, range ) {
var pos = Fract( deltaTime / (1000*interval) ) * 2.0;
pos = pos < 1.0 ? pos : (2.0-pos);
return range[0] + (range[1] - range[0]) * pos;
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
var matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
var matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.orbit_mat = this.current_orbit_mat = this.model_mat = this.current_model_mat = IdentM44();
this.mouse_drag = this.auto_spin = false;
this.auto_rotate = true;
this.mouse_start = [0, 0];
this.mouse_drag_axis = [0, 0, 0];
this.mouse_drag_angle = 0;
this.mouse_drag_time = 0;
this.drag_start_T = this.rotate_start_T = this.Time();
this.Ortho = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var w = this.vp[0];
var h = this.vp[1];
return [
2/w, 0, 0, 0,
0, 2/h, 0, 0,
0, 0, -2/f_n, 0,
0, 0, -fn/f_n, 1 ];
};
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.AutoModelMatrix = function() {
return this.auto_rotate ? Multiply(this.current_model_mat, this.model_mat) : this.model_mat;
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
var current_T = this.Time();
this.current_model_mat = IdentM44()
var auto_angle_x = Fract( (current_T - this.rotate_start_T) / 13000.0 ) * 2.0 * Math.PI;
var auto_angle_y = Fract( (current_T - this.rotate_start_T) / 17000.0 ) * 2.0 * Math.PI;
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_x, 0 );
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_y, 1 );
};
}
var Texture = {};
Texture.HandleLoadedTexture2D = function( texture, flipY ) {
gl.activeTexture( gl.TEXTURE0 + texture.unit );
gl.bindTexture( gl.TEXTURE_2D, texture.obj );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
return texture;
}
Texture.LoadTexture2D = function( unit, name ) {
var texture = {};
texture.obj = gl.createTexture();
texture.unit = unit;
texture.image = new Image();
texture.image.setAttribute('crossorigin', 'anonymous');
texture.image.onload = function () {
Texture.HandleLoadedTexture2D( texture, false )
}
texture.image.src = name;
return texture;
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
var status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision highp float;
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform mat4 P;
uniform mat4 V;
uniform mat4 M;
void main()
{
o_uv = a_texCoord;
w_nv = normalize(mat3(M) * a_normal);
vec4 worldPos = M * vec4(a_position, 1.0);
w_pos = worldPos.xyz;
gl_Position = P * V * worldPos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
#extension GL_OES_standard_derivatives : enable
precision mediump float;
varying vec3 w_pos;
varying vec3 w_nv;
varying vec2 o_uv;
uniform float u_height_scale;
uniform vec3 u_light_pos;
uniform vec3 u_view_pos;
uniform sampler2D u_diffuse;
uniform sampler2D u_normal_map;
uniform sampler2D u_displacement_map;
vec2 ParallaxMapping (vec2 texCoord, vec3 viewDir)
{
float numLayers = 32.0 - 31.0 * abs(dot(vec3(0.0, 0.0, 1.0), viewDir));
float layerDepth = 1.0 / numLayers;
vec2 P = viewDir.xy / viewDir.z * u_height_scale;
vec2 deltaTexCoords = P / numLayers;
vec2 currentTexCoords = texCoord;
float currentLayerDepth = 0.0;
float currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
for (int i=0; i<32; ++ i)
{
if (currentLayerDepth >= currentDepthMapValue)
break;
currentTexCoords -= deltaTexCoords;
currentDepthMapValue = texture2D(u_displacement_map, currentTexCoords).r;
currentLayerDepth += layerDepth;
}
vec2 prevTexCoords = currentTexCoords + deltaTexCoords;
float afterDepth = currentDepthMapValue - currentLayerDepth;
float beforeDepth = texture2D(u_displacement_map, prevTexCoords).r - currentLayerDepth + layerDepth;
float weight = afterDepth / (afterDepth - beforeDepth);
return prevTexCoords * weight + currentTexCoords * (1.0 - weight);
}
void main()
{
vec3 N = normalize(w_nv);
vec3 dp1 = dFdx( w_pos );
vec3 dp2 = dFdy( w_pos );
vec2 duv1 = dFdx( o_uv );
vec2 duv2 = dFdy( o_uv );
vec3 dp2perp = cross(dp2, N);
vec3 dp1perp = cross(N, dp1);
vec3 T = dp2perp * duv1.x + dp1perp * duv2.x;
vec3 B = dp2perp * duv1.y + dp1perp * duv2.y;
float invmax = inversesqrt(max(dot(T, T), dot(B, B)));
mat3 tm = mat3(T * invmax, B * invmax, N);
mat3 tbn_inv = mat3(vec3(tm[0].x, tm[1].x, tm[2].x), vec3(tm[0].y, tm[1].y, tm[2].y), vec3(tm[0].z, tm[1].z, tm[2].z));
vec3 view_dir = tbn_inv * normalize(w_pos - u_view_pos);
vec2 uv = ParallaxMapping(o_uv, view_dir);
if (uv.x > 1.0 || uv.y > 1.0 || uv.x < 0.0 || uv.y < 0.0)
discard;
vec3 L = tbn_inv * normalize(u_light_pos - w_pos);
vec3 mapN = normalize(texture2D(u_normal_map, uv.st).xyz * 2.0 - 1.0);
float kd = max(0.0, dot(mapN, L));
vec3 color = texture2D(u_diffuse, uv.st).rgb;
vec3 light_col = (0.1 + kd) * color.rgb;
gl_FragColor = vec4(clamp(light_col, 0.0, 1.0), 1.0);
}
</script>
<body>
<div>
<form id="gui" name="inputs">
<table>
<tr>
<td> <font color=#CCF>height scale</font> </td>
<td> <input type="range" id="height" min="0" max="100" value="50"/></td>
</tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;" width="100%" height="100%"></canvas>
Regarding javascript - bump mapping with javascript and glsl, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/51988629/