ShaderToy学习笔记 05.3D旋转
1. 3D旋转
1.1. 绘制立方体
由于立方体没有旋转,所以正对着看过去时,看起来是正方形的,所以需要旋转一下,才能看到立方体的样子。
常见几何体的SDF
BOX 的SDF为
// Signed distance from point p to an axis-aligned box centered at the
// origin with half extents b (Inigo Quilez's exact box SDF).
// Negative inside, zero on the surface, positive outside.
float sdBox( vec3 p, vec3 b )
{
    vec3 d = abs(p) - b;
    float outside = length(max(d, 0.0));                  // distance when outside the box
    float inside  = min(max(d.x, max(d.y, d.z)), 0.0);    // negative distance when inside
    return outside + inside;
}
添加了offset及color的公式为
// Box SDF extended with a translation (offset) and a per-object color,
// returned together in an SDFResult so the scene can track which surface
// is closest.
SDFResult sdBox( vec3 p, vec3 b, vec3 offset, vec3 color )
{
    vec3 q = abs(p - offset) - b;
    float outside = length(max(q, 0.0));
    float inside  = min(max(q.x, max(q.y, q.z)), 0.0);
    return SDFResult(outside + inside, color);
}
完整代码如下
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;
const float START_DIST = 0.001;
const float MAX_DIST = 100.0;
const float EPSILON = 0.0001;

// Distance-field query result: signed distance plus the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1, 1].
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.;  // remap uv.y from [-1, 1] to [0, 1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Sphere SDF with translation (offset) and per-object color.
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Axis-aligned box SDF (half extents b) with translation and color.
SDFResult sdBox(vec3 p, vec3 b, vec3 offset, vec3 color)
{
    vec3 q = abs(p - offset) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// Scene description: currently a single red box.
// (The two-sphere scene is kept below for reference.)
SDFResult sdScene(vec3 p)
{
    //SDFResult result1 = sdSphere(p, 1.0, vec3(-2.5, 0.5, -2), vec3(0., 0.8, 0.8));
    //SDFResult result2 = sdSphere(p, 1.0, vec3(2.5, 0.5, -2), vec3(1., 0.58, 0.29));
    //SDFResult result = minWithColor(result1, result2);
    return sdBox(p, vec3(1.0, 1.0, .5), vec3(0.2, 0.2, 0.2), vec3(1., 0., 0.));
}

// Surface normal via tetrahedron-style central differences on the SDF.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + t*rd from start; returns the hit surface's
// color with the traveled distance stored in .d (d > end means a miss).
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates: x in [-1, 1], y scaled by the aspect
    // ratio (division by iResolution.xx keeps pixels square).
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;

    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c = getBackgroundColor(uv);
    vec3 c = backgroundColor;

    vec3 ro = vec3(0, 0, 3);           // ray origin (camera position)
    vec3 rd = normalize(vec3(uv, -1)); // ray direction

    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse (Lambert) shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        //vec3 light_direction = normalize(vec3(1, 0, 5));
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        // The 0.1 floor fakes a small ambient term so back faces are not black.
        float diffuse = clamp(dot(n, light_direction), 0.1, 1.0);
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }

    // Output to screen
    fragColor = vec4(c, 1.0);
}
1.2. 旋转
在三维空间中,我们需要分别考虑绕 X 轴、Y 轴和 Z 轴的旋转。每个轴的旋转都有其对应的旋转矩阵:
绕 X 轴旋转 θ 角度的矩阵:
$$R_x(\theta) = \begin{pmatrix} 1 & 0 & 0 & 0 \\ 0 & \cos\theta & -\sin\theta & 0 \\ 0 & \sin\theta & \cos\theta & 0 \\ 0 & 0 & 0 & 1 \end{pmatrix}$$
绕 Y 轴旋转 θ 角度的矩阵:
$$R_y(\theta) = \begin{pmatrix} \cos\theta & 0 & \sin\theta & 0 \\ 0 & 1 & 0 & 0 \\ -\sin\theta & 0 & \cos\theta & 0 \\ 0 & 0 & 0 & 1 \end{pmatrix}$$
绕 Z 轴旋转 θ 角度的矩阵:
$$R_z(\theta) = \begin{pmatrix} \cos\theta & -\sin\theta & 0 & 0 \\ \sin\theta & \cos\theta & 0 & 0 \\ 0 & 0 & 1 & 0 \\ 0 & 0 & 0 & 1 \end{pmatrix}$$
对于任意点 P(x, y, z),旋转后的坐标 P’(x’, y’, z’) 可以通过矩阵乘法得到:
$$P' = R \cdot P$$
如果需要进行多个轴的组合旋转,最终的旋转矩阵是各个轴向旋转矩阵的乘积。注意矩阵乘法的顺序会影响最终结果:
$$R_{total} = R_z \cdot R_y \cdot R_x$$
在实际应用中,我们通常会使用四阶矩阵(4x4)来表示这些变换,以便与其他变换(如平移、缩放)进行组合。
1.3. X 轴旋转
代码实现
// Rotation about the X axis by theta radians.
// NOTE(review): GLSL mat4() constructors consume their arguments in
// column-major order, so the layout below is the transpose of the
// row-major Rx(theta) written in the text, i.e. effectively Rx(-theta).
// Because the matrix is applied to the *sample point* (which moves the
// object by the inverse), the rendered box still appears to rotate by
// +theta — confirm against the on-screen direction if it matters.
mat4 rotationX(float theta)
{
    float c = cos(theta);
    float s = sin(theta);
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0,   c,  -s, 0.0,
                0.0,   s,   c, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Box SDF with translation, color, and a rigid transform applied to the
// sample point (equivalent to transforming the box by the inverse).
SDFResult sdBox( vec3 p, vec3 b, vec3 offset, vec3 color, mat4 transform )
{
    vec3 local = (transform * vec4(p - offset, 1.0)).xyz;
    vec3 q = abs(local) - b;
    float outside = length(max(q, 0.0));
    float inside  = min(max(q.x, max(q.y, q.z)), 0.0);
    return SDFResult(outside + inside, color);
}
注意两点:其一,GLSL 的 mat4 构造函数按列主序(column-major)接收参数,按行书写矩阵元素时实际构造出的是其转置;其二,vec3 类型的点需要先扩展为 vec4(齐次坐标,w 分量取 1.0),然后与 mat4 相乘(`transform * vec4(p, 1.0)`),最后提取出 .xyz 部分。
1.3.1. 完整代码
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;
const float START_DIST = 0.001;
const float MAX_DIST = 100.0;
const float EPSILON = 0.0001;
const float PI = 3.1415926535897932384626433832795;

// Distance-field query result: signed distance plus the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Rotation about the X axis by theta radians.
// NOTE(review): mat4() consumes arguments in column-major order, so this
// layout is the transpose of the row-major Rx(theta) from the text
// (i.e. Rx(-theta)); applied to the sample point, the box still visually
// spins by +theta.
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1, 1].
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.;  // remap uv.y from [-1, 1] to [0, 1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Sphere SDF with translation (offset) and per-object color.
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Box SDF with translation, color, and a rigid transform applied to the
// sample point (equivalent to transforming the box by the inverse).
SDFResult sdBox( vec3 p, vec3 b, vec3 offset, vec3 color, mat4 transform )
{
    p = (transform * vec4(p - offset, 1.0)).xyz;
    vec3 q = abs(p) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// Scene description: a single red box spinning about the X axis.
// (The two-sphere scene is kept below for reference.)
SDFResult sdScene(vec3 p)
{
    //SDFResult result1 = sdSphere(p, 1.0, vec3(-2.5, 0.5, -2), vec3(0., 0.8, 0.8));
    //SDFResult result2 = sdSphere(p, 1.0, vec3(2.5, 0.5, -2), vec3(1., 0.58, 0.29));
    //SDFResult result = minWithColor(result1, result2);
    return sdBox(p, vec3(1.0, 1.0, .5), vec3(0.2, 0.2, 0.2), vec3(1., 0., 0.),
                 rotationX(0.05 * PI * iTime));
}

// Surface normal via tetrahedron-style central differences on the SDF.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + t*rd from start; returns the hit surface's
// color with the traveled distance stored in .d (d > end means a miss).
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates: x in [-1, 1], y scaled by the aspect
    // ratio (division by iResolution.xx keeps pixels square).
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;

    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c = getBackgroundColor(uv);
    vec3 c = backgroundColor;

    vec3 ro = vec3(0, 0, 3);           // ray origin (camera position)
    vec3 rd = normalize(vec3(uv, -1)); // ray direction

    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse (Lambert) shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        //vec3 light_direction = normalize(vec3(1, 0, 5));
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        // The 0.1 floor fakes a small ambient term so back faces are not black.
        float diffuse = clamp(dot(n, light_direction), 0.1, 1.0);
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }

    // Output to screen
    fragColor = vec4(c, 1.0);
}
1.4. 绕x,y,z轴旋转
核心代码
// Rotation matrices about each principal axis, by theta radians.
// NOTE(review): GLSL mat4() constructors consume arguments in
// column-major order, so each layout below is the transpose of the
// corresponding row-major matrix in the text (rotation by -theta);
// applied to the sample point, the object still visually spins by +theta.

mat4 rotationX(float theta)
{
    float c = cos(theta);
    float s = sin(theta);
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0,   c,  -s, 0.0,
                0.0,   s,   c, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationY(float theta)
{
    float c = cos(theta);
    float s = sin(theta);
    return mat4(  c, 0.0,   s, 0.0,
                0.0, 1.0, 0.0, 0.0,
                 -s, 0.0,   c, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationZ(float theta)
{
    float c = cos(theta);
    float s = sin(theta);
    return mat4(  c,  -s, 0.0, 0.0,
                  s,   c, 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}
完整代码
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;
const float START_DIST = 0.001;
const float MAX_DIST = 100.0;
const float EPSILON = 0.0001;
const float PI = 3.1415926535897932384626433832795;

// Distance-field query result: signed distance plus the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Rotation matrices about each principal axis, by theta radians.
// NOTE(review): mat4() consumes arguments in column-major order, so each
// layout below is the transpose of the corresponding row-major matrix in
// the text; applied to the sample point, the box still visually rotates
// by +theta about each axis.
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1, 1].
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.;  // remap uv.y from [-1, 1] to [0, 1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Sphere SDF with translation (offset) and per-object color.
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Box SDF with translation, color, and a rigid transform applied to the
// sample point (equivalent to transforming the box by the inverse).
SDFResult sdBox( vec3 p, vec3 b, vec3 offset, vec3 color, mat4 transform )
{
    p = (transform * vec4(p - offset, 1.0)).xyz;
    vec3 q = abs(p) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// Scene description: a single red box spinning about all three axes.
// BUGFIX: the original composed rotationX * rotationY * rotationY *
// rotationZ — rotationY was multiplied in twice (copy-paste error);
// each axis should appear exactly once, as in R_z·R_y·R_x from the text.
SDFResult sdScene(vec3 p)
{
    //SDFResult result1 = sdSphere(p, 1.0, vec3(-2.5, 0.5, -2), vec3(0., 0.8, 0.8));
    //SDFResult result2 = sdSphere(p, 1.0, vec3(2.5, 0.5, -2), vec3(1., 0.58, 0.29));
    //SDFResult result = minWithColor(result1, result2);
    float angle = 0.05 * PI * iTime;
    return sdBox(p, vec3(1.0, 1.0, .5), vec3(0.2, 0.2, 0.2), vec3(1., 0., 0.),
                 rotationX(angle) * rotationY(angle) * rotationZ(angle));
}

// Surface normal via tetrahedron-style central differences on the SDF.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + t*rd from start; returns the hit surface's
// color with the traveled distance stored in .d (d > end means a miss).
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates: x in [-1, 1], y scaled by the aspect
    // ratio (division by iResolution.xx keeps pixels square).
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;

    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c = getBackgroundColor(uv);
    vec3 c = backgroundColor;

    vec3 ro = vec3(0, 0, 3);           // ray origin (camera position)
    vec3 rd = normalize(vec3(uv, -1)); // ray direction

    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse (Lambert) shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        //vec3 light_direction = normalize(vec3(1, 0, 5));
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        // The 0.1 floor fakes a small ambient term so back faces are not black.
        float diffuse = clamp(dot(n, light_direction), 0.1, 1.0);
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }

    // Output to screen
    fragColor = vec4(c, 1.0);
}
1.5. 缩放
缩放是指改变物体的大小。在计算机图形学中,我们可以使用矩阵来表示缩放。为了按我们预期的方式缩放物体,网格的中心需要是(0, 0, 0)。即缩放矩阵是针对网格的中心点进行缩放的,而不是针对世界空间的原点进行缩放。
1.5.1. 二维空间的缩放矩阵
在二维空间中,我们可以使用一个缩放矩阵来改变物体的大小。
缩放矩阵的形式为:
S = | sx  0 |
    |  0 sy |
其中,sx 和 sy 分别表示物体在 x 轴和 y 轴上的缩放比例。
对于任意点 A(x, y),缩放后的坐标 A’(x’, y’) 可以通过矩阵乘法得到:
A' = S * A
展开后得到缩放公式:
x' = sx * x
y' = sy * y
1.5.2. 三维空间的缩放矩阵
在三维空间中,我们需要分别考虑物体在 x 轴、y 轴和 z 轴上的缩放。
缩放矩阵的形式为:
S = | sx  0  0  0 |
    |  0 sy  0  0 |
    |  0  0 sz  0 |
    |  0  0  0  1 |
其中,sx、sy 和 sz 分别表示物体在 x 轴、y 轴和 z 轴上的缩放比例。
对于任意点 A(x, y, z),缩放后的坐标 A’(x’, y’, z’) 可以通过矩阵乘法得到:
A' = S * A
展开后得到缩放公式:
x' = sx * x
y' = sy * y
z' = sz * z
1.6. 组合变换
在计算机图形学中,我们经常需要将多个变换组合在一起,以创建更复杂的变换效果。例如,我们可以将缩放、旋转和平移组合在一起,以创建更复杂的变换效果。
其处理过程为:先进行缩放,再进行旋转,最后进行平移
由于矩阵乘法不满足交换律,所以变换的顺序十分关键。按照“先缩放,再旋转,最后平移”的顺序,最终的变换矩阵 M 为:
M = T * R * S
其中,T 是平移矩阵,R 是旋转矩阵,S 是缩放矩阵。
2. 参考
- 常见几何体的SDF
- Rendering Worlds with Two Triangles with raytracing on the GPU in 4096 bytes
- OpenGL shader开发实战学习笔记:第五章 使物体动起来-CSDN博客