当前位置: 首页 > news >正文

ShaderToy学习笔记 06.摄像机

1. 摄像机

1.1. 基本概念

摄像机在shader中主要用于定义观察空间,它决定了我们如何看到3D场景。主要包含以下要素:

  1. 位置(Position): 摄像机在3D空间中的位置坐标
  2. 方向(Direction): 摄像机朝向的方向
  3. 上向量(Up Vector): 定义摄像机的上方方向
  4. 视野(FOV): 即Field of View,决定视野的大小

1.2. 创建LookAt矩阵

以下是创建LookAt矩阵的代码,它将摄像机位置和观察点及上向量作为参数,返回一个3x3的矩阵,用于将3D空间中的点转换到观察空间。 通常 upVector 为 (0, 1, 0),但也可以使用其他向量。

// Build a 3x3 LookAt basis that transforms a view-space ray direction
// into world space.
//   cameraPos   - camera position in world space
//   lookAtPoint - the point the camera looks at
//   upVector    - world-space up hint, usually (0, 1, 0)
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector) {
    vec3 cd = normalize(lookAtPoint - cameraPos); // camera direction (forward)
    vec3 cr = normalize(cross(upVector, cd));     // camera right
    vec3 cu = normalize(cross(cd, cr));           // camera up
    // Negate right and forward so the basis is a right-handed frame with
    // x to the right, y up, and z pointing out of the screen.
    return mat3(-cr, cu, -cd);
}

注意:mat3(-cr, cu, -cd); 中的负号是必须的,因为世界坐标系采用右手坐标系,x轴向右,y轴向上,z轴向屏幕外边,而按上面方法构造的 LookAt 矩阵中,x轴向右,y轴向上,z轴指向屏幕里面,所以需要对 right 和 forward 两个向量取反。

观察空间中的坐标系如下图所示:

(图片由 Learn OpenGL 教程的「摄像机/视图空间」章节提供)

观察空间的坐标系我们采用右手坐标系,需要三个向量来定义,分别是:

  • forward: 摄像机前向量,指向前方
  • right: 摄像机右向量,指向右方
  • up: 摄像机上向量,指向上方

1.3. 初始场景

创建一个简单的场景,包含三个不同颜色的立方体及地板,如下图所示:

// Pixel width in normalized units (kept for parity with later chapters).
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;      // maximum ray-march iterations
const float START_DIST = 0.001; // initial distance along the ray
const float MAX_DIST = 100.0;   // far limit for the march
const float EPSILON = 0.0001;   // hit threshold

// Result of evaluating the scene SDF: signed distance plus surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Rotation matrices about the three axes (not used by this scene yet).
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b, centered at offset.
SDFResult sdBox(vec3 p, vec3 b, vec3 offset, vec3 color)
{
    vec3 q = abs(p - offset) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Vertical magenta-to-cyan gradient (unused here; kept from earlier chapters).
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.; // remap uv.y from [-1,1] to [0,1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Signed distance to a sphere of radius r centered at offset (unused here).
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Horizontal floor plane at y = -1.
SDFResult sdFloor(vec3 p, vec3 color)
{
    float d = p.y + 1.;
    return SDFResult(d, color);
}

// SDF union: keep whichever surface is closer, carrying its color along.
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// The scene: three unit boxes (red, green, blue) on a checkerboard floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1 = sdBox(p, vec3(1., 1.0, 1.), vec3(-4, 0.2, -4), vec3(1., 0., 0.));
    SDFResult result2 = sdBox(p, vec3(1., 1.0, 1.), vec3(0, 0.2, -4), vec3(0., 1., 0.));
    SDFResult result3 = sdBox(p, vec3(1., 1.0, 1.), vec3(4, 0.2, -4), vec3(0., 0., 1.));
    SDFResult result = minWithColor(result1, result2);
    result = minWithColor(result, result3);
    // 1x1 checkerboard pattern from the integer grid of the xz plane.
    vec3 floorColor = vec3(1. + 0.7 * mod(floor(p.x) + floor(p.z), 2.0));
    result = minWithColor(result, sdFloor(p, floorColor));
    return result;
}

// Surface normal via the four-tap tetrahedron technique.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t between [start, end].
// Returns the nearest hit's color; the total marched distance is in .d.
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
    // Normalized pixel coordinates: x in [-1,1], y scaled by the aspect ratio.
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    vec3 c = backgroundColor;
    vec3 ro = vec3(0, 0, 3.);          // ray origin: the camera position
    vec3 rd = normalize(vec3(uv, -1)); // ray direction
    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        float diffuse = max(0.0, dot(n, light_direction));
        diffuse = clamp(diffuse, 0.1, 1.0); // keep a 0.1 ambient floor
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }
    // Output to screen
    fragColor = vec4(c, 1.0);
}

1.4. 摄像机向右移动

核心代码


void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
    // other code ...
    vec3 c = backgroundColor;
    vec3 lp = vec3(0, 0.2, -4); // look-at point: the green box
    vec3 ro = vec3(5, 0, 3.);   // ray origin: the camera position
    // Rotate the view-space ray into world space with the LookAt basis.
    vec3 rd = camera(ro, lp, vec3(0, 1, 0)) * normalize(vec3(uv, -1)); // ray direction
}

通过调整 camera 函数中的参数,可以控制相机的位置和朝向。 其中 lp为相机看向的位置,即图中绿色的立方体,该立方体将始终在画面的中心。

1.5. 摄像机前后移动

核心代码

void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
    // other code ...
    vec3 c = backgroundColor;
    vec3 lp = vec3(0, 0.2, -4); // look-at point: the green box
    // Oscillate the camera back and forth along z over time.
    vec3 ro = vec3(0, 0, 3. + sin(iTime) * 2.); // ray origin: the camera position
    vec3 rd = camera(ro, lp, vec3(0, 1, 0)) * normalize(vec3(uv, -1)); // ray direction
}

即移动相机位置即可
完整代码

// Pixel width in normalized units (kept for parity with later chapters).
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;      // maximum ray-march iterations
const float START_DIST = 0.001; // initial distance along the ray
const float MAX_DIST = 100.0;   // far limit for the march
const float EPSILON = 0.0001;   // hit threshold

// Result of evaluating the scene SDF: signed distance plus surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Rotation matrices about the three axes (not used by this scene yet).
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b, centered at offset.
SDFResult sdBox(vec3 p, vec3 b, vec3 offset, vec3 color)
{
    vec3 q = abs(p - offset) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Vertical magenta-to-cyan gradient (unused here; kept from earlier chapters).
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.; // remap uv.y from [-1,1] to [0,1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Signed distance to a sphere of radius r centered at offset (unused here).
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Horizontal floor plane at y = -1.
SDFResult sdFloor(vec3 p, vec3 color)
{
    float d = p.y + 1.;
    return SDFResult(d, color);
}

// SDF union: keep whichever surface is closer, carrying its color along.
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// Build a 3x3 LookAt basis that transforms a view-space ray direction into
// world space. The right/forward negation yields a right-handed frame with
// x to the right, y up, and z pointing out of the screen.
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector)
{
    vec3 cd = normalize(lookAtPoint - cameraPos); // camera direction (forward)
    vec3 cr = normalize(cross(upVector, cd));     // camera right
    vec3 cu = normalize(cross(cd, cr));           // camera up
    return mat3(-cr, cu, -cd);
}

// The scene: three unit boxes (red, green, blue) on a checkerboard floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1 = sdBox(p, vec3(1., 1.0, 1.), vec3(-4, 0.2, -4), vec3(1., 0., 0.));
    SDFResult result2 = sdBox(p, vec3(1., 1.0, 1.), vec3(0, 0.2, -4), vec3(0., 1., 0.));
    SDFResult result3 = sdBox(p, vec3(1., 1.0, 1.), vec3(4, 0.2, -4), vec3(0., 0., 1.));
    SDFResult result = minWithColor(result1, result2);
    result = minWithColor(result, result3);
    // 1x1 checkerboard pattern from the integer grid of the xz plane.
    vec3 floorColor = vec3(1. + 0.7 * mod(floor(p.x) + floor(p.z), 2.0));
    result = minWithColor(result, sdFloor(p, floorColor));
    return result;
}

// Surface normal via the four-tap tetrahedron technique.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t between [start, end].
// Returns the nearest hit's color; the total marched distance is in .d.
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
    // Normalized pixel coordinates: x in [-1,1], y scaled by the aspect ratio.
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    vec3 c = backgroundColor;
    vec3 lp = vec3(0, 0.2, -4); // look-at point: the green box
    // Oscillate the camera back and forth along z over time.
    vec3 ro = vec3(0, 0, 3. + sin(iTime) * 2.); // ray origin: the camera position
    vec3 rd = camera(ro, lp, vec3(0, 1, 0)) * normalize(vec3(uv, -1)); // ray direction
    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        float diffuse = max(0.0, dot(n, light_direction));
        diffuse = clamp(diffuse, 0.1, 1.0); // keep a 0.1 ambient floor
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }
    // Output to screen
    fragColor = vec4(c, 1.0);
}

1.6. 摄像机围绕物体旋转

运行结果如下:
外链图片转存失败,源站可能有防盗链机制,建议将图片保存下来直接上传

思路:
我们只考虑摄像机在xz平面上的旋转,y轴不变。这样就是要改变 x轴和z轴的位置 。
从顶部向下看 ,摄像机将在黑色圆形路径上移动。

即类似下图:可参考 desmos上创建的图表来试验循环路径。想象一下绿色的立方体位于圆圈的中心

核心代码

    vec3 ro = vec3(0, 0, 3); // ray origin: the camera position
    float theta = iTime * 0.5; // orbit angle, advancing with time
    float cameraRadius = 10.;  // orbit radius
    // Move the camera on a circle of radius cameraRadius centered at lp,
    // staying in the xz plane (y is unchanged).
    ro.x = cameraRadius * cos(theta) + lp.x;
    ro.z = cameraRadius * sin(theta) + lp.z;

完整代码

// Pixel width in normalized units (kept for parity with later chapters).
#define PIXW (1./iResolution.y)

const int MAX_STEPS = 100;      // maximum ray-march iterations
const float START_DIST = 0.001; // initial distance along the ray
const float MAX_DIST = 100.0;   // far limit for the march
const float EPSILON = 0.0001;   // hit threshold

// Result of evaluating the scene SDF: signed distance plus surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Rotation matrices about the three axes (not used by this scene yet).
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b, centered at offset.
SDFResult sdBox(vec3 p, vec3 b, vec3 offset, vec3 color)
{
    vec3 q = abs(p - offset) - b;
    return SDFResult(length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0), color);
}

// Vertical magenta-to-cyan gradient (unused here; kept from earlier chapters).
vec3 getBackgroundColor(vec2 uv)
{
    float y = (uv.y + 1.) / 2.; // remap uv.y from [-1,1] to [0,1]
    return mix(vec3(1, 0, 1), vec3(0, 1, 1), y);
}

// Signed distance to a sphere of radius r centered at offset (unused here).
SDFResult sdSphere(vec3 p, float r, vec3 offset, vec3 color)
{
    return SDFResult(length(p - offset) - r, color);
}

// Horizontal floor plane at y = -1.
SDFResult sdFloor(vec3 p, vec3 color)
{
    float d = p.y + 1.;
    return SDFResult(d, color);
}

// SDF union: keep whichever surface is closer, carrying its color along.
SDFResult minWithColor(SDFResult a, SDFResult b)
{
    if (a.d < b.d) { return a; }
    return b;
}

// Build a 3x3 LookAt basis that transforms a view-space ray direction into
// world space. The right/forward negation yields a right-handed frame with
// x to the right, y up, and z pointing out of the screen.
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector)
{
    vec3 cd = normalize(lookAtPoint - cameraPos); // camera direction (forward)
    vec3 cr = normalize(cross(upVector, cd));     // camera right
    vec3 cu = normalize(cross(cd, cr));           // camera up
    return mat3(-cr, cu, -cd);
}

// The scene: three unit boxes (red, green, blue) on a checkerboard floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1 = sdBox(p, vec3(1., 1.0, 1.), vec3(-4, 0.2, -4), vec3(1., 0., 0.));
    SDFResult result2 = sdBox(p, vec3(1., 1.0, 1.), vec3(0, 0.2, -4), vec3(0., 1., 0.));
    SDFResult result3 = sdBox(p, vec3(1., 1.0, 1.), vec3(4, 0.2, -4), vec3(0., 0., 1.));
    SDFResult result = minWithColor(result1, result2);
    result = minWithColor(result, result3);
    // 1x1 checkerboard pattern from the integer grid of the xz plane.
    vec3 floorColor = vec3(1. + 0.7 * mod(floor(p.x) + floor(p.z), 2.0));
    result = minWithColor(result, sdFloor(p, floorColor));
    return result;
}

// Surface normal via the four-tap tetrahedron technique.
vec3 calcNormal(vec3 p)
{
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon
    return normalize(e.xyy * sdScene(p + e.xyy).d +
                     e.yyx * sdScene(p + e.yyx).d +
                     e.yxy * sdScene(p + e.yxy).d +
                     e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t between [start, end].
// Returns the nearest hit's color; the total marched distance is in .d.
SDFResult rayMarch(vec3 ro, vec3 rd, float start, float end)
{
    float d = start;
    SDFResult result;
    for (int i = 0; i < MAX_STEPS; i++)
    {
        vec3 p = ro + rd * d;
        result = sdScene(p);
        d += result.d;
        if (result.d < EPSILON || d > end) break;
    }
    result.d = d;
    return result;
}

void mainImage(out vec4 fragColor, in vec2 fragCoord)
{
    // Normalized pixel coordinates: x in [-1,1], y scaled by the aspect ratio.
    vec2 uv = (2.0 * fragCoord - iResolution.xy) / iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    vec3 c = backgroundColor;
    vec3 lp = vec3(0, 0.2, -4); // look-at point: the green box
    vec3 ro = vec3(0, 0, 3);    // ray origin: the camera position
    // Orbit the camera around lp on a circle in the xz plane.
    float theta = iTime * 0.5; // orbit angle, advancing with time
    float cameraRadius = 10.;  // orbit radius
    ro.x = cameraRadius * cos(theta) + lp.x;
    ro.z = cameraRadius * sin(theta) + lp.z;
    vec3 rd = camera(ro, lp, vec3(0, 1, 0)) * normalize(vec3(uv, -1)); // ray direction
    SDFResult result = rayMarch(ro, rd, START_DIST, MAX_DIST);
    float d = result.d;
    if (d < MAX_DIST)
    {
        // Diffuse shading from a point light.
        vec3 p = ro + rd * d;
        vec3 n = calcNormal(p);
        vec3 lightPosition = vec3(2, 2, 7);
        vec3 light_direction = normalize(lightPosition - p);
        vec3 light_color = vec3(1, 1, 1);
        float diffuse = max(0.0, dot(n, light_direction));
        diffuse = clamp(diffuse, 0.1, 1.0); // keep a 0.1 ambient floor
        c = light_color * diffuse * result.color + backgroundColor * 0.2;
    }
    // Output to screen
    fragColor = vec4(c, 1.0);
}

1.7. 参考

  1. 10.1 带有观察点的相机模型 | Shadertoy中文教程
  2. 10.2 围绕目标旋转摄像机 | Shadertoy中文教程
  3. LearnOpenGL - Camera

相关文章:

  • 基本功能学习
  • OpenGL-ES 学习(14) ----顶点指定和基本图元的绘制
  • 小米MiMo:7B模型逆袭AI大模型战场的技术密码
  • 【QNX+Android虚拟化方案】137 - msm-5.4 Kernel U盘 插入中断、枚举、匹配完整流程详解
  • 探秘数据结构:构建高效算法的灵魂密码
  • 【Android】四大组件之ContentProvider
  • POI从入门到上手(三)-轻松完成EasyExcel使用,完成Excel导入导出.
  • Node.js心得笔记
  • 数据库基本概念:数据库的定义、特点、分类、组成、作用
  • llamafactory-cli webui启动报错TypeError: argument of type ‘bool‘ is not iterable
  • Python10天冲刺-函数进行详细的性能分析
  • ES6-Set-Map对象小记
  • KDD 2025 | (8月轮)时间序列(Time Series)论文总结
  • PostgreSQL安装部署
  • 使用HunyuanVideo搭建文本生视频大模型
  • 【2025五一数学建模竞赛B题】 矿山数据处理问题|建模过程+完整代码论文全解全析
  • qemu(4) -- qemu-system-arm使用
  • 从股指到期指,哪些因素影响基差?
  • Selenium3自动化测试,Python3测试开发教程视频测试用例设计
  • 学习与规划的融合Dyna-Q:python从零实现
  • 海南征集民生领域涉嫌垄断违法行为线索,包括行业协会等领域
  • 西安市长安区与航天基地区政合一管理,党政一把手分任基地党工委正副书记
  • 黄仁勋:新一代计算平台GB300三季度上市,AI计算能力每十年提升100万倍
  • 【社论】进一步拧紧过紧日子的制度螺栓
  • 前四月国家铁路发送货物12.99亿吨,同比增长3.6%
  • 全总联合六部门印发工作指引,共保劳动者合法权益