当前位置: 首页 > wzjs >正文

邢台专业做网站品牌整合营销

邢台专业做网站,品牌整合营销,建大型网站,湖南建设监理协会网站1. 摄像机 1.1. 基本概念 摄像机在shader中主要用于定义观察空间,它决定了我们如何看到3D场景。主要包含以下要素: 位置(Position): 摄像机在3D空间中的位置坐标方向(Direction): 摄像机朝向的方向上向量(Up Vector): 定义摄像机的上方方向视野(FOV):…

1. 摄像机

1.1. 基本概念

摄像机在shader中主要用于定义观察空间,它决定了我们如何看到3D场景。主要包含以下要素:

  1. 位置(Position): 摄像机在3D空间中的位置坐标
  2. 方向(Direction): 摄像机朝向的方向
  3. 上向量(Up Vector): 定义摄像机的上方方向
  4. 视野(FOV): 即Field of View,决定视野的大小

1.2. 创建LookAt矩阵

以下是创建LookAt矩阵的代码,它将摄像机位置和观察点及上向量作为参数,返回一个3x3的矩阵,用于将3D空间中的点转换到观察空间。 通常 upVector 为 (0, 1, 0),但也可以使用其他向量。

// Build a LookAt (view-orientation) matrix from the camera position, the
// target point and a world-space up vector (typically (0, 1, 0)).
// The returned mat3 rotates view-space directions into a right-handed
// world basis: +x right, +y up, +z pointing out of the screen.
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector) {
    vec3 forward = normalize(lookAtPoint - cameraPos); // camera viewing direction
    vec3 side    = normalize(cross(upVector, forward)); // horizontal side axis
    vec3 camUp   = normalize(cross(forward, side));     // true camera up
    // Negate the first and third columns: the LookAt frame has +z going
    // into the screen, while the world frame used here has +z coming out,
    // so both axes must be flipped to stay right-handed.
    return mat3(-side, camUp, -forward);
}

注意:mat3(-cr, cu, -cd); 中的负号是必须的,因为世界坐标系采用右手坐标系,x轴向右,y轴向上,z轴向屏幕外边,而LookAt矩阵中,x轴向右,y轴向上,z轴向屏幕里面,所以需要取反。

观察空间中的坐标系如下图所示:

图片由 Learn OpenGL 教程的"摄像机/视图空间"章节提供。

观察空间的坐标系我们采用右手坐标系,需要三个向量来定义,分别是:

  • forward: 摄像机前向量,指向前方
  • right: 摄像机右向量,指向右方
  • up: 摄像机上向量,指向上方

1.3. 初始场景

创建一个简单的场景,包含三个不同颜色的立方体及地板,如下图所示:

// Width of one pixel in normalized units (based on vertical resolution).
#define PIXW (1./iResolution.y)

const int   MAX_STEPS  = 100;    // maximum ray-march iterations
const float START_DIST = 0.001;  // distance along the ray where marching starts
const float MAX_DIST   = 100.0;  // rays longer than this are treated as misses
const float EPSILON    = 0.0001; // surface-hit threshold

// Result of a signed-distance query: the distance and the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Homogeneous rotation about the X axis by theta radians.
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Y axis by theta radians.
mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Z axis by theta radians.
mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b centered at offset.
SDFResult sdBox( vec3 p, vec3 b,vec3 offset,vec3 color )
{
    vec3 q = abs(p-offset) - b;
    return SDFResult(length(max(q,0.0)) + min(max(q.x,max(q.y,q.z)),0.0),color);
}

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1,1].
vec3 getBackgroundColor(vec2 uv)
{
    float y=(uv.y+1.)/2.; // remap uv.y to [0,1]
    return mix(vec3(1,0,1),vec3(0,1,1),y);
}

// Signed distance to a sphere of radius r centered at offset.
SDFResult sdSphere(vec3 p, float r,vec3 offset,vec3 color)
{
    return SDFResult(length(p-offset)-r,color);
}

// Signed distance to the horizontal floor plane y = -1.
SDFResult sdFloor(vec3 p,vec3 color)
{
    float d=p.y+1.;
    return SDFResult(d,color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a,SDFResult b)
{
    if (a.d<b.d){
        return a;
    }
    return b;
}

// Scene SDF: three colored boxes plus a checkered floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1=sdBox(p,vec3(1.,1.0,1.),vec3(-4,0.2,-4),vec3(1.,0.,0.)); // red box
    SDFResult result2=sdBox(p,vec3(1.,1.0,1.),vec3(0,0.2,-4),vec3(0.,1.,0.));  // green box
    SDFResult result3=sdBox(p,vec3(1.,1.0,1.),vec3(4,0.2,-4),vec3(0.,0.,1.));  // blue box
    SDFResult result=minWithColor(result1,result2);
    result=minWithColor(result,result3);
    // Checkerboard shade from the integer parity of x and z.
    vec3 floorColor = vec3(1. + 0.7*mod(floor(p.x) + floor(p.z), 2.0));
    result=minWithColor(result, sdFloor(p,floorColor));
    return result;
}

// Surface normal via the tetrahedron central-difference technique.
vec3 calcNormal(vec3 p) {
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon offset
    return normalize(
        e.xyy * sdScene(p + e.xyy).d +
        e.yyx * sdScene(p + e.yyx).d +
        e.yxy * sdScene(p + e.yxy).d +
        e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t from start to end. Returns the scene
// result at the final point with d replaced by the total distance
// marched (compare against end to detect a miss).
SDFResult rayMarch(vec3 ro, vec3 rd,float start,float end)
{
    float d=start;
    SDFResult result;
    for(int i=0;i<MAX_STEPS;i++){
        vec3 p=ro+rd*d;
        result=sdScene(p);
        d+=result.d;
        if(result.d<EPSILON || d>end) break;
    }
    result.d=d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates (aspect-corrected, roughly -1..1).
    vec2 uv = (2.0*fragCoord-iResolution.xy)/iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c=getBackgroundColor(uv);
    vec3 c=backgroundColor;
    vec3 ro = vec3(0, 0, 3.); // ray origin that represents camera position
    vec3 rd = normalize(vec3(uv, -1)); // ray direction
    SDFResult result=rayMarch(ro,rd,START_DIST,MAX_DIST);
    float d=result.d;
    if(d<MAX_DIST){
        // Diffuse shading from a point light (not a directional light:
        // the direction is recomputed per hit point below).
        vec3 p=ro+rd*d;
        vec3 n=calcNormal(p);
        vec3 lightPosition=vec3(2,2,7);
        //vec3 light_direction=normalize(vec3(1,0,5)); // directional-light alternative
        vec3 light_direction=normalize(lightPosition-p);
        vec3 light_color=vec3(1,1,1);
        float diffuse=max(0.0,dot(n,light_direction));
        diffuse=clamp(diffuse,0.1,1.0); // keep a minimum ambient-like term
        c=light_color*diffuse*result.color+backgroundColor*0.2;
    }
    // Output to screen
    fragColor = vec4(vec3(c),1.0);
}

1.4. 摄像机向右移动

核心代码


// Camera moved to the right: the ray origin is shifted to x = 5 while the
// look-at point stays on the green cube, so the cube remains centered.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    //other code ...
    vec3 c=backgroundColor;
    vec3 lp=vec3(0,0.2,-4); // look-at point (the green cube)
    vec3 ro = vec3(5, 0, 3.); // ray origin that represents camera position
    vec3 rd = camera(ro,lp,vec3(0,1,0))*normalize(vec3(uv, -1)); // ray direction
}

通过调整 camera 函数中的参数,可以控制相机的位置和朝向。 其中 lp为相机看向的位置,即图中绿色的立方体,该立方体将始终在画面的中心。

1.5. 摄像机前后移动

核心代码

// Camera moving back and forth: the z coordinate of the ray origin
// oscillates with time while the look-at point stays fixed.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    //other code ...
    vec3 c=backgroundColor;
    vec3 lp=vec3(0,0.2,-4); // look-at point (the green cube)
    vec3 ro = vec3(0, 0, 3.+sin(iTime)*2.); // ray origin that represents camera position
    vec3 rd = camera(ro,lp,vec3(0,1,0))*normalize(vec3(uv, -1)); // ray direction
}

即移动相机位置即可
完整代码

// Width of one pixel in normalized units (based on vertical resolution).
#define PIXW (1./iResolution.y)

const int   MAX_STEPS  = 100;    // maximum ray-march iterations
const float START_DIST = 0.001;  // distance along the ray where marching starts
const float MAX_DIST   = 100.0;  // rays longer than this are treated as misses
const float EPSILON    = 0.0001; // surface-hit threshold

// Result of a signed-distance query: the distance and the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Homogeneous rotation about the X axis by theta radians.
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Y axis by theta radians.
mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Z axis by theta radians.
mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b centered at offset.
SDFResult sdBox( vec3 p, vec3 b,vec3 offset,vec3 color )
{
    vec3 q = abs(p-offset) - b;
    return SDFResult(length(max(q,0.0)) + min(max(q.x,max(q.y,q.z)),0.0),color);
}

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1,1].
vec3 getBackgroundColor(vec2 uv)
{
    float y=(uv.y+1.)/2.; // remap uv.y to [0,1]
    return mix(vec3(1,0,1),vec3(0,1,1),y);
}

// Signed distance to a sphere of radius r centered at offset.
SDFResult sdSphere(vec3 p, float r,vec3 offset,vec3 color)
{
    return SDFResult(length(p-offset)-r,color);
}

// Signed distance to the horizontal floor plane y = -1.
SDFResult sdFloor(vec3 p,vec3 color)
{
    float d=p.y+1.;
    return SDFResult(d,color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a,SDFResult b)
{
    if (a.d<b.d){
        return a;
    }
    return b;
}

// LookAt matrix: rotates view-space directions into a right-handed world
// basis (+x right, +y up, +z out of the screen). The negated columns flip
// the LookAt frame's into-the-screen +z to match that convention.
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector) {
    vec3 cd = normalize(lookAtPoint - cameraPos); // camera direction
    vec3 cr = normalize(cross(upVector, cd));     // horizontal side axis
    vec3 cu = normalize(cross(cd, cr));           // camera up
    return mat3(-cr, cu, -cd);
}

// Scene SDF: three colored boxes plus a checkered floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1=sdBox(p,vec3(1.,1.0,1.),vec3(-4,0.2,-4),vec3(1.,0.,0.)); // red box
    SDFResult result2=sdBox(p,vec3(1.,1.0,1.),vec3(0,0.2,-4),vec3(0.,1.,0.));  // green box
    SDFResult result3=sdBox(p,vec3(1.,1.0,1.),vec3(4,0.2,-4),vec3(0.,0.,1.));  // blue box
    SDFResult result=minWithColor(result1,result2);
    result=minWithColor(result,result3);
    // Checkerboard shade from the integer parity of x and z.
    vec3 floorColor = vec3(1. + 0.7*mod(floor(p.x) + floor(p.z), 2.0));
    result=minWithColor(result, sdFloor(p,floorColor));
    return result;
}

// Surface normal via the tetrahedron central-difference technique.
vec3 calcNormal(vec3 p) {
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon offset
    return normalize(
        e.xyy * sdScene(p + e.xyy).d +
        e.yyx * sdScene(p + e.yyx).d +
        e.yxy * sdScene(p + e.yxy).d +
        e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t from start to end. Returns the scene
// result at the final point with d replaced by the total distance
// marched (compare against end to detect a miss).
SDFResult rayMarch(vec3 ro, vec3 rd,float start,float end)
{
    float d=start;
    SDFResult result;
    for(int i=0;i<MAX_STEPS;i++){
        vec3 p=ro+rd*d;
        result=sdScene(p);
        d+=result.d;
        if(result.d<EPSILON || d>end) break;
    }
    result.d=d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates (aspect-corrected, roughly -1..1).
    vec2 uv = (2.0*fragCoord-iResolution.xy)/iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c=getBackgroundColor(uv);
    vec3 c=backgroundColor;
    vec3 lp=vec3(0,0.2,-4); // look-at point (the green cube)
    vec3 ro = vec3(0, 0, 3.+sin(iTime)*2.); // ray origin that represents camera position
    vec3 rd = camera(ro,lp,vec3(0,1,0))*normalize(vec3(uv, -1)); // ray direction
    SDFResult result=rayMarch(ro,rd,START_DIST,MAX_DIST);
    float d=result.d;
    if(d<MAX_DIST){
        // Diffuse shading from a point light (not a directional light:
        // the direction is recomputed per hit point below).
        vec3 p=ro+rd*d;
        vec3 n=calcNormal(p);
        vec3 lightPosition=vec3(2,2,7);
        //vec3 light_direction=normalize(vec3(1,0,5)); // directional-light alternative
        vec3 light_direction=normalize(lightPosition-p);
        vec3 light_color=vec3(1,1,1);
        float diffuse=max(0.0,dot(n,light_direction));
        diffuse=clamp(diffuse,0.1,1.0); // keep a minimum ambient-like term
        c=light_color*diffuse*result.color+backgroundColor*0.2;
    }
    // Output to screen
    fragColor = vec4(vec3(c),1.0);
}

1.6. 摄像机围绕物体旋转

运行结果如下:
外链图片转存失败,源站可能有防盗链机制,建议将图片保存下来直接上传

思路:
我们只考虑摄像机在xz平面上的旋转,y轴不变。这样就是要改变 x轴和z轴的位置 。
从顶部向下看 ,摄像机将在黑色圆形路径上移动。

即类似下图:可参考 desmos上创建的图表来试验循环路径。想象一下绿色的立方体位于圆圈的中心

核心代码

    vec3 ro = vec3(0, 0, 3); // ray origin that represents camera position
    float theta=iTime*0.5;   // orbit angle, advances with time
    float cameraRadius=10.;  // orbit radius
    // Circle centered at lp with radius cameraRadius, parameterized by theta.
    ro.x=cameraRadius*cos(theta)+lp.x;
    ro.z=cameraRadius*sin(theta)+lp.z;

完整代码

// Width of one pixel in normalized units (based on vertical resolution).
#define PIXW (1./iResolution.y)

const int   MAX_STEPS  = 100;    // maximum ray-march iterations
const float START_DIST = 0.001;  // distance along the ray where marching starts
const float MAX_DIST   = 100.0;  // rays longer than this are treated as misses
const float EPSILON    = 0.0001; // surface-hit threshold

// Result of a signed-distance query: the distance and the surface color.
struct SDFResult
{
    float d;
    vec3 color;
};

// Homogeneous rotation about the X axis by theta radians.
mat4 rotationX(float theta)
{
    return mat4(1.0, 0.0, 0.0, 0.0,
                0.0, cos(theta), -sin(theta), 0.0,
                0.0, sin(theta), cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Y axis by theta radians.
mat4 rotationY(float theta)
{
    return mat4(cos(theta), 0.0, sin(theta), 0.0,
                0.0, 1.0, 0.0, 0.0,
                -sin(theta), 0.0, cos(theta), 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Homogeneous rotation about the Z axis by theta radians.
mat4 rotationZ(float theta)
{
    return mat4(cos(theta), -sin(theta), 0.0, 0.0,
                sin(theta), cos(theta), 0.0, 0.0,
                0.0, 0.0, 1.0, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

// Signed distance to an axis-aligned box with half-extents b centered at offset.
SDFResult sdBox( vec3 p, vec3 b,vec3 offset,vec3 color )
{
    vec3 q = abs(p-offset) - b;
    return SDFResult(length(max(q,0.0)) + min(max(q.x,max(q.y,q.z)),0.0),color);
}

// Vertical magenta-to-cyan gradient; uv.y is expected in [-1,1].
vec3 getBackgroundColor(vec2 uv)
{
    float y=(uv.y+1.)/2.; // remap uv.y to [0,1]
    return mix(vec3(1,0,1),vec3(0,1,1),y);
}

// Signed distance to a sphere of radius r centered at offset.
SDFResult sdSphere(vec3 p, float r,vec3 offset,vec3 color)
{
    return SDFResult(length(p-offset)-r,color);
}

// Signed distance to the horizontal floor plane y = -1.
SDFResult sdFloor(vec3 p,vec3 color)
{
    float d=p.y+1.;
    return SDFResult(d,color);
}

// Union of two SDF results: keep the closer surface (and its color).
SDFResult minWithColor(SDFResult a,SDFResult b)
{
    if (a.d<b.d){
        return a;
    }
    return b;
}

// LookAt matrix: rotates view-space directions into a right-handed world
// basis (+x right, +y up, +z out of the screen). The negated columns flip
// the LookAt frame's into-the-screen +z to match that convention.
mat3 camera(vec3 cameraPos, vec3 lookAtPoint, vec3 upVector) {
    vec3 cd = normalize(lookAtPoint - cameraPos); // camera direction
    vec3 cr = normalize(cross(upVector, cd));     // horizontal side axis
    vec3 cu = normalize(cross(cd, cr));           // camera up
    return mat3(-cr, cu, -cd);
}

// Scene SDF: three colored boxes plus a checkered floor.
SDFResult sdScene(vec3 p)
{
    SDFResult result1=sdBox(p,vec3(1.,1.0,1.),vec3(-4,0.2,-4),vec3(1.,0.,0.)); // red box
    SDFResult result2=sdBox(p,vec3(1.,1.0,1.),vec3(0,0.2,-4),vec3(0.,1.,0.));  // green box
    SDFResult result3=sdBox(p,vec3(1.,1.0,1.),vec3(4,0.2,-4),vec3(0.,0.,1.));  // blue box
    SDFResult result=minWithColor(result1,result2);
    result=minWithColor(result,result3);
    // Checkerboard shade from the integer parity of x and z.
    vec3 floorColor = vec3(1. + 0.7*mod(floor(p.x) + floor(p.z), 2.0));
    result=minWithColor(result, sdFloor(p,floorColor));
    return result;
}

// Surface normal via the tetrahedron central-difference technique.
vec3 calcNormal(vec3 p) {
    vec2 e = vec2(1.0, -1.0) * 0.0005; // epsilon offset
    return normalize(
        e.xyy * sdScene(p + e.xyy).d +
        e.yyx * sdScene(p + e.yyx).d +
        e.yxy * sdScene(p + e.yxy).d +
        e.xxx * sdScene(p + e.xxx).d);
}

// Sphere-trace the ray ro + rd*t from start to end. Returns the scene
// result at the final point with d replaced by the total distance
// marched (compare against end to detect a miss).
SDFResult rayMarch(vec3 ro, vec3 rd,float start,float end)
{
    float d=start;
    SDFResult result;
    for(int i=0;i<MAX_STEPS;i++){
        vec3 p=ro+rd*d;
        result=sdScene(p);
        d+=result.d;
        if(result.d<EPSILON || d>end) break;
    }
    result.d=d;
    return result;
}

void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Normalized pixel coordinates (aspect-corrected, roughly -1..1).
    vec2 uv = (2.0*fragCoord-iResolution.xy)/iResolution.xx;
    vec3 backgroundColor = vec3(0.835, 1, 1);
    //vec3 c=getBackgroundColor(uv);
    vec3 c=backgroundColor;
    vec3 lp=vec3(0,0.2,-4); // look-at point (the green cube)
    vec3 ro = vec3(0, 0, 3); // ray origin that represents camera position
    // Orbit in the xz plane on a circle centered at lp.
    float theta=iTime*0.5;   // orbit angle, advances with time
    float cameraRadius=10.;  // orbit radius
    ro.x=cameraRadius*cos(theta)+lp.x;
    ro.z=cameraRadius*sin(theta)+lp.z;
    vec3 rd = camera(ro,lp,vec3(0,1,0))*normalize(vec3(uv, -1)); // ray direction
    SDFResult result=rayMarch(ro,rd,START_DIST,MAX_DIST);
    float d=result.d;
    if(d<MAX_DIST){
        // Diffuse shading from a point light (not a directional light:
        // the direction is recomputed per hit point below).
        vec3 p=ro+rd*d;
        vec3 n=calcNormal(p);
        vec3 lightPosition=vec3(2,2,7);
        //vec3 light_direction=normalize(vec3(1,0,5)); // directional-light alternative
        vec3 light_direction=normalize(lightPosition-p);
        vec3 light_color=vec3(1,1,1);
        float diffuse=max(0.0,dot(n,light_direction));
        diffuse=clamp(diffuse,0.1,1.0); // keep a minimum ambient-like term
        c=light_color*diffuse*result.color+backgroundColor*0.2;
    }
    // Output to screen
    fragColor = vec4(vec3(c),1.0);
}

1.7. 参考

  1. 10.1 带有观察点的相机模型 | Shadertoy中文教程
  2. 10.2 围绕目标旋转摄像机 | Shadertoy中文教程
  3. LearnOpenGL - Camera
http://www.dtcms.com/wzjs/237580.html

相关文章:

  • wordpress不跳转页面整站优化包年
  • 怎么做自己的网站百度百度一下你就知道
  • 站内优化怎么做无锡优化网站排名
  • 网站开发语音北京营销网站制作
  • 沈阳网站制作培训百度预测大数据官网
  • 可以用自己电脑做网站吗怎么才能在百度上打广告
  • 海南自贸区资源优化排名网站
  • 免费网站管理软件营销网站建设哪家好
  • 网站建设海南谷歌google官网下载
  • 网站定制化快速优化seo
  • 网站上推广游戏怎么做的google浏览器官方下载
  • 网站开发者取色工具怎么推广自己的公司
  • 域名解析 网站汕头网站建设方案开发
  • 不关网站备案新站网站推广公司
  • 渭南网站建设推广小红书seo
  • 东莞专业网站推广怎么做沙洋县seo优化排名价格
  • php可以做手机网站吗店铺运营方案策划
  • 坪山网站建设资讯广州seo做得比较好的公司
  • 为网站做seo名优网站关键词优化
  • 做网站创业风险分析域名批量注册查询
  • 做断桥铝窗户的网站微信营销软件排行榜
  • 阿里云服务器 做网站百度关键字优化价格
  • 手机商城网站模板北京seo公司公司
  • 宁波快速制作网站企业获客方式
  • 青岛城阳 软件网站开发资源搜索引擎
  • 做网站的英文编辑公司网站域名续费一年多少钱
  • 网站做的最好的网站有哪些百度竞价推广方案范文
  • 侨联网站建设方案今天济南刚刚发生的新闻
  • 南京建站平台软文营销模板
  • 广州网站设计软件b站视频推广网站动漫