I'm currently writing a basic ray tracer in C. I've managed to render some simple shapes such as spheres, boxes, planes and cones, and I've shaded them with Phong illumination.
But my problem is that I can't figure out how to ray trace a hemisphere. Is there an established equation that defines a hemisphere? If so, please enlighten me, because I couldn't find one. Or is there an established method for doing it that I just don't know about?
I've also tried cutting a sphere with a plane and only displaying the upper half, but it didn't work (I'm new to all of this, so my understanding may well be wrong).
Edit: OK, sorry, I'm new to all of this, but here is what I have tried.

#include "raytracing.h"

t_env  *init_sphere(t_env *e)
{
//sphere position and radius
    e->sph.posi.x = 0;
    e->sph.posi.y = 0;
    e->sph.posi.z = -1;
    e->sph.rad = 0.5; // a zero radius would make the sphere degenerate (never hit)
    e->sph.color = (t_color){255, 255, 128};
    return (e);
}

t_env  *init_plane(t_env *e)
{
//plane position
    e->plane.posi.x = 0;
    e->plane.posi.y = -0.5;
    e->plane.posi.z = 0;
//plane normal
    e->plane.normal.x = 0;
    e->plane.normal.y = 1;
    e->plane.normal.z = 0;

    e->plane.color = (t_color){0, 255, 0};
    return (e);
}

double         inter_plane(t_env *e, double *t) //calculating plane intersection
{
    t_vect  dist;
    double  norm;

    norm = dot(e->plane.normal, e->r.direction);
    if (fabs(norm) > 1e-6)
    {
        dist = vect_sub(e->plane.posi, e->r.start);
        e->t0 = dot(dist, e->plane.normal) / norm;
        if (e->t0 < *t && e->t0 > 1e-6)
        {
            *t = e->t0;
            return (1);
        }
        else
            return (0);
    }
    return (0);
}

double      inter_sph(t_env *e, double *t) //calculating sphere intersection
{
    double  delta;
    double  sqrtd;
    t_vect  dist;

    e->a = dot(e->r.direction, e->r.direction);
    dist = vect_sub(e->r.start, e->sph.posi);
    e->b = 2 * dot(dist, e->r.direction);
    e->c = dot(dist, dist) - e->sph.rad * e->sph.rad;
    delta = e->b * e->b - 4 * e->a * e->c;
    if (delta < 0)
        return (0);
    sqrtd = sqrt(delta);
    e->t0 = (-e->b + sqrtd) / (2 * e->a);
    e->t1 = (-e->b - sqrtd) / (2 * e->a);
    if (e->t0 > e->t1)
        e->t0 = e->t1;
    if ((e->t0 > 1e-6) && (e->t0 < *t))
    {
        *t = e->t0;
        return (1);
    }
    else
        return (0);
}

double      inter_hemisphere(t_env *e) //calculating hemisphere intersection
{
    t_vect  hit_normal;

    if (inter_sph(e, &e->t) == 1)
    {

        hit_normal = vect_add(e->r.start, vect_scalaire(e->t, e->r.direction));
        hit_normal = vect_normalize(hit_normal);
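        // note: this normalizes the hit position itself rather than
        // (hit position - sphere center), so it only equals the surface
        // normal when the sphere is centered at the origin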

        if (inter_plane(e, &(e->t)) == 1)
        {
            if (dot(e->plane.normal, hit_normal) < 0)
                return (1);
            return (0);
        }
    }
    return (0);
}

e->t is supposed to hold the distance of the closest hit to the camera, so that near and far objects are displayed correctly.
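The caller isn't included above, but as a rough sketch (assuming it sits in the same file as the functions shown), the per-pixel logic would be something along these lines, where each inter_* call only overwrites e->t when it finds a closer forward hit:

#include <math.h>   // INFINITY

// hypothetical per-pixel driver, not code from the post:
// e->t starts "infinitely far" and each intersection routine
// is only allowed to bring it closer
static int  trace_pixel(t_env *e)
{
    int hit;

    e->t = INFINITY;
    hit = 0;
    if (inter_sph(e, &e->t) == 1)
        hit = 1;
    if (inter_plane(e, &e->t) == 1)
        hit = 1;            // only taken if the plane hit is closer
    return (hit);           // e->t now holds the closest hit distance
}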
Here I tried to apply what Spektre suggested, and it displayed something that looks like this:
When I tried to rotate it, I got this:
Edit 2: After using Spektre's method, I ended up with this hemisphere intersection function, which looks like this:
double      inter_hemisphere(t_env *e, double *t)
{
    double  delta;
    double  sqrtd;
    t_vect  dist;

    e->a = dot(e->r.direction, e->r.direction);
    dist = vect_sub(e->r.start, e->sph.posi);
    e->b = 2 * dot(dist, e->r.direction);
    e->c = dot(dist, dist) - e->sph.rad * e->sph.rad;
    delta = e->b * e->b - 4 * e->a * e->c;
    if (delta < 0)
        return (0);
    sqrtd = sqrt(delta);
    e->t0 = (-e->b + sqrtd) / (2 * e->a);
    e->t1 = (-e->b - sqrtd) / (2 * e->a);
    t_vect  v2;

    // discard a hit if it lies on the cut-away side of the plane,
    // i.e. dot(plane normal, hit position - sphere center) > 0
    v2 = vect_add(e->r.start, vect_sub(vect_scalaire(e->t0, e->r.direction), e->sph.posi));
    if (dot(e->plane.normal, v2) > 0.0)
        e->t0 = -1.0;
    v2 = vect_add(e->r.start, vect_sub(vect_scalaire(e->t1, e->r.direction), e->sph.posi));
    if (dot(e->plane.normal, v2) > 0.0)
        e->t1 = -1.0;
    // fall back to the other root if one of them was discarded
    if (e->t0 < 0.0)
        e->t0 = e->t1;
    if (e->t1 < 0.0)
        e->t1 = e->t0;
    double tt;
    // keep the closest remaining forward hit
    tt = fmin(e->t0, e->t1);
    if (tt <= 0.0)
        tt = fmax(e->t0, e->t1);
    if (tt > 1e-6 && tt < *t)
    {
        *t = tt;
        return (1);
    }
    return (0);
}

And here is the result!

Best Answer

The simplest way is to cut the sphere with a plane.
[image: a sphere cut by a plane]
Any point on the sphere whose offset from the sphere center (point on sphere - sphere center) points in the same direction as the plane normal is cut away. Put simply, it is this condition:

dot(point on sphere - sphere center , plane normal ) > 0.0

But do not forget to test both intersections of the ray with the sphere, because the closest one can be on the other side of the cutting plane...
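Expressed with the helpers from the question (dot, vect_add, vect_sub, vect_scalaire), the cut test for a candidate ray distance t could be wrapped like this sketch; hit_is_kept is just a hypothetical name:

// hypothetical helper: returns 1 when the sphere hit at ray distance t
// lies on the kept side of the cutting plane, 0 when it is cut away
static int  hit_is_kept(t_env *e, double t)
{
    t_vect  hit;
    t_vect  v;

    hit = vect_add(e->r.start, vect_scalaire(t, e->r.direction));
    v = vect_sub(hit, e->sph.posi);
    return (dot(e->plane.normal, v) <= 0.0);
}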
I tried to apply this to my GLSL ray tracer:
Reflection and refraction impossible without recursive ray tracing?
and came up with this updated fragment shader:
Vertex (unchanged):
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
uniform float aspect;
uniform float focal_length;
uniform mat4x4 tm_eye;
layout(location=0) in vec2 pos;

out smooth vec2 txt_pos;    // frag position on screen <-1,+1> for debug prints
out smooth vec3 ray_pos;    // ray start position
out smooth vec3 ray_dir;    // ray start direction
//------------------------------------------------------------------
void main(void)
    {
    vec4 p;
    txt_pos=pos;
    // perspective projection
    p=tm_eye*vec4(pos.x/aspect,pos.y,0.0,1.0);
    ray_pos=p.xyz;
    p-=tm_eye*vec4(0.0,0.0,-focal_length,1.0);
    ray_dir=normalize(p.xyz);

    gl_Position=vec4(pos,0.0,1.0);
    }
//------------------------------------------------------------------

Fragment (hemisphere added):
//------------------------------------------------------------------
#version 420 core
//------------------------------------------------------------------
// Ray tracer ver: 1.000
//------------------------------------------------------------------
in smooth vec3      ray_pos;    // ray start position
in smooth vec3      ray_dir;    // ray start direction
uniform float       n0;         // refractive index of camera origin
uniform int         fac_siz;    // square texture x,y resolution size
uniform int         fac_num;    // number of valid floats in texture
uniform sampler2D   fac_txr;    // scene mesh data texture
out layout(location=0) vec4 frag_col;
//---------------------------------------------------------------------------
#define _reflect
#define _refract
//---------------------------------------------------------------------------
void main(void)
    {
    const vec3  light_dir=normalize(vec3(0.1,0.1,1.0));
    const float light_iamb=0.1;                 // dot offset
    const float light_idir=0.5;                 // directional light amplitude
    const vec3 back_col=vec3(0.2,0.2,0.2);      // background color

    const float _zero=1e-6;         // to avoid intersection with start point of ray
    const int _fac_triangles  =0;   // r,g,b,a, n, triangle   count, { x0,y0,z0,x1,y1,z1,x2,y2,z2 }
    const int _fac_spheres    =1;   // r,g,b,a, n, sphere     count, { x,y,z,r }
    const int _fac_hemispheres=2;   // r,g,b,a, n, hemisphere count,{ x,y,z,r,nx,ny,nz }
    // ray scene intersection
    struct _ray
        {
        dvec3 pos,dir,nor;
        vec3 col;
        float refl,refr;// reflection,refraction intensity coefficients
        float n0,n1;    // refraction index (start,end)
        double l;       // ray length
        int lvl,i0,i1;  // recursion level, reflect, refract
        };
    const int _lvls=4;
    const int _rays=(1<<_lvls)-1;
    _ray ray[_rays]; int rays;

    dvec3 v0,v1,v2,pos;
    vec3 c;
    float refr,refl,n1;
    double tt,t,a;
    int i0,ii,num,id;

    // fac texture access
    vec2 st; int i,j; float ds=1.0/float(fac_siz-1);
    #define fac_get texture(fac_txr,st).r; st.s+=ds; i++; j++; if (j==fac_siz) { j=0; st.s=0.0; st.t+=ds; }
    // enque start ray
    ray[0].pos=ray_pos;
    ray[0].dir=normalize(ray_dir);
    ray[0].nor=vec3(0.0,0.0,0.0);
    ray[0].refl=0.0;
    ray[0].refr=0.0;
    ray[0].n0=n0;
    ray[0].n1=1.0;
    ray[0].l =0.0;
    ray[0].lvl=0;
    ray[0].i0=-1;
    ray[0].i1=-1;
    rays=1;

    // loop all enqued rays
    for (i0=0;i0<rays;i0++)
        {
        // loop through all objects
        // find closest forward intersection between them and ray[i0]
        // store it to ray[i0].(nor,col)
        // store it to pos,n1
        t=tt=-1.0; ii=1; ray[i0].l=0.0;
        ray[i0].col=back_col;
        pos=ray[i0].pos; n1=n0;
        for (st=vec2(0.0,0.0),i=j=0;i<fac_num;)
            {
            c.r=fac_get;            // RGBA
            c.g=fac_get;
            c.b=fac_get;
            refl=fac_get;
            refr=fac_get;
            n1=fac_get;             // refraction index
            a=fac_get; id=int(a);   // object type
            a=fac_get; num=int(a);  // face count

            if (id==_fac_triangles)
             for (;num>0;num--)
                {
                v0.x=fac_get; v0.y=fac_get; v0.z=fac_get;
                v1.x=fac_get; v1.y=fac_get; v1.z=fac_get;
                v2.x=fac_get; v2.y=fac_get; v2.z=fac_get;
                dvec3 e1,e2,n,p,q,r;
                double t,u,v,det,idet;
                //compute ray triangle intersection
                e1=v1-v0;
                e2=v2-v0;
                // Calculate planes normal vector
                p=cross(ray[i0].dir,e2);
                det=dot(e1,p);
                // Ray is parallel to plane
                if (abs(det)<1e-8) continue;
                idet=1.0/det;
                r=ray[i0].pos-v0;
                u=dot(r,p)*idet;
                if ((u<0.0)||(u>1.0)) continue;
                q=cross(r,e1);
                v=dot(ray[i0].dir,q)*idet;
                if ((v<0.0)||(u+v>1.0)) continue;
                t=dot(e2,q)*idet;
                if ((t>_zero)&&((t<=tt)||(ii!=0)))
                    {
                    ii=0; tt=t;
                    // store color,n ...
                    ray[i0].col=c;
                    ray[i0].refl=refl;
                    ray[i0].refr=refr;
                    // barycentric interpolate position
                    t=1.0-u-v;
                    pos=(v0*t)+(v1*u)+(v2*v);
                    // compute normal (store as dir for now)
                    e1=v1-v0;
                    e2=v2-v1;
                    ray[i0].nor=cross(e1,e2);
                    }
                }

            if (id==_fac_spheres)
             for (;num>0;num--)
                {
                float r;
                v0.x=fac_get; v0.y=fac_get; v0.z=fac_get; r=fac_get;
                // compute l0 length of ray(p0,dp) to intersection with sphere(v0,r)
                // where rr= r^-2
                double aa,bb,cc,dd,l0,l1,rr;
                dvec3 p0,dp;
                p0=ray[i0].pos-v0;  // set sphere center to (0,0,0)
                dp=ray[i0].dir;
                rr = 1.0/(r*r);
                aa=2.0*rr*dot(dp,dp);
                bb=2.0*rr*dot(p0,dp);
                cc=    rr*dot(p0,p0)-1.0;
                dd=((bb*bb)-(2.0*aa*cc));
                if (dd<0.0) continue;
                dd=sqrt(dd);
                l0=(-bb+dd)/aa;
                l1=(-bb-dd)/aa;
                if (l0<0.0) l0=l1;
                if (l1<0.0) l1=l0;
                t=min(l0,l1); if (t<=_zero) t=max(l0,l1);
                if ((t>_zero)&&((t<=tt)||(ii!=0)))
                    {
                    ii=0; tt=t;
                    // store color,n ...
                    ray[i0].col=c;
                    ray[i0].refl=refl;
                    ray[i0].refr=refr;
                    // position,normal
                    pos=ray[i0].pos+(ray[i0].dir*t);
                    ray[i0].nor=pos-v0;
                    }
                }
            if (id==_fac_hemispheres)
             for (;num>0;num--)
                {
                float r;
                v0.x=fac_get; v0.y=fac_get; v0.z=fac_get; r=fac_get;
                v1.x=fac_get; v1.y=fac_get; v1.z=fac_get;
                // compute l0 length of ray(p0,dp) to intersection with sphere(v0,r)
                // where rr= r^-2
                double aa,bb,cc,dd,l0,l1,rr;
                dvec3 p0,dp;
                p0=ray[i0].pos-v0;  // set sphere center to (0,0,0)
                dp=ray[i0].dir;
                rr = 1.0/(r*r);
                aa=2.0*rr*dot(dp,dp);
                bb=2.0*rr*dot(p0,dp);
                cc=    rr*dot(p0,p0)-1.0;
                dd=((bb*bb)-(2.0*aa*cc));
                if (dd<0.0) continue;
                dd=sqrt(dd);
                l0=(-bb+dd)/aa;
                l1=(-bb-dd)/aa;
                // test both hits-v0 against normal v1
                v2=ray[i0].pos+(ray[i0].dir*l0)-v0; if (dot(v1,v2)>0.0) l0=-1.0;
                v2=ray[i0].pos+(ray[i0].dir*l1)-v0; if (dot(v1,v2)>0.0) l1=-1.0;
                if (l0<0.0) l0=l1;
                if (l1<0.0) l1=l0;
                t=min(l0,l1); if (t<=_zero) t=max(l0,l1);
                if ((t>_zero)&&((t<=tt)||(ii!=0)))
                    {
                    ii=0; tt=t;
                    // store color,n ...
                    ray[i0].col=c;
                    ray[i0].refl=refl;
                    ray[i0].refr=refr;
                    // position,normal
                    pos=ray[i0].pos+(ray[i0].dir*t);
                    ray[i0].nor=pos-v0;
                    }
                }
            }
        ray[i0].l=tt;
        ray[i0].nor=normalize(ray[i0].nor);
        // split ray from pos and ray[i0].nor
        if ((ii==0)&&(ray[i0].lvl<_lvls-1))
            {
            t=dot(ray[i0].dir,ray[i0].nor);

            // reflect
            #ifdef _reflect
            if ((ray[i0].refl>_zero)&&(t<_zero))    // do not reflect inside objects
                {
                ray[i0].i0=rays;
                ray[rays]=ray[i0];
                ray[rays].lvl++;
                ray[rays].i0=-1;
                ray[rays].i1=-1;
                ray[rays].pos=pos;
                ray[rays].dir=ray[rays].dir-(2.0*t*ray[rays].nor);
                ray[rays].n0=ray[i0].n0;
                ray[rays].n1=ray[i0].n0;
                rays++;
                }
            #endif

            // refract
            #ifdef _refract
            if (ray[i0].refr>_zero)
                {
                ray[i0].i1=rays;
                ray[rays]=ray[i0];
                ray[rays].lvl++;
                ray[rays].i0=-1;
                ray[rays].i1=-1;
                ray[rays].pos=pos;

                t=dot(ray[i0].dir,ray[i0].nor);
                if (t>0.0)  // exit object
                    {
                    ray[rays].n0=ray[i0].n0;
                    ray[rays].n1=n0;
                    if (i0==0) ray[i0].n1=n1;
                    v0=-ray[i0].nor; t=-t;
                    }
                else{       // enter object
                    ray[rays].n0=n1;
                    ray[rays].n1=ray[i0].n0;
                    ray[i0  ].n1=n1;
                    v0=ray[i0].nor;
                    }
                n1=ray[i0].n0/ray[i0].n1;
                tt=1.0-(n1*n1*(1.0-t*t));
                if (tt>=0.0)
                    {
                    ray[rays].dir=(ray[i0].dir*n1)-(v0*((n1*t)+sqrt(tt)));
                    rays++;
                    }
                }
            #endif
            }
        else if (i0>0) // ignore last ray if nothing hit
            {
            ray[i0]=ray[rays-1];
            rays--; i0--;
            }
        }
    // back track ray intersections and compute output color col
    // lvl is sorted ascending so backtrack from end
    for (i0=rays-1;i0>=0;i0--)
        {
        // directional + ambient light
        t=abs(dot(ray[i0].nor,light_dir)*light_idir)+light_iamb;
        t*=1.0-ray[i0].refl-ray[i0].refr;
        ray[i0].col.rgb*=float(t);
        // reflect
        ii=ray[i0].i0;
        if (ii>=0) ray[i0].col.rgb+=ray[ii].col.rgb*ray[i0].refl;
        // refract
        ii=ray[i0].i1;
        if (ii>=0) ray[i0].col.rgb+=ray[ii].col.rgb*ray[i0].refr;
        }
    frag_col=vec4(ray[0].col,1.0);
    }
//---------------------------------------------------------------------------

The vertex shader just creates the ray start positions and directions (interpolated by the GPU), and the fragment shader then handles each ray (per pixel).
I used this scene:
// init mesh raytracer
ray.gl_init();
ray.beg();
//                 r   g   b rfl rfr   n
ray.add_material(1.0,0.7,0.1,0.3,0.0,_n_glass); ray.add_hemisphere( 0.0, 0.0, 2.0,0.5, 0.0, 0.0, 1.0);
ray.add_material(1.0,1.0,1.0,0.3,0.0,_n_glass); ray.add_box       ( 0.0, 0.0, 6.0,9.0,9.0,0.1);
ray.add_material(1.0,1.0,1.0,0.1,0.8,_n_glass); ray.add_sphere    ( 0.0, 0.0, 0.5,0.5);
ray.add_material(1.0,0.1,0.1,0.3,0.0,_n_glass); ray.add_sphere    (+2.0, 0.0, 2.0,0.5);
ray.add_material(0.1,1.0,0.1,0.3,0.0,_n_glass); ray.add_box       (-2.0, 0.0, 2.0,0.5,0.5,0.5);
ray.add_material(0.1,0.1,1.0,0.3,0.0,_n_glass);
ray.add_tetrahedron
    (
     0.0, 0.0, 3.0,
    -1.0,-1.0, 4.0,
    +1.0,-1.0, 4.0,
     0.0,+1.0, 4.0
    );
ray.end();

It contains a single yellow hemisphere at position (0.0, 0.0, 2.0) with radius r = 0.5 and plane normal (0.0, 0.0, 1.0). Rotating the object can be done simply by rotating the plane normal.
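As a side note (a sketch, not code from the shader above): tilting the hemisphere by an angle a around the x axis just means rotating the stored normal, for example with the t_vect type from the question:

#include <math.h>

// hypothetical helper: rotate the hemisphere plane normal by angle a
// (in radians) around the x axis; the cut plane, and with it the
// hemisphere orientation, follows the normal automatically
t_vect  rotate_normal_x(t_vect n, double a)
{
    t_vect  r;

    r.x = n.x;
    r.y = n.y * cos(a) - n.z * sin(a);
    r.z = n.y * sin(a) + n.z * cos(a);
    return (r);
}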
And here is the preview:
[image: rendered preview of the scene with the yellow hemisphere]
As you can see, the hemisphere is simply a sphere cut by a plane... The only important code from above is this (see the *** comments):
if (id==_fac_hemispheres) // *** ignore
 for (;num>0;num--) // *** ignore
    {
    float r;
    // *** here v0 is center, v1 is plane normal and r is radius
    v0.x=fac_get; v0.y=fac_get; v0.z=fac_get; r=fac_get;
    v1.x=fac_get; v1.y=fac_get; v1.z=fac_get;
    // *** this is ray/ellipsoid intersection returning l0,l1 ray distances for both hits
    // compute l0 length of ray(p0,dp) to intersection with sphere(v0,r)
    // where rr= r^-2
    double aa,bb,cc,dd,l0,l1,rr;
    dvec3 p0,dp;
    p0=ray[i0].pos-v0;  // set sphere center to (0,0,0)
    dp=ray[i0].dir;
    rr = 1.0/(r*r);
    aa=2.0*rr*dot(dp,dp);
    bb=2.0*rr*dot(p0,dp);
    cc=    rr*dot(p0,p0)-1.0;
    dd=((bb*bb)-(2.0*aa*cc));
    if (dd<0.0) continue;
    dd=sqrt(dd);
    l0=(-bb+dd)/aa;
    l1=(-bb-dd)/aa;
    // *** this throws away hits on the wrong side of the plane
    // test both hits-v0 against normal v1
    v2=ray[i0].pos+(ray[i0].dir*l0)-v0; if (dot(v1,v2)>0.0) l0=-1.0;
    v2=ray[i0].pos+(ray[i0].dir*l1)-v0; if (dot(v1,v2)>0.0) l1=-1.0;
    // *** this is just using closer valid hit
    if (l0<0.0) l0=l1;
    if (l1<0.0) l1=l0;
    t=min(l0,l1); if (t<=_zero) t=max(l0,l1);
    if ((t>_zero)&&((t<=tt)||(ii!=0)))
        {
        ii=0; tt=t;
        // store color,n ...
        ray[i0].col=c;
        ray[i0].refl=refl;
        ray[i0].refr=refr;
        // position,normal
        pos=ray[i0].pos+(ray[i0].dir*t);
        ray[i0].nor=pos-v0;
        }
    }

I used my ray and ellipsoid intersection accuracy improvement because it returns both hits instead of just the first one.
If you cross-check the sphere and the hemisphere cases, you will see that I just added these two lines:
v2=ray[i0].pos+(ray[i0].dir*l0)-v0; if (dot(v1,v2)>0.0) l0=-1.0;
v2=ray[i0].pos+(ray[i0].dir*l1)-v0; if (dot(v1,v2)>0.0) l1=-1.0;

They just convert the ray distances into hit positions and evaluate the condition mentioned above...

A similar question about ray tracing a hemisphere in C can be found on Stack Overflow: https://stackoverflow.com/questions/57134950/
