Better white furnace test temporal accumulation
krupitskas committed Nov 24, 2024
1 parent 9557b57 commit 4b1ff0a
Showing 16 changed files with 417 additions and 61 deletions.
4 changes: 4 additions & 0 deletions Shaders/ConvertEquirectangularMap_VS.hlsl
@@ -18,6 +18,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};

VertexShaderOutput main(VertexPosTexCoord IN)
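The same four fields are added to every CameraParameters declaration so the HLSL layout stays in sync with the host-side constant buffer. Under HLSL packing rules a member never straddles a 16-byte register, so frame_number fills the lane left free after float3 camera_position and the remaining three uints start a new register; RayGenRT.hlsl below declares an explicit uint pad for that register's unused fourth lane. A sketch of the tail of the buffer with relative register labels (cN marks wherever the fields above the visible hunk end, so the absolute indices are assumptions):

cbuffer CameraParameters : register(b0)
{
    // ...preceding fields as in the hunk above...
    float4x4 view_inverse;          // cN+0 .. cN+3
    float4x4 projection_inverse;    // cN+4 .. cN+7
    float3   camera_position;       // cN+8.xyz
    uint     frame_number;          // cN+8.w -- packs into the lane after the float3
    uint     frames_accumulated;    // cN+9.x
    uint     reset_accumulation;    // cN+9.y
    uint     accumulation_enabled;  // cN+9.z (cN+9.w left unused; RayGenRT.hlsl pads it explicitly)
};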
7 changes: 5 additions & 2 deletions Shaders/ForwardPassPS.hlsl
@@ -37,6 +37,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};

cbuffer SceneParameters : register(b1)
@@ -183,6 +187,5 @@ float4 main(RS2PS input) : SV_Target

float3 f = C_ambient + C_diffuse * in_shadow + emissive.xyz; // + cubeMapSample.xyz + Specular

return float4(base_color.rgb, 1);
//return float4(f.xyz, 1.0);
return float4(f.xyz, 1.0);
}
4 changes: 4 additions & 0 deletions Shaders/ForwardPassVS.hlsl
@@ -33,6 +33,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};
#endif

4 changes: 4 additions & 0 deletions Shaders/IndirectForwardPass_PS.hlsl
@@ -14,6 +14,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};

cbuffer SceneParameters : register(b1)
4 changes: 4 additions & 0 deletions Shaders/IndirectForwardPass_VS.hlsl
@@ -24,6 +24,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};

cbuffer SceneParameters : register(b1)
274 changes: 253 additions & 21 deletions Shaders/RayGenRT.hlsl
@@ -24,6 +24,7 @@ struct Attributes

// Raytracing output texture, accessed as a UAV
RWTexture2D<float4> gOutput : register(u0);
RWTexture2D<float4> AccumulationBuffer : register(u1);

cbuffer CameraParameters : register(b0)
{
@@ -33,6 +34,11 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 position;
uint frame_number;
uint rtx_frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
uint pad;
}

uint JenkinsHash(uint x)
@@ -89,7 +95,6 @@ uint4 Pcg4d(uint4 v)
return v;
}


// octahedron encoding of normals
float2 octWrap(float2 v)
{
@@ -121,6 +126,144 @@ void DecodeNormals(float4 encodedNormals, out float3 geometryNormal, out float3
shadingNormal = decodeNormalOctahedron(encodedNormals.zw);
}

//struct MaterialProperties
//{
// float3 baseColor;
// float metalness;

// float3 emissive;
// float roughness;

// float transmissivness;
// float opacity;
//};

//MaterialProperties LoadMaterialProperties(uint materialID, float2 uvs) {
// MaterialProperties result = (MaterialProperties) 0;

// // Read base data
// MaterialData mData = materials[materialID];

// result.baseColor = mData.baseColor;
// result.emissive = mData.emissive;
// result.metalness = mData.metalness;
// result.roughness = mData.roughness;
// result.opacity = mData.opacity;

// // Load textures (using mip level 0)
// if (mData.baseColorTexIdx != INVALID_ID) {
// result.baseColor *= textures[mData.baseColorTexIdx].SampleLevel(linearSampler, uvs, 0.0f).rgb;
// }

// if (mData.emissiveTexIdx != INVALID_ID) {
// result.emissive *= textures[mData.emissiveTexIdx].SampleLevel(linearSampler, uvs, 0.0f).rgb;
// }

// if (mData.roughnessMetalnessTexIdx != INVALID_ID) {
// float3 occlusionRoughnessMetalness = textures[mData.roughnessMetalnessTexIdx].SampleLevel(linearSampler, uvs, 0.0f).rgb;
// result.metalness *= occlusionRoughnessMetalness.b;
// result.roughness *= occlusionRoughnessMetalness.g;
// }

// return result;
//}

// Clever offset_ray function from Ray Tracing Gems chapter 6
// Offsets the ray origin from current position p, along normal n (which must be geometric normal)
// so that no self-intersection can occur.
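// The offset is applied in the integer (ULP) domain so that it scales with the magnitude of p:
// far from the origin a fixed number of ULPs along n is enough, while very close to the origin
// a small constant floating-point offset is used instead.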
float3 OffsetRay(const float3 p, const float3 n)
{
static const float origin = 1.0f / 32.0f;
static const float float_scale = 1.0f / 65536.0f;
static const float int_scale = 256.0f;

int3 of_i = int3(int_scale * n.x, int_scale * n.y, int_scale * n.z);

float3 p_i = float3(
asfloat(asint(p.x) + ((p.x < 0) ? -of_i.x : of_i.x)),
asfloat(asint(p.y) + ((p.y < 0) ? -of_i.y : of_i.y)),
asfloat(asint(p.z) + ((p.z < 0) ? -of_i.z : of_i.z)));

return float3(abs(p.x) < origin ? p.x + float_scale * n.x : p_i.x,
abs(p.y) < origin ? p.y + float_scale * n.y : p_i.y,
abs(p.z) < origin ? p.z + float_scale * n.z : p_i.z);
}

#define PI 3.141592653589f
#define TWO_PI (2.0f * PI)
#define ONE_OVER_PI (1.0f / PI)
#define ONE_OVER_TWO_PI (1.0f / TWO_PI)

// -------------------------------------------------------------------------
// Quaternion rotations
// -------------------------------------------------------------------------

// Calculates rotation quaternion from input vector to the vector (0, 0, 1)
// Input vector must be normalized!
float4 GetRotationToZAxis(float3 input) {

// Handle special case when input is exact or near opposite of (0, 0, 1)
if (input.z < -0.99999f) return float4(1.0f, 0.0f, 0.0f, 0.0f);

return normalize(float4(input.y, -input.x, 0.0f, 1.0f + input.z));
}

// Calculates rotation quaternion from vector (0, 0, 1) to the input vector
// Input vector must be normalized!
float4 GetRotationFromZAxis(float3 input) {

// Handle special case when input is exact or near opposite of (0, 0, 1)
if (input.z < -0.99999f) return float4(1.0f, 0.0f, 0.0f, 0.0f);

return normalize(float4(-input.y, input.x, 0.0f, 1.0f + input.z));
}

// Returns the quaternion with inverted rotation
float4 InvertRotation(float4 q)
{
return float4(-q.x, -q.y, -q.z, q.w);
}

// Optimized point rotation using quaternion
// Source: https://gamedev.stackexchange.com/questions/28395/rotating-vector3-by-a-quaternion
float3 RotatePoint(float4 q, float3 v) {
const float3 qAxis = float3(q.x, q.y, q.z);
return 2.0f * dot(qAxis, v) * qAxis + (q.w * q.w - dot(qAxis, qAxis)) * v + 2.0f * q.w * cross(qAxis, v);
}

// Samples a direction within a hemisphere oriented along +Z axis with a cosine-weighted distribution
// Source: "Sampling Transformations Zoo" in Ray Tracing Gems by Shirley et al.
float3 SampleHemisphere(float2 u, out float pdf)
{
float a = sqrt(u.x);
float b = TWO_PI * u.y;

float3 result = float3(
a * cos(b),
a * sin(b),
sqrt(1.0f - u.x));

pdf = result.z * ONE_OVER_PI;

return result;
}

float3 SampleHemisphere(float2 u)
{
float pdf;
return SampleHemisphere(u, pdf);
}

// For sampling of all our diffuse BRDFs we use cosine-weighted hemisphere sampling, with PDF equal to (NdotL/PI)
float DiffusePdf(float NdotL) {
return NdotL * ONE_OVER_PI;
}

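// Rec. 709 luma weights for linear RGB.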
float Luminance(float3 rgb)
{
return dot(rgb, float3(0.2126f, 0.7152f, 0.0722f));
}

struct VertexLayout
{
float3 position;
@@ -169,6 +312,9 @@ void RayGen()
// Initialize the ray payload
HitInfo payload;

uint2 LaunchIndex = DispatchRaysIndex().xy;
uint2 LaunchDimensions = DispatchRaysDimensions().xy;

// Get the location within the dispatched 2D grid of work items
// (often maps to pixels, so this could represent a pixel coordinate).
uint2 pixel_xy = DispatchRaysIndex().xy;
@@ -192,33 +338,119 @@
ray.TMin = 0;
ray.TMax = 100000;

TraceRay(SceneBVH,
RAY_FLAG_NONE,
0xFF,
0,
0,
0,
ray,
payload
);
float3 radiance = float3(0.0f, 0.0f, 0.0f);
float3 throughput = float3(1.0f, 1.0f, 1.0f);
float3 sky_value = float3(1.0f, 1.0f, 1.0f);

float2 uvs = float2(0.0f, 0.0f);
uint rng_state = InitRNG(LaunchIndex, LaunchDimensions, frame_number);

if(!payload.has_hit())
int max_bounces = 16;

for (int bounce = 0; bounce < max_bounces; bounce++)
{
gOutput[pixel_xy] = float4(255, 0, 0, 1.f);
return;
TraceRay(
SceneBVH,
RAY_FLAG_NONE,
0xFF,
0,
0,
0,
ray,
payload
);

float2 uvs = float2(0.0f, 0.0f);

if(!payload.has_hit())
{
radiance += throughput * sky_value;
break;
}

float3 geometry_normal;
float3 shading_normal;
DecodeNormals(payload.encoded_normals, geometry_normal, shading_normal);

float3 view_vec = -ray.Direction;

if (dot(geometry_normal, view_vec) < 0.0f)
{
geometry_normal = -geometry_normal;
}

if (dot(geometry_normal, shading_normal) < 0.0f)
{
shading_normal = -shading_normal;
}

// Run importance sampling of selected BRDF to generate reflecting ray direction
float3 brdf_weight;
float2 u = float2(Rand(rng_state), Rand(rng_state));

// Ray coming from "below" the hemisphere, goodbye
if (dot(shading_normal, view_vec) <= 0.0f)
{
break;
}

// Transform view direction into local space of our sampling routines
// (local space is oriented so that its positive Z axis points along the shading normal)
float4 quat_rot_to_z = GetRotationToZAxis(shading_normal);
float3 v_local = RotatePoint(quat_rot_to_z, view_vec);
const float3 n_local = float3(0.0f, 0.0f, 1.0f);

float3 ray_direction_local = SampleHemisphere(u);

//sampleWeight = data.diffuseReflectance * diffuseTerm(data);

//// Prevent tracing direction with no contribution
//if (Luminance(sampleWeight) == 0.0f)
//{
// return false;
//}

// Transform sampled direction Llocal back to V vector space
ray_direction_local = normalize(RotatePoint(InvertRotation(quat_rot_to_z), ray_direction_local));

// Prevent tracing direction "under" the hemisphere (behind the triangle)
if (dot(geometry_normal, ray_direction_local) <= 0.0f)
{
break;
}

// Update ray
ray.Origin = OffsetRay(payload.hit_position, geometry_normal);
ray.Direction = ray_direction_local;
}

float3 geometry_normal;
float3 shading_normal;
DecodeNormals(payload.encoded_normals, geometry_normal, shading_normal);

SurfaceShaderParameters mat = MaterialBuffer[payload.material_id];
//SurfaceShaderParameters mat = MaterialBuffer[payload.material_id];
//Texture2D albedo_texture = ResourceDescriptorHeap[mat.albedo_texture_index];
//float3 albedo_color = albedo_texture.SampleLevel(LinearSampler, payload.uvs, 0.0f).rgb;

Texture2D albedo_texture = ResourceDescriptorHeap[mat.albedo_texture_index];
if(reset_accumulation > 0)
{
AccumulationBuffer[pixel_xy] = 0;
}

float3 albedo_color = albedo_texture.SampleLevel(LinearSampler, payload.uvs, 0.0f).rgb;
float3 result_color = 0;

if(accumulation_enabled > 0)
{
// Temporal accumulation
float3 previous_color = AccumulationBuffer[pixel_xy].rgb;
float3 accumulated_color = previous_color + radiance;
AccumulationBuffer[pixel_xy] = float4(accumulated_color, 1.0f);

result_color = accumulated_color / rtx_frames_accumulated;
}
else
{
result_color = radiance;
}

gOutput[pixel_xy] = float4(albedo_color, 1.f);
gOutput[pixel_xy] = float4(result_color, 1.f);
}
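A note on the accumulation in RayGen() above: AccumulationBuffer stores the running sum of per-frame radiance and the output divides it by rtx_frames_accumulated, which is the sample mean

\bar{C}_N = \frac{1}{N}\sum_{i=1}^{N} C_i = \bar{C}_{N-1} + \frac{C_N - \bar{C}_{N-1}}{N}

(the right-hand incremental form is an equivalent alternative that avoids storing an unbounded sum). For the white furnace test named in the commit title, with unit-albedo diffuse surfaces under the uniform white sky_value, every path contributes exactly 1, so the mean should stay pinned at 1 and any drift over accumulated frames indicates energy being lost or gained by the sampling. This assumes the host sets rtx_frames_accumulated to at least 1 before the first accumulated frame (the divide would otherwise be by zero) and resets it together with reset_accumulation; neither counter is managed in the shader shown here.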
4 changes: 4 additions & 0 deletions Shaders/Skybox_VS.hlsl
@@ -6,6 +6,10 @@ cbuffer CameraParameters : register(b0)
float4x4 view_inverse;
float4x4 projection_inverse;
float3 camera_position;
uint frame_number;
uint frames_accumulated;
uint reset_accumulation;
uint accumulation_enabled;
};

struct VertexPosTexCoord
(Diffs for the remaining changed files in this commit are not shown here.)
