// screen_space_reflection_scale.glsl
// Downscales the screen-space-reflection color/depth/normal buffers to half resolution.
  1. #[compute]
  2. #version 450
  3. #VERSION_DEFINES
  4. layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;
  5. /* Specialization Constants (Toggles) */
  6. layout(constant_id = 0) const bool sc_multiview = false;
  7. /* inputs */
  8. layout(set = 0, binding = 0) uniform sampler2D source_ssr;
  9. layout(set = 1, binding = 0) uniform sampler2D source_depth;
  10. layout(set = 1, binding = 1) uniform sampler2D source_normal;
  11. layout(rgba16f, set = 2, binding = 0) uniform restrict writeonly image2D dest_ssr;
  12. layout(r32f, set = 3, binding = 0) uniform restrict writeonly image2D dest_depth;
  13. layout(rgba8, set = 3, binding = 1) uniform restrict writeonly image2D dest_normal;
  14. layout(push_constant, std430) uniform Params {
  15. ivec2 screen_size;
  16. float camera_z_near;
  17. float camera_z_far;
  18. bool orthogonal;
  19. bool filtered;
  20. uint pad[2];
  21. }
  22. params;
  23. void main() {
  24. // Pixel being shaded
  25. ivec2 ssC = ivec2(gl_GlobalInvocationID.xy);
  26. if (any(greaterThanEqual(ssC.xy, params.screen_size))) { //too large, do nothing
  27. return;
  28. }
  29. //do not filter, SSR will generate artifacts if this is done
  30. float divisor = 0.0;
  31. vec4 color;
  32. float depth;
  33. vec4 normal;
  34. if (params.filtered) {
  35. color = vec4(0.0);
  36. depth = 0.0;
  37. normal = vec4(0.0);
  38. for (int i = 0; i < 4; i++) {
  39. ivec2 ofs = ssC << 1;
  40. if (bool(i & 1)) {
  41. ofs.x += 1;
  42. }
  43. if (bool(i & 2)) {
  44. ofs.y += 1;
  45. }
  46. color += texelFetch(source_ssr, ofs, 0);
  47. float d = texelFetch(source_depth, ofs, 0).r;
  48. vec4 nr = texelFetch(source_normal, ofs, 0);
  49. normal.xyz += normalize(nr.xyz * 2.0 - 1.0);
  50. float roughness = normal.w;
  51. if (roughness > 0.5) {
  52. roughness = 1.0 - roughness;
  53. }
  54. roughness /= (127.0 / 255.0);
  55. normal.w += roughness;
  56. if (sc_multiview) {
  57. // we're doing a full unproject so we need the value as is.
  58. depth += d;
  59. } else {
  60. // unproject our Z value so we can use it directly.
  61. d = d * 2.0 - 1.0;
  62. if (params.orthogonal) {
  63. d = ((d + (params.camera_z_far + params.camera_z_near) / (params.camera_z_far - params.camera_z_near)) * (params.camera_z_far - params.camera_z_near)) / 2.0;
  64. } else {
  65. d = 2.0 * params.camera_z_near * params.camera_z_far / (params.camera_z_far + params.camera_z_near - d * (params.camera_z_far - params.camera_z_near));
  66. }
  67. depth += -d;
  68. }
  69. }
  70. color /= 4.0;
  71. depth /= 4.0;
  72. normal.xyz = normalize(normal.xyz / 4.0) * 0.5 + 0.5;
  73. normal.w /= 4.0;
  74. normal.w = normal.w * (127.0 / 255.0);
  75. } else {
  76. ivec2 ofs = ssC << 1;
  77. color = texelFetch(source_ssr, ofs, 0);
  78. depth = texelFetch(source_depth, ofs, 0).r;
  79. normal = texelFetch(source_normal, ofs, 0);
  80. if (!sc_multiview) {
  81. // unproject our Z value so we can use it directly.
  82. depth = depth * 2.0 - 1.0;
  83. if (params.orthogonal) {
  84. depth = -(depth * (params.camera_z_far - params.camera_z_near) - (params.camera_z_far + params.camera_z_near)) / 2.0;
  85. } else {
  86. depth = 2.0 * params.camera_z_near * params.camera_z_far / (params.camera_z_far + params.camera_z_near + depth * (params.camera_z_far - params.camera_z_near));
  87. }
  88. depth = -depth;
  89. }
  90. }
  91. imageStore(dest_ssr, ssC, color);
  92. imageStore(dest_depth, ssC, vec4(depth));
  93. imageStore(dest_normal, ssC, normal);
  94. }