Last active
April 2, 2019 07:29
-
-
Save ddutchie/f6ead1cc04e04b40f5495181c94500a7 to your computer and use it in GitHub Desktop.
RayTraceMaterials
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
using System.Collections; | |
using System.Collections.Generic; | |
using System.IO; | |
using System.Linq; | |
using UnityEngine; | |
using Unity.Collections; | |
using System.Diagnostics; | |
#if UNITY_2018_3_OR_NEWER | |
using UnityEngine.Rendering; | |
#else | |
using UnityEngine.Experimental.Rendering; | |
#endif | |
/// <summary>
/// Progressive path tracer driver: flattens registered scene meshes and
/// materials into compute buffers, dispatches a ray-tracing compute shader
/// from OnRenderImage each frame, and accumulates samples into a converged
/// buffer. Also drives periodic screenshot capture and an external denoiser.
/// </summary>
public class RayTracingMaster : MonoBehaviour
{
    public ComputeShader RayTracingShader;
    public RayTraceSpeed raySpeed;

    /// <summary>Sky rendering mode; uploaded to the shader as _SkyMode.</summary>
    public enum skyMode
    {
        none, realSky, hdri
    }

    [Header("Environment")]
    public skyMode theSkyMode;
    public Color backgroundColor;
    public Light DirectionalLight;
    public Texture SkyboxTexture;

    [Header("Clouds")]
    public Color skyTint;
    public Color groundColor;
    public Texture3D noiseTex1, noiseTex2;

    // Legacy random-sphere scene settings, kept for reference.
    // public int SphereSeed;
    // public Vector2 SphereRadius = new Vector2(3.0f, 8.0f);
    // public uint SpheresMax = 100;
    // public float SpherePlacementRadius = 100.0f;

    private Camera _camera;
    private float _lastFieldOfView;
    private RenderTexture _target;     // random-write target the compute shader renders into
    private RenderTexture _converged;  // progressive accumulation buffer
    private Material _addMaterial;     // Hidden/AddShader: blends each new sample into _converged
    private uint _currentSample = 0;   // samples accumulated so far; reset whenever the view changes
    //private ComputeBuffer _sphereBuffer;
    private List<Transform> _transformsToWatch = new List<Transform>();

    // Static so RayTracingObject components can (un)register themselves
    // without holding a reference to this instance.
    private static bool _meshObjectsNeedRebuilding = false;
    private static List<RayTracingObject> _rayTracingObjects = new List<RayTracingObject>();
    private static List<MeshObject> _meshObjects = new List<MeshObject>();
    private static List<Vector3> _vertices = new List<Vector3>();
    private static List<Vector2> _M_UVs = new List<Vector2>();
    private static List<int> _indices = new List<int>();

    private ComputeBuffer _meshObjectBuffer;
    private ComputeBuffer _vertexBuffer;
    private ComputeBuffer _indexBuffer;
    private ComputeBuffer _MUVBuffer;

    [Header("Capture Settings")]
    public UTJ.FrameCapturer.GBufferRecorder bufferCapture;
    public TMPro.TextMeshProUGUI sampleCount;
    string oldname; // last screenshot filename, handed to the denoiser one cycle later
    public bool shouldRender = false;   // enables the capture/denoise pipeline in Update
    public int qualityDivider = 8;      // render resolution is Screen size / qualityDivider

    // NOTE(review): never used in this class — presumably left over from an
    // AsyncGPUReadback capture path; confirm before deleting.
    Queue<AsyncGPUReadbackRequest> _requests = new Queue<AsyncGPUReadbackRequest>();

    // Per-object data uploaded to the GPU. Field order and sizes must match
    // the MeshObject struct in the compute shader and the 136-byte stride used
    // below (64 matrix + 2*4 index ints + 3*12 float3 + 3*4 float + 4*4 ints).
    struct MeshObject
    {
        public Matrix4x4 localToWorldMatrix;
        public int indices_offset;
        public int indices_count;
        public Vector3 albedo;
        public Vector3 specular;
        public float smoothness;
        public Vector3 emission;
        public float transparency;
        public float refraction;
        public int colormapID;
        public int normalmapID;
        public int metalmapID;
        public int roughmapID;
    }

    // Unused at runtime (sphere tracing is commented out); kept for reference.
    struct Sphere
    {
        public Vector3 position;
        public float radius;
        public Vector3 albedo;
        public Vector3 specular;
        public float smoothness;
        public Vector3 emission;
        public float refraction;
    }

    private void Awake()
    {
        _camera = GetComponent<Camera>();
        // Movement of the camera or the light invalidates accumulated samples.
        _transformsToWatch.Add(transform);
        _transformsToWatch.Add(DirectionalLight.transform);
    }

    private void OnEnable()
    {
        _currentSample = 0;
        // FIX: OnDisable releases the compute buffers, so a disable/enable
        // cycle must force a rebuild; previously the shader would be
        // dispatched with released (null) buffers after re-enabling.
        _meshObjectsNeedRebuilding = true;
    }

    private void OnDisable()
    {
        //_sphereBuffer?.Release();
        _meshObjectBuffer?.Release();
        _vertexBuffer?.Release();
        _indexBuffer?.Release();
        _MUVBuffer?.Release();
    }

    /// <summary>
    /// Waits for end of frame, reads the back buffer and writes it out as a
    /// timestamped PNG in the working directory. Currently unused (screenshot
    /// capture goes through ScreenCapture in Update) but kept as an alternative.
    /// </summary>
    IEnumerator SaveImage()
    {
        yield return new WaitForEndOfFrame();
        int width = Screen.width;
        int height = Screen.height;
        Texture2D tex = new Texture2D(width, height, TextureFormat.RGBAFloat, false);
        // Read screen contents into the texture.
        tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        tex.Apply();
        byte[] bytes = tex.EncodeToPNG();
        Object.Destroy(tex);
        File.WriteAllBytes(Time.time + "-" + _currentSample + ".png", bytes);
    }

    /// <summary>Synchronous screen grab variant using RGBA32; also unused.</summary>
    void SaveBitmap()
    {
        var tex = new Texture2D(Screen.width, Screen.height, TextureFormat.RGBA32, false);
        int width = Screen.width;
        int height = Screen.height;
        tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        tex.Apply();
        File.WriteAllBytes(Time.time + "-" + _currentSample + ".png", ImageConversion.EncodeToPNG(tex));
        Destroy(tex);
    }

    /// <summary>
    /// Drives the capture pipeline when shouldRender is set: records G-buffers
    /// (albedo/normal for the denoiser) during the first two samples, takes a
    /// screenshot on samples divisible by 50, and launches the denoiser on
    /// samples divisible by 51. Also resets accumulation whenever the camera
    /// FOV or a watched transform changes.
    /// </summary>
    private void Update()
    {
        if (shouldRender)
        {
            if (_currentSample == 0)
            {
                bufferCapture.BeginRecording();
            }
            else if (_currentSample == 1)
            {
                bufferCapture.EndRecording();
                bufferCapture.enabled = false;
            }
            if (_currentSample % 50 == 0)
            {
                string name = Time.time + "-" + _currentSample + ".png";
                UnityEngine.ScreenCapture.CaptureScreenshot(name);
                oldname = name;
            }
            else if (_currentSample % 51 == 0)
            {
                // Denoise the most recent screenshot (written on a prior frame).
                Denoise(Application.dataPath + "/../" + oldname, Application.dataPath + "/../" + "/Capture/");
            }
        }
        if (_camera.fieldOfView != _lastFieldOfView)
        {
            _currentSample = 0;
            _lastFieldOfView = _camera.fieldOfView;
        }
        foreach (Transform t in _transformsToWatch)
        {
            if (t.hasChanged)
            {
                _currentSample = 0;
                t.hasChanged = false;
            }
        }
    }

    /// <summary>Called by RayTracingObject.OnEnable to add itself to the scene.</summary>
    public static void RegisterObject(RayTracingObject obj)
    {
        _rayTracingObjects.Add(obj);
        _meshObjectsNeedRebuilding = true;
    }

    /// <summary>Called by RayTracingObject.OnDisable to remove itself from the scene.</summary>
    public static void UnregisterObject(RayTracingObject obj)
    {
        _rayTracingObjects.Remove(obj);
        _meshObjectsNeedRebuilding = true;
    }

    /// <summary>
    /// Flattens all registered objects into the vertex/index/UV and per-object
    /// compute buffers and builds the material texture arrays. No-op unless an
    /// object was (un)registered since the last build.
    /// </summary>
    private void RebuildMeshObjectBuffers()
    {
        if (!_meshObjectsNeedRebuilding)
        {
            return;
        }
        _meshObjectsNeedRebuilding = false;
        _currentSample = 0;

        List<Texture2D> colorMaps = new List<Texture2D>();
        List<Texture2D> normalMaps = new List<Texture2D>();
        List<Texture2D> metalMaps = new List<Texture2D>();
        List<Texture2D> roughMaps = new List<Texture2D>();

        // Clear all lists.
        _meshObjects.Clear();
        _vertices.Clear();
        _indices.Clear();
        // FIX: _M_UVs was never cleared, so every rebuild appended another
        // full copy of the UVs and misaligned the UV buffer vs. the vertices.
        _M_UVs.Clear();

        // Texture-array slot counters; ids are 1-based (the shader samples at id - 1).
        int colormapid = 0;
        int normalmapid = 0;
        int metalmapid = 0;
        int roughmapid = 0;

        // Loop over all objects and gather their data.
        foreach (RayTracingObject obj in _rayTracingObjects)
        {
            Mesh mesh = obj.GetComponent<MeshFilter>().sharedMesh;
            Material mat = obj.GetComponent<MeshRenderer>().sharedMaterial;

            // NOTE(review): an object lacking one of these textures keeps the
            // previous object's map id — confirm this aliasing is intended.
            if (mat.GetTexture("_MainTex") != null)
            {
                colormapid++;
                colorMaps.Add(mat.GetTexture("_MainTex") as Texture2D);
            }
            if (mat.GetTexture("_BumpMap") != null)
            {
                normalmapid++;
                normalMaps.Add(mat.GetTexture("_BumpMap") as Texture2D);
            }
            if (mat.GetTexture("_SpecGlossMap") != null)
            {
                metalmapid++;
                metalMaps.Add(mat.GetTexture("_SpecGlossMap") as Texture2D);
            }
            // Roughness maps are not collected yet (original code was commented out).

            // Add vertex data.
            int firstVertex = _vertices.Count;
            _vertices.AddRange(mesh.vertices);

            // Add index data - if the vertex buffer wasn't empty before, the
            // indices need to be offset.
            int firstIndex = _indices.Count;
            var indices = mesh.GetIndices(0);
            _indices.AddRange(indices.Select(index => index + firstVertex));

            // Planar-projected UVs from object-space XZ.
            // FIX: start at firstVertex — this loop previously ran from 0 and
            // re-added UVs for every earlier object's vertices on each pass.
            for (int i = firstVertex; i < _vertices.Count; i++)
            {
                _M_UVs.Add(new Vector2(_vertices[i].x, _vertices[i].z));
            }

            // Add the object itself.
            _meshObjects.Add(new MeshObject()
            {
                localToWorldMatrix = obj.transform.localToWorldMatrix,
                indices_offset = firstIndex,
                indices_count = indices.Length,
                albedo = new Vector3(mat.GetColor("_Color").r, mat.GetColor("_Color").g, mat.GetColor("_Color").b),
                specular = new Vector3(mat.GetColor("_SpecColor").r, mat.GetColor("_SpecColor").g, mat.GetColor("_SpecColor").b),
                smoothness = mat.GetFloat("_Glossiness"),
                emission = new Vector3(mat.GetColor("_EmissionColor").r, mat.GetColor("_EmissionColor").g, mat.GetColor("_EmissionColor").b),
                transparency = mat.GetColor("_Color").a,
                refraction = obj.IOR,
                colormapID = colormapid,
                normalmapID = normalmapid,
                metalmapID = metalmapid,
                roughmapID = roughmapid
            });
        }

        // The four texture categories previously used four copy-pasted blocks;
        // collapsed into a single helper.
        UploadTextureArray(colorMaps, "colormap");
        UploadTextureArray(normalMaps, "normalmap");
        UploadTextureArray(metalMaps, "metalmap");
        UploadTextureArray(roughMaps, "roughmap");

        CreateComputeBuffer(ref _meshObjectBuffer, _meshObjects, 136);
        CreateComputeBuffer(ref _vertexBuffer, _vertices, 12);
        CreateComputeBuffer(ref _indexBuffer, _indices, 4);
        CreateComputeBuffer(ref _MUVBuffer, _M_UVs, 8);
    }

    /// <summary>
    /// Packs a list of textures into a Texture2DArray (sized from the first
    /// entry) and binds it to kernel 0 under <paramref name="shaderName"/>.
    /// Does nothing when the list is empty.
    /// </summary>
    private void UploadTextureArray(List<Texture2D> maps, string shaderName)
    {
        if (maps.Count == 0)
        {
            return;
        }
        Texture2DArray mapArray = new Texture2DArray(maps[0].width, maps[0].height, maps.Count, TextureFormat.RGBA32, true, false);
        mapArray.filterMode = FilterMode.Bilinear;
        mapArray.wrapMode = TextureWrapMode.Repeat;
        // Copy each source texture's base mip into its slice; Apply()
        // regenerates the mip chain.
        for (int i = 0; i < maps.Count; i++)
        {
            mapArray.SetPixels(maps[i].GetPixels(0), i, 0);
        }
        mapArray.Apply();
        RayTracingShader.SetTexture(0, shaderName, mapArray);
    }

    /// <summary>
    /// Creates/resizes <paramref name="buffer"/> to hold <paramref name="data"/>
    /// with the given stride and uploads the data. Releases the buffer when the
    /// data is empty or the size/stride no longer match.
    /// </summary>
    private static void CreateComputeBuffer<T>(ref ComputeBuffer buffer, List<T> data, int stride)
        where T : struct
    {
        if (buffer != null)
        {
            // If no data or buffer doesn't match the given criteria, release it.
            if (data.Count == 0 || buffer.count != data.Count || buffer.stride != stride)
            {
                buffer.Release();
                buffer = null;
            }
        }
        if (data.Count != 0)
        {
            if (buffer == null)
            {
                buffer = new ComputeBuffer(data.Count, stride);
            }
            buffer.SetData(data);
        }
    }

    /// <summary>Binds a buffer to kernel 0 if it exists (skips released/null buffers).</summary>
    private void SetComputeBuffer(string name, ComputeBuffer buffer)
    {
        if (buffer != null)
        {
            RayTracingShader.SetBuffer(0, name, buffer);
        }
    }

    public Texture testTexture;

    /// <summary>Uploads all per-frame shader uniforms, textures and buffers.</summary>
    private void SetShaderParameters()
    {
        RayTracingShader.SetTexture(0, "_SkyboxTexture", SkyboxTexture);
        RayTracingShader.SetTexture(0, "_NoiseTex1", noiseTex1);
        RayTracingShader.SetTexture(0, "_NoiseTex2", noiseTex2);
        RayTracingShader.SetMatrix("_CameraToWorld", _camera.cameraToWorldMatrix);
        RayTracingShader.SetMatrix("_CameraInverseProjection", _camera.projectionMatrix.inverse);
        // Random sub-pixel jitter and seed give a fresh sample each frame.
        RayTracingShader.SetVector("_PixelOffset", new Vector2(Random.value, Random.value));
        RayTracingShader.SetFloat("_Seed", Random.value);
        RayTracingShader.SetVector("_GroundColor", new Vector3(groundColor.r, groundColor.g, groundColor.b));
        RayTracingShader.SetVector("sunColor", new Vector3(DirectionalLight.color.r, DirectionalLight.color.g, DirectionalLight.color.b));
        RayTracingShader.SetVector("_SkyTint", new Vector3(skyTint.r, skyTint.g, skyTint.b));
        RayTracingShader.SetVector("_BackgroundColor", new Vector3(backgroundColor.r, backgroundColor.g, backgroundColor.b));
        RayTracingShader.SetInt("_SkyMode", (int)theSkyMode);
        Vector3 l = DirectionalLight.transform.forward;
        // xyz = light direction, w = intensity.
        RayTracingShader.SetVector("_DirectionalLight", new Vector4(l.x, l.y, l.z, DirectionalLight.intensity));
        RayTracingShader.SetTexture(0, "_testTexture", testTexture);
        SetComputeBuffer("_MeshObjects", _meshObjectBuffer);
        SetComputeBuffer("_Vertices", _vertexBuffer);
        SetComputeBuffer("_Indices", _indexBuffer);
        SetComputeBuffer("_M_UVs", _MUVBuffer);
    }

    /// <summary>
    /// (Re)creates the render target and accumulation buffer at the current
    /// divided screen resolution, resetting accumulation when resized.
    /// </summary>
    private void InitRenderTexture()
    {
        if (_target == null || _target.width != Screen.width / qualityDivider || _target.height != Screen.height / qualityDivider)
        {
            // Release render textures if we already have them.
            if (_target != null)
            {
                _target.Release();
                _converged.Release();
            }
            _target = new RenderTexture(Screen.width / qualityDivider, Screen.height / qualityDivider, 0,
                RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
            _target.enableRandomWrite = true;
            _target.Create();
            _converged = new RenderTexture(Screen.width / qualityDivider, Screen.height / qualityDivider, 0,
                RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
            _converged.enableRandomWrite = true;
            _converged.Create();
            // Reset sampling.
            _currentSample = 0;
        }
    }

    public bool shouldRenderRays = false;

    /// <summary>
    /// Dispatches the compute shader (8x8 thread groups), accumulates the new
    /// sample into _converged via the additive blit material, and blits the
    /// result to the destination.
    /// </summary>
    private void Render(RenderTexture destination)
    {
        InitRenderTexture();
        RayTracingShader.SetTexture(0, "Result", _target);
        int threadGroupsX = Mathf.CeilToInt(Screen.width / qualityDivider / 8.0f);
        int threadGroupsY = Mathf.CeilToInt(Screen.height / qualityDivider / 8.0f);
        RayTracingShader.Dispatch(0, threadGroupsX, threadGroupsY, 1);
        if (_addMaterial == null)
            _addMaterial = new Material(Shader.Find("Hidden/AddShader"));
        // _Sample weights the running average in the accumulation shader.
        _addMaterial.SetFloat("_Sample", _currentSample);
        Graphics.Blit(_target, _converged, _addMaterial);
        Graphics.Blit(_converged, destination);
        _currentSample++;
        sampleCount.text = _currentSample + " Samples";
    }

    /// <summary>
    /// Launches the external denoiser executable (from StreamingAssets) on the
    /// given image, fire-and-forget. Never throws; failures are logged.
    /// </summary>
    public void Denoise(string pathToDenoise, string captureFolder)
    {
        try
        {
            Process myProcess = new Process();
            myProcess.StartInfo.WindowStyle = ProcessWindowStyle.Normal;
            myProcess.StartInfo.CreateNoWindow = true;
            myProcess.StartInfo.UseShellExecute = false;
            // NOTE(review): Windows-style path separator — this path only works
            // on Windows builds.
            myProcess.StartInfo.FileName = Application.streamingAssetsPath + "\\Denoiser_v2.2\\Denoiser.exe";
            string ipath = "-i " + pathToDenoise;
            string opath = " -o " + pathToDenoise + "_output.png";
            // NOTE(review): the albedo/normal guide arguments are built but not
            // passed to the process — confirm whether they should be appended.
            string apath = " -a " + captureFolder + "/Albedo_0000.png";
            string npath = " -n " + captureFolder + "/Normal_0000.png";
            myProcess.StartInfo.Arguments = ipath + opath;
            myProcess.EnableRaisingEvents = true;
            myProcess.Start();
        }
        catch (System.Exception e)
        {
            print(e);
        }
    }

    /// <summary>Copies a RenderTexture into a readable RGBA32 Texture2D.</summary>
    Texture2D toTexture2D(RenderTexture rTex)
    {
        Texture2D tex = new Texture2D(rTex.width, rTex.height, TextureFormat.RGBA32, false);
        RenderTexture.active = rTex;
        tex.ReadPixels(new Rect(0, 0, rTex.width, rTex.height), 0, 0);
        tex.Apply();
        return tex;
    }

    /// <summary>
    /// Per-frame entry point: rebuilds buffers if needed and renders a ray
    /// traced sample, or passes the camera image through when disabled.
    /// </summary>
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        raySpeed.StartLoop();
        if (shouldRenderRays)
        {
            RebuildMeshObjectBuffers();
            SetShaderParameters();
            Render(destination);
        }
        else
        {
            Graphics.Blit(source, destination);
        }
        // 8 = max ray bounces assumed per pixel for the rays/sec estimate.
        raySpeed.EndLoop((Screen.width / qualityDivider) * (Screen.height / qualityDivider) * 8);
    }
}
___________________________________________________________________________________________________________________ | |
_________________________________________SHADER____________________________________________________________________ | |
// Ray-tracing compute shader driven by RayTracingMaster.cs (kernel 0).
#pragma kernel CSMain
#include "UnityCG.cginc"
#include "Lighting.cginc"

// Output image, written with random-access writes by the kernel.
RWTexture2D<float4> Result;

// Camera matrices uploaded each frame by SetShaderParameters().
float4x4 _CameraToWorld;
float4x4 _CameraInverseProjection;

// xyz = directional light forward vector, w = intensity (see C# side).
float4 _DirectionalLight;

// Sub-pixel jitter for progressive anti-aliasing, randomized per frame.
float2 _PixelOffset;

Texture2D<float4> _SkyboxTexture;
SamplerState sampler_SkyboxTexture;

static const float PI = 3.14159265f;
static const float EPSILON = 1e-8;

// Matches the C# skyMode enum: 0 = none, 1 = realSky, 2 = hdri.
int _SkyMode;
float3 _BackgroundColor;
//-------------------------------------
//- UTILITY
// Scaled dot product clamped to [0, 1]; f defaults to 1.
float sdot(float3 x, float3 y, float f = 1.0f)
{
    float scaled = dot(x, y) * f;
    return saturate(scaled);
}
// Scalar "energy" of a color: the mean of its three channels.
float energy(float3 color)
{
    const float oneThird = 1.0f / 3.0f;
    return dot(color, oneThird);
}
//-------------------------------------
//- RANDOMNESS
// Current pixel coordinate and running seed. _Seed is uploaded per frame
// (Random.value on the C# side) and incremented after each draw below so
// successive calls within a pixel return different values.
float2 _Pixel;
float _Seed;
// Classic sin/frac hash of (_Seed, _Pixel); returns a pseudo-random float in [0, 1).
float rand()
{
    //return 0.5;
    float result = frac(sin(_Seed / 100.0f * dot(_Pixel, float2(12.9898f, 78.233f))) * 43758.5453f);
    _Seed += 1.0f;
    return result;
}
//-------------------------------------
//- SPHERES
//
// Legacy analytic-sphere scene, disabled in favor of mesh tracing.
//struct Sphere
//{
// float3 position;
// float radius;
// float3 albedo;
// float3 specular;
// float smoothness;
// float3 emission;
//};
//StructuredBuffer<Sphere> _Spheres;

// Per-material texture arrays built and bound by the C# side
// (RebuildMeshObjectBuffers). MeshObject's *mapid fields index into these;
// ids appear to be 1-based (IntersectMeshObject loads at colormapid - 1).
Texture2DArray<float4> colormap;
Texture2DArray<float4> normalmap;
Texture2DArray<float4> metalmap;
Texture2DArray<float4> roughmap;
SamplerState sampledMap;
//-------------------------------------
//- MESHES
// Mirrors the C# MeshObject struct uploaded with a 136-byte stride; field
// order and sizes must stay in sync with the CPU side.
struct MeshObject
{
    float4x4 localToWorldMatrix;
    int indices_offset;   // first entry of this object in _Indices
    int indices_count;    // number of indices (3 per triangle)
    float3 albedo;
    float3 specular;
    float smoothness;
    float3 emission;
    float transparency;
    float refraction;
    int colormapid;
    int normalmapid;
    int metalmapid;
    int roughmapid;
};
Texture2D<float4> _testTexture;
// Scene geometry flattened into global buffers; per-object indices are
// pre-offset into the shared vertex buffer on the CPU side.
StructuredBuffer<MeshObject> _MeshObjects;
StructuredBuffer<float3> _Vertices;
StructuredBuffer<int> _Indices;
StructuredBuffer<float2> _M_UVs;
//-------------------------------------
//- RAY
struct Ray
{
    float3 origin;
    float3 direction;  // normalized by CreateCameraRay
    float3 energy;     // remaining throughput; starts at (1,1,1) in CreateRay
    float2 uv;         // screen-space coords the ray was generated from
};
// Constructs a ray with full initial energy (1, 1, 1).
Ray CreateRay(float3 origin, float3 direction, float2 uv)
{
    Ray r;
    r.uv = uv;
    r.energy = float3(1.0f, 1.0f, 1.0f);
    r.direction = direction;
    r.origin = origin;
    return r;
}
// Builds a world-space primary ray through screen position uv by unprojecting
// through the inverse projection and camera-to-world matrices.
Ray CreateCameraRay(float2 uv)
{
    // Transform the camera origin to world space
    float3 origin = mul(_CameraToWorld, float4(0.0f, 0.0f, 0.0f, 1.0f)).xyz;
    // Invert the perspective projection of the view-space position
    float3 direction = mul(_CameraInverseProjection, float4(uv, 0.0f, 1.0f)).xyz;
    // Transform the direction from camera to world space and normalize
    direction = mul(_CameraToWorld, float4(direction, 0.0f)).xyz;
    direction = normalize(direction);
    return CreateRay(origin, direction,uv);
}
//-------------------------------------
//- RAYHIT
// Closest-hit record; material fields are copied from the intersected
// MeshObject in IntersectMeshObject.
struct RayHit
{
    float3 position;
    float distance;    // 1.#INF until a hit is recorded
    float3 normal;     // flat geometric normal of the hit triangle
    float3 albedo;
    float3 specular;
    float smoothness;
    float3 emission;
    float transparency;
    float refraction;
    int colormapid;
    int normalmapid;
    int metalmapid;
    int roughmapid;
    float2 texcoords;  // texel coords used against the texture arrays
};
// Sentinel "no hit yet" record: infinite distance, everything else zeroed.
RayHit CreateRayHit()
{
    RayHit hit;
    hit.position = float3(0.0f, 0.0f, 0.0f);
    hit.distance = 1.#INF;
    hit.normal = float3(0.0f, 0.0f, 0.0f);
    hit.albedo = float3(0.0f, 0.0f, 0.0f);
    hit.specular = float3(0.0f, 0.0f, 0.0f);
    hit.smoothness = 0.0f;
    hit.emission = float3(0.0f, 0.0f, 0.0f);
    // FIX: the members below were left uninitialized, which yields undefined
    // values for rays that miss all geometry (and uninitialized-variable
    // compiler diagnostics when the struct is returned).
    hit.transparency = 0.0f;
    hit.refraction = 0.0f;
    hit.colormapid = 0;
    hit.normalmapid = 0;
    hit.metalmapid = 0;
    hit.roughmapid = 0;
    hit.texcoords = float2(0.0f, 0.0f);
    return hit;
}
// Snell refraction: returns true and writes the refracted direction, or
// returns false on total internal reflection (discriminant <= 0).
// NOTE(review): the refracted formula uses the unnormalized v while dt uses
// the normalized uv — the reference formulation uses uv in both places;
// confirm which is intended before changing the visuals.
bool Refract(float3 v, float3 n, float niOverNt, out float3 refracted) {
    float3 uv = normalize(v);
    float dt = dot(uv, n);
    float discriminant = 1.0 - niOverNt * niOverNt * (1 - dt * dt);
    if (discriminant > 0) {
        refracted = niOverNt * (v - n * dt) - n * sqrt(discriminant);
        return true;
    }
    // FIX: HLSL requires 'out' parameters to be assigned on every path; the
    // total-internal-reflection branch previously left 'refracted' unwritten.
    refracted = float3(0.0f, 0.0f, 0.0f);
    return false;
}
// Schlick's approximation of the Fresnel reflectance for a given cosine of
// the incident angle and index of refraction.
float Schlick(float cosine, float refIdx) {
    float r0 = (1 - refIdx) / (1 + refIdx);
    r0 = r0 * r0;
    // Blend from base reflectance toward 1 at grazing angles.
    return lerp(r0, 1.0, pow((1 - cosine), 5));
}
//------------------------------------- | |
//- INTERSECTION | |
// | |
//void IntersectGroundPlane(Ray ray, inout RayHit bestHit) | |
//{ | |
// Calculate distance along the ray where the ground plane is intersected | |
// float t = -ray.origin.y / ray.direction.y; | |
// if (t > 0 && t < bestHit.distance) | |
// { | |
// bestHit.distance = t; | |
// bestHit.position = ray.origin + t * ray.direction; | |
// bestHit.normal = float3(0.0f, 1.0f, 0.0f); | |
// bestHit.albedo = 0.5f; | |
// bestHit.specular = 1.0f; | |
// bestHit.smoothness = 1.0f; | |
// bestHit.emission = float3(0.0f, 0.0f, 0.0f); | |
// bestHit.transparency = 1.0; | |
// bestHit.refraction = 1.0; | |
// | |
// } | |
//} | |
//void IntersectSphere(Ray ray, inout RayHit bestHit, Sphere sphere) | |
//{ | |
// // Calculate distance along the ray where the sphere is intersected | |
// float3 d = ray.origin - sphere.position; | |
// float p1 = -dot(ray.direction, d); | |
// float p2sqr = p1 * p1 - dot(d, d) + sphere.radius * sphere.radius; | |
// if (p2sqr < 0) | |
// return; | |
// float p2 = sqrt(p2sqr); | |
// float t = p1 - p2 > 0 ? p1 - p2 : p1 + p2; | |
// if (t > 0 && t < bestHit.distance) | |
// { | |
// bestHit.distance = t; | |
// bestHit.position = ray.origin + t * ray.direction; | |
// bestHit.normal = normalize(bestHit.position - sphere.position); | |
// bestHit.albedo = sphere.albedo; | |
// bestHit.specular = sphere.specular; | |
// bestHit.smoothness = sphere.smoothness; | |
// bestHit.emission = sphere.emission; | |
// } | |
//} | |
// Möller–Trumbore ray/triangle intersection (the 1997 "Fast, Minimum Storage"
// algorithm). On a hit returns true with t = distance along the ray and
// (u, v) = barycentric coordinates of the hit point. Back faces are culled
// via the det < EPSILON test.
bool IntersectTriangle_MT97(Ray ray, float3 vert0, float3 vert1, float3 vert2,
    inout float t, inout float u, inout float v)
{
    // find vectors for two edges sharing vert0
    float3 edge1 = vert1 - vert0;
    float3 edge2 = vert2 - vert0;
    // begin calculating determinant - also used to calculate U parameter
    float3 pvec = cross(ray.direction, edge2);
    // if determinant is near zero, ray lies in plane of triangle
    float det = dot(edge1, pvec);
    // use backface culling
    if (det < EPSILON)
        return false;
    float inv_det = 1.0f / det;
    // calculate distance from vert0 to ray origin
    float3 tvec = ray.origin - vert0;
    // calculate U parameter and test bounds
    u = dot(tvec, pvec) * inv_det;
    if (u < 0.0 || u > 1.0f)
        return false;
    // prepare to test V parameter
    float3 qvec = cross(tvec, edge1);
    // calculate V parameter and test bounds
    v = dot(ray.direction, qvec) * inv_det;
    if (v < 0.0 || u + v > 1.0f)
        return false;
    // calculate t, ray intersects triangle
    t = dot(edge2, qvec) * inv_det;
    return true;
}
//float4 _Time;
float _Seed01;
// Gradient-noise style hash of integer pixel coordinates; deterministic.
float GradNoise(float2 xy) {
    float linearMix = 0.06711056f * float(xy.x) + 0.00583715f * float(xy.y);
    return frac(52.9829189f * frac(linearMix));
}
// Time-animated noise: hashes the (wrapped) pixel cell offset by a
// seed that varies with _Time.y each frame.
float Noise(float2 uv) {
    float2 cell = floor(fmod(uv, 1024));
    return GradNoise(cell + _Seed01 * _Time.y);
}
// Tests the ray against every triangle of meshObject (its slice of the global
// index buffer) and updates bestHit with the closest intersection, copying
// the object's material parameters into the hit record.
void IntersectMeshObject(Ray ray, inout RayHit bestHit, MeshObject meshObject)
{
    uint offset = meshObject.indices_offset;
    uint count = offset + meshObject.indices_count;
    for (uint i = offset; i < count; i += 3)
    {
        // Triangle vertices transformed to world space.
        float3 v0 = (mul(meshObject.localToWorldMatrix, float4(_Vertices[_Indices[i]], 1))).xyz;
        float3 v1 = (mul(meshObject.localToWorldMatrix, float4(_Vertices[_Indices[i + 1]], 1))).xyz;
        float3 v2 = (mul(meshObject.localToWorldMatrix, float4(_Vertices[_Indices[i + 2]], 1))).xyz;
        float t, u, v;
        if (IntersectTriangle_MT97(ray, v0, v1, v2, t, u, v))
        {
            if (t > 0 && t < bestHit.distance)
            {
                // NOTE(review): this scales only the third vertex's UV by the
                // barycentric (u, v) and a fixed 1024 — not a true barycentric
                // interpolation of all three UVs; confirm before relying on it.
                bestHit.texcoords = float2(u*_M_UVs[_Indices[i + 2]].x, v*_M_UVs[_Indices[i + 2]].y)*1024;
                bestHit.distance = t;
                bestHit.position = ray.origin + t * ray.direction;
                // Flat geometric normal from the triangle edges.
                bestHit.normal = normalize(cross(v1 - v0, v2 - v0));
                // Albedo loaded from the color texture array; CPU-side ids are
                // 1-based, hence the -1 slice index.
                bestHit.albedo = /*meshObject.albedo **/ colormap[float3(bestHit.texcoords, meshObject.colormapid-1)]; /* / _testTexture[float2(u, v)].xyz*/;
                bestHit.specular = meshObject.specular;
                bestHit.smoothness = meshObject.smoothness;
                bestHit.emission = meshObject.emission;
                bestHit.transparency = meshObject.transparency;
                bestHit.refraction = meshObject.refraction;
                bestHit.colormapid= meshObject.colormapid;
                bestHit.normalmapid = meshObject.normalmapid;
                bestHit.metalmapid = meshObject.metalmapid;
                bestHit.roughmapid = meshObject.roughmapid;
            }
        }
    }
}
//-------------------------------------
//- TRACE
// Finds the closest intersection of the ray with all mesh objects.
// (Ground-plane and sphere tracing are disabled below.)
RayHit Trace(Ray ray)
{
    RayHit bestHit = CreateRayHit();
    uint count, stride, i;
    // Trace ground plane
    // IntersectGroundPlane(ray, bestHit);
    // Trace spheres
    //_Spheres.GetDimensions(count, stride);
    /*for (i = 0; i < count; i++)
    {
    IntersectSphere(ray, bestHit, _Spheres[i]);
    }
    */
    //Trace mesh objects
    _MeshObjects.GetDimensions(count, stride);
    if (count > 0) {
        for (i = 0; i < count; i++)
        {
            IntersectMeshObject(ray, bestHit, _MeshObjects[i]);
        }
    }
    return bestHit;
}
//-------------------------------------
//- SKY
// Procedural atmospheric-scattering sky; appears adapted from Unity's
// built-in procedural skybox shader (see the credit comment before MieSky).
half _Exposure;
float3 _GroundColor;
half _SunSize;
float3 _SkyTint;
half _AtmosphereThickness;
#define GAMMA 2.2
// Converts to gamma only when the project renders in linear color space.
#define COLOR_2_GAMMA(color) ((unity_ColorSpaceDouble.r>2.0) ? pow(color,1.0/GAMMA) : color)
#define COLOR_2_LINEAR(color) color
#define LINEAR_2_LINEAR(color) color
// RGB wavelengths
// .35 (.62=158), .43 (.68=174), .525 (.75=190)
static const float3 kDefaultScatteringWavelength = float3(.65, .57, .475);
static const float3 kVariableRangeForScatteringWavelength = float3(.15, .15, .15);
#define OUTER_RADIUS 1.025
static const float kOuterRadius = OUTER_RADIUS;
static const float kOuterRadius2 = OUTER_RADIUS * OUTER_RADIUS;
static const float kInnerRadius = 1.0;
static const float kInnerRadius2 = 1.0;
static const float kCameraHeight = 0.0001;
#define kRAYLEIGH (lerp(0, 0.0025, pow(_AtmosphereThickness,2.5))) // Rayleigh constant
#define kMIE 0.0010 // Mie constant
#define kSUN_BRIGHTNESS 20 // Sun brightness
#define kMAX_SCATTER 50.0 // Maximum scattering value, to prevent math overflows on Adrenos
static const half kSunScale = 400.0 * kSUN_BRIGHTNESS;
static const float kKmESun = kMIE * kSUN_BRIGHTNESS;
static const float kKm4PI = kMIE * 4.0 * 3.14159265;
static const float kScale = 1.0 / (OUTER_RADIUS - 1.0);
static const float kScaleDepth = 0.25;
static const float kScaleOverScaleDepth = (1.0 / (OUTER_RADIUS - 1.0)) / 0.25;
static const float kSamples = 4.0; // THIS IS UNROLLED MANUALLY, DON'T TOUCH
#define MIE_G (-0.990)
#define MIE_G2 0.9801
#define SKY_GROUND_THRESHOLD 0.02
// Outputs of the sky evaluation.
float3 skyColor;
float3 groundColor;
float3 sunColor;
// Rayleigh phase function given the squared cosine of the view/light angle.
half getRayleighPhase(half eyeCos2)
{
    return 0.75 + 0.75*eyeCos2;
}
half getRayleighPhase(half3 light, half3 ray)
{
    half eyeCos = dot(light, ray);
    return getRayleighPhase(eyeCos * eyeCos);
}
// Polynomial approximation of the atmospheric optical-depth scale integral.
float scale(float inCos)
{
    float x = 1.0 - inCos;
    return 0.25 * exp(-0.00287 + x * (0.459 + x * (3.83 + x * (-6.80 + x * 5.25))));
}
//Thanks Keijiro | |
void MieSky(Ray r, float2 uv, out float3 skyColor, inout float3 groundColor, inout float3 sunColor) { | |
_Exposure = 1.45; | |
_AtmosphereThickness = 1; | |
float3 kSkyTintInGammaSpace = COLOR_2_GAMMA(_SkyTint); // convert tint from Linear back to Gamma | |
float3 kScatteringWavelength = lerp( | |
kDefaultScatteringWavelength - kVariableRangeForScatteringWavelength, | |
kDefaultScatteringWavelength + kVariableRangeForScatteringWavelength, | |
half3(1, 1, 1) - kSkyTintInGammaSpace); // using Tint in sRGB gamma allows for more visually linear interpolation and to keep (.5) at (128, gray in sRGB) point | |
float3 kInvWavelength = 1.0 / pow(kScatteringWavelength, 4); | |
float kKrESun = kRAYLEIGH * kSUN_BRIGHTNESS; | |
float kKr4PI = kRAYLEIGH * 4.0 * 3.14159265; | |
float3 cameraPos = float3(0, kInnerRadius + kCameraHeight, 0); // The camera's current position | |
// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere) | |
float3 eyeRay = r.direction; | |
float far = 0.0; | |
half3 cIn, cOut; | |
if (eyeRay.y >= 0.0) | |
{ | |
// Sky | |
// Calculate the length of the "atmosphere" | |
far = sqrt(kOuterRadius2 + kInnerRadius2 * eyeRay.y * eyeRay.y - kInnerRadius2) - kInnerRadius * eyeRay.y; | |
float3 pos = cameraPos + far * eyeRay; | |
// Calculate the ray's starting position, then calculate its scattering offset | |
float height = kInnerRadius + kCameraHeight; | |
float depth = exp(kScaleOverScaleDepth * (-kCameraHeight)); | |
float startAngle = dot(eyeRay, cameraPos) / height; | |
float startOffset = depth * scale(startAngle); | |
// Initialize the scattering loop variables | |
float sampleLength = far / kSamples; | |
float scaledLength = sampleLength * kScale; | |
float3 sampleRay = eyeRay * sampleLength; | |
float3 samplePoint = cameraPos + sampleRay * 0.5; | |
// Now loop through the sample rays | |
float3 frontColor = float3(0.0, 0.0, 0.0); | |
// Weird workaround: WP8 and desktop FL_9_1 do not like the for loop here | |
// (but an almost identical loop is perfectly fine in the ground calculations below) | |
// Just unrolling this manually seems to make everything fine again. | |
for(int i=0; i<int(kSamples); i++) | |
{ | |
float height = length(samplePoint); | |
float depth = exp(kScaleOverScaleDepth * (kInnerRadius - height)); | |
float lightAngle = dot(_WorldSpaceLightPos0.xyz, samplePoint) / height; | |
float cameraAngle = dot(eyeRay, samplePoint) / height; | |
float scatter = (startOffset + depth * (scale(lightAngle) - scale(cameraAngle))); | |
float3 attenuate = exp(-clamp(scatter, 0.0, kMAX_SCATTER) * (kInvWavelength * kKr4PI + kKm4PI)); | |
frontColor += attenuate * (depth * scaledLength); | |
samplePoint += sampleRay; | |
} | |
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader | |
cIn = frontColor * (kInvWavelength * kKrESun); | |
cOut = frontColor * kKmESun; | |
} | |
else | |
{ | |
// Ground | |
far = (-kCameraHeight) / (min(-0.001, eyeRay.y)); | |
float3 pos = cameraPos + far * eyeRay; | |
// Calculate the ray's starting position, then calculate its scattering offset | |
float depth = exp((-kCameraHeight) * (1.0 / kScaleDepth)); | |
float cameraAngle = dot(-eyeRay, pos); | |
float lightAngle = dot(_WorldSpaceLightPos0.xyz, pos); | |
float cameraScale = scale(cameraAngle); | |
float lightScale = scale(lightAngle); | |
float cameraOffset = depth * cameraScale; | |
float temp = (lightScale + cameraScale); | |
// Initialize the scattering loop variables | |
float sampleLength = far / kSamples; | |
float scaledLength = sampleLength * kScale; | |
float3 sampleRay = eyeRay * sampleLength; | |
float3 samplePoint = cameraPos + sampleRay * 0.5; | |
// Now loop through the sample rays | |
float3 frontColor = float3(0.0, 0.0, 0.0); | |
float3 attenuate; | |
for(int i=0; i<int(kSamples); i++) // Loop removed because we kept hitting SM2.0 temp variable limits. Doesn't affect the image too much. | |
{ | |
float height = length(samplePoint); | |
float depth = exp(kScaleOverScaleDepth * (kInnerRadius - height)); | |
float scatter = depth * temp - cameraOffset; | |
attenuate = exp(-clamp(scatter, 0.0, kMAX_SCATTER) * (kInvWavelength * kKr4PI + kKm4PI)); | |
frontColor += attenuate * (depth * scaledLength); | |
samplePoint += sampleRay; | |
} | |
cIn = frontColor * (kInvWavelength * kKrESun + kKmESun); | |
cOut = clamp(attenuate, 0.0, 1.0); | |
} | |
groundColor = _Exposure * (cIn + COLOR_2_LINEAR(_GroundColor) * cOut); | |
skyColor = _Exposure * (cIn * getRayleighPhase(_WorldSpaceLightPos0.xyz, -eyeRay)); | |
sunColor = _Exposure * (cOut * _LightColor0.xyz); | |
} | |
// Radial falloff spot for the sun disc: full intensity when the two
// direction vectors coincide, fading smoothly to zero over a 0.03
// separation band; squared for a softer core.
half calcSunSpot(half3 vec1, half3 vec2)
{
    half separation = length(vec1 - vec2);
    half falloff = 1.0 - smoothstep(0.0, 0.03, separation);
    return kSunScale * falloff * falloff;
}
// -------------------------------------------------------------------
// Cloud ray-marching parameters. Declared with defaults here, but note
// that Clouds() overwrites most of them on every call, so the values
// below only take effect if that override block is removed.
// -------------------------------------------------------------------
float _SampleCount0 =8;    // view-ray samples when looking straight up
float _SampleCount1 = 64;  // view-ray samples near the horizon
int _SampleCountL = 8;     // samples for the light (shadow) march
sampler3D _NoiseTex1;      // base 3D noise volume
sampler3D _NoiseTex2;      // detail 3D noise volume
float _NoiseFreq1 = 1.34;  // frequency multiplier for _NoiseTex1
float _NoiseFreq2 = 13.57; // frequency multiplier for _NoiseTex2
float _NoiseAmp1 = -8.5;   // amplitude for the base noise (negative carves holes)
float _NoiseAmp2 = 2.49;   // amplitude for the detail noise
float _NoiseBias = 2.19;   // density bias added before saturate()
float3 _Scroll1;           // scroll velocity applied to the base noise lookup
float3 _Scroll2;           // scroll velocity applied to the detail noise lookup
float _Altitude0 = 1000;   // cloud layer bottom (world units)
float _Altitude1 = 5000;   // cloud layer top (world units)
float _FarDist = 22000;    // distance at which clouds fade out to sky
float _Scatter =0.009;     // in-scattering coefficient
float _HGCoeff = 0.4;      // Henyey-Greenstein anisotropy g
float _Extinct = 0.0025;   // extinction coefficient for Beer's law
// Classic screen-space hash: maps a 2D coordinate to a pseudo-random
// value in [0, 1) via the well-known sin/frac trick. Deterministic for
// a given uv, but with no spatial coherence.
float UVRandom(float2 uv)
{
    float seed = dot(float2(12.9898, 78.233), uv);
    return frac(43758.5453 * sin(seed));
}
// Sample the two scrolling 3D noise volumes at world position uvw and
// combine them into a cloud density in [0, 1], faded in near the bottom
// of the cloud layer and out near its top.
float SampleNoise(float3 uvw)
{
    const float baseFreq = 1e-5;

    // Lookup coordinates for each volume (w = 0 selects mip level 0),
    // scrolled over time to animate the clouds.
    float4 coord1 = float4(uvw * _NoiseFreq1 * baseFreq, 0);
    float4 coord2 = float4(uvw * _NoiseFreq2 * baseFreq, 0);
    coord1.xyz += _Scroll1.xyz * _Time.x;
    coord2.xyz += _Scroll2.xyz * _Time.x;

    // Blend base and detail noise, then bias into a usable density.
    float base = tex3Dlod(_NoiseTex1, coord1).a;
    float detail = tex3Dlod(_NoiseTex2, coord2).a;
    float density = saturate(base * _NoiseAmp1 + detail * _NoiseAmp2 + _NoiseBias);

    // Vertical shaping: ramp up over the bottom 10% of the layer and
    // down over the top 40%.
    float heightInLayer = uvw.y - _Altitude0;
    float layerThickness = _Altitude1 - _Altitude0;
    density *= smoothstep(0, layerThickness * 0.1, heightInLayer);
    density *= smoothstep(0, layerThickness * 0.4, layerThickness - heightInLayer);
    return density;
}
// Henyey-Greenstein phase function for the angle whose cosine is given,
// using anisotropy g = _HGCoeff (normalized by 1/2 rather than 1/(4*pi),
// matching this shader's convention).
float HenyeyGreenstein(float cosine)
{
    float gSquared = _HGCoeff * _HGCoeff;
    float denominator = pow(1 + gSquared - 2 * _HGCoeff * cosine, 1.5);
    return 0.5 * (1 - gSquared) / denominator;
}
// Beer-Lambert transmittance for the given accumulated optical depth.
float Beer(float depth)
{
    float transmittance = exp(-_Extinct * depth);
    return transmittance;
}
// Beer-Lambert transmittance multiplied by the "powder" term
// (1 - exp(-2*e*d)), which darkens thin, light-facing cloud edges.
float BeerPowder(float depth)
{
    float beer = exp(-_Extinct * depth);
    float powder = 1 - exp(-_Extinct * 2 * depth);
    return beer * powder;
}
// March from pos toward the directional light up to the top of the
// cloud layer, accumulating optical depth, and return the resulting
// powder-modulated transmittance.
// NOTE(review): assumes _WorldSpaceLightPos0.y > 0; a horizontal or
// below-horizon light makes the stride zero/negative — confirm callers.
float MarchLight(float3 pos, float rand)
{
    float3 lightDir = _WorldSpaceLightPos0.xyz;
    float stride = (_Altitude1 - pos.y) / (lightDir.y * _SampleCountL);

    // Jitter the start point along the light direction to hide banding.
    float3 samplePos = pos + lightDir * stride * rand;

    float opticalDepth = 0;
    UNITY_LOOP for (int s = 0; s < _SampleCountL; s++)
    {
        opticalDepth += SampleNoise(samplePos) * stride;
        samplePos += lightDir * stride;
    }
    return BeerPowder(opticalDepth);
}
// Returns the background sky color for ray r: evaluates the Mie/Rayleigh
// scattering model, then blends the sky and ground results around the
// horizon.
// NOTE(review): the uv parameter is unused — MieSky is fed r.uv directly.
float4 SkyColor(Ray r, float2 uv) {
    // skyColor / groundColor / sunColor look like file-scope variables
    // filled in by MieSky (see the assignments at the end of that
    // function above); their declarations are outside this chunk — confirm.
    MieSky(r, r.uv, skyColor, groundColor, sunColor);
    //New Method
    half3 col = half3(0.0, 0.0, 0.0);
    // if y > 1 [eyeRay.y < -SKY_GROUND_THRESHOLD] - ground
    // if y >= 0 and < 1 [eyeRay.y <= 0 and > -SKY_GROUND_THRESHOLD] - horizon
    // if y < 0 [eyeRay.y > 0] - sky
    half3 ray = -r.direction;
    half y = ray.y / SKY_GROUND_THRESHOLD;
    //lerp between colors calculated by MieSky
    col = lerp(skyColor, groundColor, saturate(y));
    // Sun-spot contribution, deliberately left disabled in the original:
    /*if (y < 0.0)
    {
    half mie = calcSunSpot(_WorldSpaceLightPos0.xyz, -ray);
    col += mie * sunColor;
    }*/
    return half4(col, 1);
}
// March the view ray through the cloud slab between _Altitude0 and
// _Altitude1, accumulating lit in-scattering over the sky color.
// NOTE(review): this function overwrites the file-scope tuning globals
// on every call, so the declaration-site defaults above are dead.
float3 Clouds(Ray rayIn) {
    float3 sky = SkyColor(rayIn, rayIn.uv);
    // Hard-coded quality/tuning overrides (clobber the shader globals).
    _SampleCount0 = 64;
    _SampleCount1 = 128;
    _SampleCountL = 32;
    _NoiseFreq1 = 1.34;
    _NoiseFreq2 = 13.57;
    _NoiseAmp1 = -8.5;
    _NoiseAmp2 = 2.49;
    _NoiseBias = 2.19;
    //_Scroll1 = float3(0.01,0.08,0.06);
    //_Scroll2 = float3(0.01, 0.05, 0.03);
    _Altitude0 = 300;
    _Altitude1 = 5000;
    _FarDist = 22000;
    _Scatter = 0.009;
    _HGCoeff = 0.4;
    _Extinct = 0.0025;
    float3 ray = rayIn.direction;
    // Fewer samples when looking up (shorter path through the slab).
    // NOTE(review): ray.y is not clamped before this lerp — confirm
    // behavior for downward rays (ray.y < 0).
    int samples = lerp(_SampleCount1, _SampleCount0, ray.y);
    // Entry/exit distances of the slab along the view ray.
    float dist0 = _Altitude0 / ray.y;
    float dist1 = _Altitude1 / ray.y;
    float stride = (dist1 - dist0) / samples;
    half3 col = sky;
    half y = -ray.y / SKY_GROUND_THRESHOLD;
    if (y < 0.0)
    {
    // Add the sun disc for upward-facing rays.
    half mie = calcSunSpot(_WorldSpaceLightPos0.xyz, ray);
    col += mie * _LightColor0;
    }
    // Early out below/near the horizon or beyond the fade distance.
    // NOTE(review): returning fixed4 from a float3 function truncates the
    // alpha, and col already starts as sky, so "sky + col" counts the sky
    // twice — looks unintentional; confirm before relying on this path.
    if (ray.y < 0.01 || dist0 >= _FarDist) return fixed4(sky+col, 1);
    float3 light = _WorldSpaceLightPos0.xyz;
    float dotDot = dot(ray, light);
    //float3 viewLightDir = normalize( mul((float3x3)UNITY_MATRIX_V, _WorldSpaceLightPos0.xyz));
    //float3 eyeRay = normalize(mul((float3x3)unity_ObjectToWorld, _WorldSpaceLightPos0.xyz));
    float3 acc = 0;       // accumulated in-scattered light
    float yMult = 0;      // NOTE(review): never used below
    float depth = 0;      // accumulated optical depth along the view ray
    float hg = HenyeyGreenstein(dotDot);
    // Per-pixel, per-frame jitter to decorrelate sample positions.
    float2 uv = rayIn.uv + _Time.x;
    float offs = UVRandom(uv) * (dist1 - dist0) / samples;
    float3 pos = _WorldSpaceCameraPos + ray * (dist0 + offs);
    UNITY_LOOP for (int s = 0; s < samples; s++)
    {
    float n = SampleNoise(pos);
    if (n > 0)
    {
    float density = n * stride;
    float rand = UVRandom(uv + s + 1);
    // In-scattering: phase * density, shadowed by the light march,
    // attenuated by the view-path transmittance so far.
    float scatter = ((density * _Scatter * hg)) * MarchLight(pos, rand * 0.5);
    if (light.y > 0)acc += _LightColor0 * scatter * BeerPowder(depth);
    depth += density;
    }
    pos += ray * stride;
    }
    // Attenuate whatever was behind the clouds.
    col *= Beer(depth);
    // Preserve an over-bright sun spot through the clouds.
    float3 sun = float3(0, 0, 0);
    if (col.r >= 1) {
    sun = col;
    }
    acc += Beer(depth) * sky;
    // Fade clouds out with distance to hide the slab's far edge.
    acc = lerp(acc, sky, saturate(dist0 / _FarDist));
    acc += sun;
    return acc;
}
//SimpleColors
// Gradient environment: blends from white at the nadir to a light blue
// at the zenith based on the ray's vertical component.
// (The uv parameter is unused; kept for signature compatibility.)
float3 EnvColor2(Ray r, float2 uv) {
    float blend = 0.5 * (r.direction.y + 1.0);
    float3 bottomColor = float3(1.0, 1.0, 1.0);
    float3 topColor = float3(0.5, 0.7, 1.0);
    return (1.0 - blend) * bottomColor + blend * topColor;
}
//SimpleSun+Colors
// Gradient environment with a sun/moon disc: ground color dominates near
// the horizon, sky tint plus a calcSunSpot disc dominate upward.
float3 EnvColor3(Ray r) {
    half3 dir = r.direction;
    // Horizon weight: 1 at/below the horizon, falling off sharply upward.
    float horizonWeight = pow(min(1.0f, 1.0f - dir.y), 15);
    float skyWeight = 1.0f - horizonWeight;
    // Our moon circle
    half sunSpot = calcSunSpot(_DirectionalLight.xyz, -dir);
    // Blend ground fog with the moon
    half3 col = _GroundColor * horizonWeight + sunSpot * skyWeight;
    // Add sky color
    col += _SkyTint * skyWeight;
    return col;
}
//------------------------------------- | |
//- SAMPLING | |
// Build an orthonormal basis (tangent, binormal, normal) around the
// given unit normal, for transforming tangent-space samples to world space.
float3x3 GetTangentSpace(float3 normal)
{
    // Pick a helper axis guaranteed not to be (near-)parallel to the normal.
    float3 helper = abs(normal.x) > 0.99f ? float3(0, 0, 1) : float3(1, 0, 0);
    float3 tangent = normalize(cross(normal, helper));
    float3 binormal = normalize(cross(normal, tangent));
    return float3x3(tangent, binormal, normal);
}
// Draw a random direction on the hemisphere around normal. alpha shapes
// the lobe: 0 = uniform, 1 = cosine-weighted, larger = tighter Phong-style.
float3 SampleHemisphere(float3 normal, float alpha)
{
    // Inverse-CDF sample of cos(theta) for the chosen lobe exponent.
    // (rand() order preserved: cosTheta draw first, then phi.)
    float cosTheta = pow(rand(), 1.0f / (alpha + 1.0f));
    float sinTheta = sqrt(1.0f - cosTheta * cosTheta);
    float phi = 2 * PI * rand();
    float3 localDir = float3(cos(phi) * sinTheta, sin(phi) * sinTheta, cosTheta);
    // Rotate from tangent space into world space.
    return mul(localDir, GetTangentSpace(normal));
}
//------------------------------------- | |
//- SHADE | |
// Map a [0,1] smoothness value to a Phong lobe exponent in [1, 1000],
// growing exponentially so high smoothness gives very tight highlights.
float SmoothnessToPhongAlpha(float s)
{
    float squared = s * s;
    return pow(1000.0f, squared);
}
// Cast a shadow ray from the hit point toward the directional light.
// Returns black when occluded, otherwise the light color scaled by its
// intensity (stored in _DirectionalLight.w).
float3 Shadow(RayHit hit) {
    // Offset along the normal to avoid self-intersection.
    float3 origin = hit.position + hit.normal * 0.001f;
    Ray shadowRay = CreateRay(origin, -1 * _DirectionalLight.xyz, float2(0, 0));
    RayHit shadowHit = Trace(shadowRay);
    if (shadowHit.distance != 1.#INF)
    {
        // Something blocks the light.
        return float3(0.0f, 0.0f, 0.0f);
    }
    return _LightColor0.xyz*_DirectionalLight.w;
}
// Path-tracing shade step: given a hit, Russian-roulette selects a
// specular, diffuse, or refractive continuation, mutates `ray` in place
// for the next bounce, and returns the surface emission. On a miss it
// kills the ray and returns the environment color per _SkyMode.
float3 Shade(inout Ray ray, RayHit hit)
{
    if (hit.distance < 1.#INF)
    {
    // Calculate chances of diffuse and specular reflection
    // NOTE(review): clamping albedo by (transparency - specular) is
    // unusual — presumably an ad-hoc energy-conservation tweak; confirm.
    hit.albedo = min(hit.transparency - hit.specular, hit.albedo);
    float specChance = energy(hit.specular);
    float diffChance = energy(hit.albedo);
    float transparencyChance = (hit.transparency);
    // NOTE(review): `sum` is computed but the normalization below is
    // commented out, so specChance + diffChance may not sum to 1 and the
    // roulette can terminate rays more often than intended.
    float sum = specChance + diffChance;
    //specChance /= sum;
    //diffChance /= sum;
    //float inShadow = _LightColor0.xyz*_DirectionalLight.w;
    //if (shadow.distance != 1.#INF)
    //{
    //	inShadow = float3(0.0f, 0.0f, 0.0f);
    //}
    // Roulette-select the ray's path
    float roulette = rand();
    if (roulette < specChance)
    {
    //// Specular reflection
    //float alpha = 15.0f;
    //ray.origin = hit.position + hit.normal * 0.001f;
    //ray.direction = SampleHemisphere(reflect(ray.direction, hit.normal), alpha);
    //float f = (alpha + 2) / (alpha + 1);
    //ray.energy *= (1.0f / specChance) * hit.specular * sdot(hit.normal, ray.direction, f);
    // Offset origin to avoid self-intersection.
    ray.origin = hit.position + hit.normal * 0.001f;
    float alpha = SmoothnessToPhongAlpha(hit.smoothness);
    // Perfectly smooth surfaces get a mirror reflection; otherwise
    // importance-sample a Phong lobe around the mirror direction.
    if(hit.smoothness ==1)ray.direction = reflect(ray.direction, hit.normal);
    else
    ray.direction = SampleHemisphere(reflect(ray.direction, hit.normal), alpha);
    float f = (alpha + 2) / (alpha + 1);
    ray.energy *= (1.0f / specChance) * hit.specular * sdot(hit.normal, ray.direction, f);
    //float3 specular = float3(0.6f, 0.6f, 0.6f);
    // Reflect the ray and multiply energy with specular reflection
    //ray.origin = hit.position + hit.normal * 0.001f;
    //ray.direction = reflect(ray.direction, hit.normal);
    //ray.energy *= specular;
    // Return nothing
    //return float3(0.0f, 0.0f, 0.0f);
    }
    else if (diffChance > 0 && transparencyChance == 1 && roulette < specChance + diffChance)
    {
    //// Diffuse reflection: cosine-weighted hemisphere sample.
    ray.origin = hit.position + hit.normal * 0.001f;
    ray.direction = SampleHemisphere(hit.normal, 1.0f);
    ray.energy *= (1.0f / diffChance) * hit.albedo;
    }
    else if (transparencyChance < 1) {
    // Refraction branch (glass-style), after Shirley's dielectric model.
    const float refIdx = hit.refraction;
    //1.56 ior reflection;
    float3 outwardNormal;
    float niOverNt;
    float3 refracted;
    float cosine;
    // Ray exiting the medium vs. entering it.
    if (dot(ray.direction, hit.normal) > 0) {
    outwardNormal = -hit.normal;
    niOverNt = refIdx;
    cosine = refIdx * dot(ray.direction, hit.normal) / length(ray.direction);
    }
    else {
    outwardNormal = hit.normal;
    niOverNt = 1.0 / refIdx;
    cosine = -dot(ray.direction, hit.normal) / length(ray.direction);
    }
    // Schlick Fresnel, forced to 1 on total internal reflection.
    // NOTE(review): reflectProb is computed but never used to choose
    // reflection vs. refraction — the ray always refracts; confirm intent.
    float reflectProb = lerp(1.0, Schlick(cosine, refIdx),
    Refract(ray.direction, outwardNormal, niOverNt, refracted));
    float alpha = SmoothnessToPhongAlpha(1 - transparencyChance);
    ray.origin = hit.position;
    ray.direction = refracted;
    float f = (alpha + 2) / (alpha + 1);
    // NOTE(review): dividing by diffChance here can divide by zero when
    // the albedo is black — presumably should be the refraction chance.
    ray.energy *= (1.0f / diffChance) * (hit.albedo)*(1-transparencyChance);
    }
    else
    {
    // Terminate ray
    ray.energy = 0.0f;
    }
    return hit.emission;
    }
    else
    {
    // Erase the ray's energy - the sky doesn't reflect anything
    ray.energy = 0.0f;
    // Sample the skybox and write it
    // Equirectangular mapping of the ray direction.
    float theta = acos(ray.direction.y) / -PI;
    float phi = atan2(ray.direction.x, -ray.direction.z) / -PI * 0.5f;
    // _SkyMode: 0 = flat background color, 1 = procedural sky + clouds,
    // 2 = HDRI skybox texture (matches skyMode enum in the C# host).
    if (_SkyMode == 0)return (float3(_BackgroundColor.x, _BackgroundColor.y, _BackgroundColor.z));
    else if(_SkyMode == 1)return (Clouds(ray));
    else if(_SkyMode ==2)return (_SkyboxTexture.SampleLevel(sampler_SkyboxTexture, float2(phi, theta), 0).xyz);
    else return float3(0, 0, 0);
    }
}
//------------------------------------- | |
//- KERNEL | |
// Compute kernel: one thread per pixel. Builds a jittered camera ray,
// traces it for up to 8 bounces, accumulating emitted/shaded radiance
// weighted by the ray's remaining energy, and writes the result.
[numthreads(8,8,1)]
void CSMain (uint3 id : SV_DispatchThreadID)
{
    _Pixel = id.xy;
    // Get the dimensions of the RenderTexture
    uint width, height;
    Result.GetDimensions(width, height);
    // Transform pixel to [-1,1] range
    // (_PixelOffset is the per-frame sub-pixel jitter for accumulation AA.)
    float2 uv = float2((id.xy + _PixelOffset) / float2(width, height) * 2.0f - 1.0f);
    // Get a ray for the UVs
    Ray ray = CreateCameraRay(uv);
    // Trace and shade the ray
    float3 result = float3(0, 0, 0);
    // Up to 8 bounces; Shade() mutates the ray for the next segment.
    for (int i = 0; i < 8; i++)
    {
    RayHit hit = Trace(ray);
    //Ray shadowRay = CreateRay(hit.position + hit.normal * 0.001f, -1 * _DirectionalLight.xyz, float2(0, 0));
    //RayHit shadowHit = Trace(shadowRay);
    result += ray.energy * Shade(ray, hit);
    //result += Shadow(hit);
    // Stop once the ray has no energy left in any channel.
    if (!any(ray.energy))
    break;
    }
    Result[id.xy] = float4(result, 1);
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
https://chronokun.github.io/posts/2018-05-29--0.html