The example shaders say:
Code: Select all
//x = generic timer in range 0..1, period of 16777216 ms (4.6 hours), y = average fps, w = frame time elapsed (in seconds)
float4 Timer;
So, am I doing something wrong?
Here's my code if you wish to test it yourself:
Code: Select all
// Overall strength of the grain overlay (0 = off, 1 = full effect).
// Applied as a final multiplier in the composite step.
float fFilmGrain_Intensity <
string UIName = "Film Grain Intensity";
string UIWidget = "spinner";
float UIMin = 0.0;
float UIMax = 1.0;
> = 1.0;
// Scales how fast the grain pattern animates over time.
float fFilmGrain_Speed <
string UIName = "Film Grain Speed";
string UIWidget = "spinner";
float UIMin = 0.0;
float UIMax = 100.0;
> = 1.0;
// Mean (center) of the gaussian used to shape the noise distribution.
float fFilmGrain_Mean <
string UIName = "Film Grain Mean";
string UIWidget = "spinner";
float UIMin = 0.0;
float UIMax = 1.0;
> = 0.0;
// Width of the gaussian used to shape the noise distribution.
// NOTE(review): get_film_grain() passes this value squared into gaussian()'s
// standard-deviation parameter — confirm whether the slider is meant to be
// a sigma or a variance. UIMin of 0.0 permits a zero width.
float fFilmGrain_Variance <
string UIName = "Film Grain Variance";
string UIWidget = "spinner";
float UIMin = 0.0;
float UIMax = 1.0;
> = 0.5;
...
// Normal-distribution PDF: (1 / (sigma * sqrt(2*pi))) * exp(-(z-u)^2 / (2*sigma^2)).
//   z = sample value, u = mean, o = standard deviation.
float gaussian(float z, float u, float o) {
// 2.5066283 = sqrt(2 * pi); the original 2.506 truncation skews the
// normalization by ~0.03% for no benefit.
// Clamp sigma away from zero: fFilmGrain_Variance has UIMin = 0.0, so the
// caller can feed o = 0 here, which would divide by zero and spray inf/NaN
// grain across the frame.
float sigma = (o < 1e-5) ? 1e-5 : o;
return (1.0 / (sigma * 2.5066283)) * exp(-(((z - u) * (z - u)) / (2.0 * (sigma * sigma))));
}
// Per-pixel animated grain value, shaped by a gaussian distribution.
float get_film_grain(float2 uv) {
// BUG FIX: Timer.w is the *frame time elapsed in seconds* (per the quoted
// Timer docs) — a tiny, roughly constant delta each frame — so using it as a
// clock means the grain barely animates. Timer.x is the accumulating timer:
// it sweeps 0..1 over a 16777216 ms period, so multiplying by 16777.216
// yields elapsed time in seconds.
float t = Timer.x * 16777.216 * fFilmGrain_Speed;
// Classic screen-space hash: projects the UV onto an irrational-ish
// direction to decorrelate neighboring pixels.
float seed = dot(uv, float2(12.9898, 78.233));
// Adding t outside the sin() shifts the fractional pattern every frame.
float noise = frac(sin(seed) * 43758.5453 + t);
// NOTE(review): gaussian() takes a standard deviation; passing
// Variance * Variance looks like a sigma/variance mix-up — confirm intent.
return gaussian(noise, fFilmGrain_Mean, fFilmGrain_Variance * fFilmGrain_Variance);
}
...
// Film Grain
// Additive blend, masked by (1.0 - color) so bright pixels receive less grain
// than dark ones (mimicking film response and avoiding clipped highlights).
// The 0.01 factor keeps the effect subtle even at Intensity = 1.
color += (get_film_grain(uv) * (1.0 - color)) * fFilmGrain_Intensity * 0.01;