#include <defocus/base.h>
#include <defocus/models.h>
#include <defocus/image.h>

#include <math.h>
#include <stdlib.h>

/* The thin lense model, implemented as a quasi-raytracer. */
/** Map uv coordinates in [0, 1) to the unit circle centered at (0, 0). */
|
|
|
|
static df_v2 concentric_map_disk(df_v2 in_p)
|
2023-04-19 13:13:08 +02:00
|
|
|
{
|
2023-05-08 13:28:53 +02:00
|
|
|
df_v2 offset = df_sub_v2(df_mul_v2(2.f, in_p), (df_v2){{1.f, 1.f}});
|
|
|
|
if (offset.u == 0.f && offset.v == 0.f)
|
|
|
|
return offset;
|
|
|
|
|
|
|
|
float theta, r;
|
|
|
|
if (fabsf(offset.u) > fabsf(offset.v)) {
|
|
|
|
r = offset.u;
|
|
|
|
theta = DF_PI_OVER_4 * (offset.v / offset.u);
|
|
|
|
} else {
|
|
|
|
r = offset.v;
|
|
|
|
theta = DF_PI_OVER_2 - DF_PI_OVER_4 * (offset.u / offset.v);
|
|
|
|
}
|
|
|
|
return df_mul_v2(r, (df_v2){{cosf(theta), sinf(theta)}});
|
|
|
|
}
/** Render @p in_image into @p out_image with thin-lens depth of field.
 *
 * @param params    Lens parameters. `sample_count <= 0` falls back to 64
 *                  samples per pixel; `aperture` scales the lens disk;
 *                  `focal_distance` is the distance to the plane of focus.
 * @param in_image  Source image, sampled through the lens.
 * @param out_image Destination image; its size defines the render resolution.
 */
void df_thin_lense(df_thin_lense_params params, const df_image *in_image, df_image *out_image)
{
    /* We do the following:
     * - Orthographic projection (i.e. don't do anything really except flip the y axis)
     * - For each pixel:
     *   - Transform from raster space into camera space
     *   - Generate ray from pixel to plane of focus through lense center
     *   - For each sample:
     *     - Choose random point on lense
     *     - Trace ray from lense point through the above intersection point to the image.
     *   - Sum all samples
     */

    int sample_count = (params.sample_count > 0) ? params.sample_count : 64;

    int w, h;
    df_get_image_size(out_image, &w, &h);

    /* TODO(Kevin): It would be pretty trivial to
     * a) parallelize this, and
     * b) use SIMD for multiple rays
     */
    for (int y = 0; y < h; ++y) {
        /* Center camera space on the image midpoint. The integer division
         * by 2 mirrors the inverse transform (img_y = sample_p.y + h / 2)
         * used when reading the source image below. The previous
         * (float)(y - h) / 2.f mapped pixels to [-h/2, 0), which was
         * inconsistent with that inverse. */
        float cam_y = (float)(y - h / 2);
        for (int x = 0; x < w; ++x) {
            float cam_x = (float)(x - w / 2);

            df_v3 ray_dir = {{cam_x, cam_y, 1.f}};
            ray_dir = df_normalize_v3(ray_dir);
            /* NOTE(review): z is forced back to 1 after normalization, so
             * focus_p.z is exactly focal_distance ("quasi" raytracer) —
             * presumably intentional; confirm against the lens model. */
            ray_dir.z = 1.f;

            /* Calculate the intersection with the plane of focus */
            df_v3 focus_p = df_mul_v3(params.focal_distance, ray_dir);

            /* Accumulate samples in 32-bit to avoid 8-bit channel overflow. */
            uint32_t color[4] = {0, 0, 0, 0};

            for (int sample_idx = 0; sample_idx < sample_count; ++sample_idx) {
                /* Uniform random point on the lens disk, scaled by aperture. */
                df_v2 lens_uv = {(float)(rand() % 1024) / 1024.f, (float)(rand() % 1024) / 1024.f};
                df_v2 lens_p = df_mul_v2(params.aperture, concentric_map_disk(lens_uv));

                /* Ray from the lens sample through the in-focus point. */
                ray_dir.x = focus_p.x - lens_p.x;
                ray_dir.y = focus_p.y - lens_p.y;
                ray_dir.z = focus_p.z;
                ray_dir = df_normalize_v3(ray_dir);

                df_v3 sample_p = df_mul_v3(params.focal_distance, ray_dir);

                /* Back from camera space to raster space (inverse of the
                 * cam_x/cam_y mapping above). */
                int img_x = (int)sample_p.x + w / 2;
                int img_y = (int)sample_p.y + h / 2;

                /* NOTE(review): img_x/img_y can fall outside the source
                 * image for wide apertures — assuming df_get_image_pixel
                 * clamps or wraps; verify. */
                df_color sample_color = df_get_image_pixel(in_image, img_x, img_y);

                for (int i = 0; i < 4; ++i)
                    color[i] += (uint32_t)sample_color.e[i];
            }

            /* Box-filter average of all samples. */
            df_color pixel_color = {
                {color[0] / sample_count, color[1] / sample_count, color[2] / sample_count, color[3] / sample_count}};

            df_set_image_pixel(out_image, x, y, pixel_color);
        }
    }
}